commit b4243e1589
188 changed files with 13657 additions and 3742 deletions

app.py (51 changed lines)
@ -21,6 +21,7 @@ from datetime import datetime
|
|||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.shared.eventManagement import eventManager
|
||||
from modules.workflows.automation import subAutomationSchedule
|
||||
from modules.workflows.automation2 import subAutomation2Schedule
|
||||
from modules.features.automation2.emailPoller import start as startAutomation2EmailPoller
|
||||
from modules.features.automation2.emailPoller import stop as stopAutomation2EmailPoller
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
|
|
@ -355,7 +356,15 @@ async def lifespan(app: FastAPI):
|
|||
logger.warning(f"Could not initialize feature containers: {e}")
|
||||
|
||||
# --- Init Managers ---
|
||||
import asyncio
|
||||
try:
|
||||
main_loop = asyncio.get_running_loop()
|
||||
eventManager.set_event_loop(main_loop)
|
||||
subAutomation2Schedule.set_main_loop(main_loop)
|
||||
except RuntimeError:
|
||||
pass
|
||||
subAutomationSchedule.start(eventUser) # Automation scheduler
|
||||
subAutomation2Schedule.start(eventUser) # Automation2 schedule trigger (cron)
|
||||
# Automation2 email poller: started on-demand when a run pauses for email.checkEmail
|
||||
eventManager.start()
|
||||
|
||||
|
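For background, a minimal sketch (class and method names hypothetical, not the project's actual eventManager or scheduler API) of why the running loop is captured during lifespan startup: scheduler threads cannot await coroutines themselves, so they hand them to the captured main loop.

import asyncio
from typing import Optional

class LoopAwareManager:
    """Illustrative only: stores the app's main event loop so plain worker
    threads can schedule coroutines onto it."""

    def __init__(self) -> None:
        self._loop: Optional[asyncio.AbstractEventLoop] = None

    def set_event_loop(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop

    def submit(self, coro):
        # Called from a worker thread; returns a concurrent.futures.Future.
        if self._loop is None:
            raise RuntimeError("main loop not captured yet")
        return asyncio.run_coroutine_threadsafe(coro, self._loop)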
|
@ -374,7 +383,7 @@ async def lifespan(app: FastAPI):
|
|||
if settingsCreated > 0:
|
||||
logger.info(f"Billing startup: Created {settingsCreated} missing mandate billing settings")
|
||||
|
||||
# Step 2: Ensure all users have billing accounts (for PREPAY_USER mandates)
|
||||
# Step 2: Ensure all users have billing audit accounts
|
||||
accountsCreated = billingInterface.ensureAllUserAccountsExist()
|
||||
if accountsCreated > 0:
|
||||
logger.info(f"Billing startup: Created {accountsCreated} missing user accounts")
|
||||
|
|
@ -386,6 +395,7 @@ async def lifespan(app: FastAPI):
|
|||
|
||||
# --- Stop Managers ---
|
||||
stopAutomation2EmailPoller(eventUser) # Automation2 email poller (no-op if not running)
|
||||
subAutomation2Schedule.stop(eventUser) # Automation2 schedule
|
||||
eventManager.stop()
|
||||
subAutomationSchedule.stop(eventUser) # Automation scheduler
|
||||
|
||||
|
|
@ -479,18 +489,6 @@ def getAllowedOrigins():
|
|||
CORS_ORIGIN_REGEX = r"https://.*\.(poweron\.swiss|poweron-center\.net)"
|
||||
|
||||
|
||||
# CORS configuration using environment variables
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=getAllowedOrigins(),
|
||||
allow_origin_regex=CORS_ORIGIN_REGEX,
|
||||
allow_credentials=True,
|
||||
allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
||||
allow_headers=["*"],
|
||||
expose_headers=["*"],
|
||||
max_age=86400, # Increased caching for preflight requests
|
||||
)
|
||||
|
||||
# SlowAPI rate limiter initialization
|
||||
from modules.auth import limiter
|
||||
from slowapi.errors import RateLimitExceeded
|
||||
|
|
@ -500,7 +498,7 @@ app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
|
|||
|
||||
|
||||
async def _insufficientBalanceHandler(request: Request, exc: Exception):
|
||||
"""HTTP 402 with structured billing hint (PREPAY_USER vs PREPAY_MANDATE)."""
|
||||
"""HTTP 402 with structured billing hint."""
|
||||
payload = exc.toClientDict() if hasattr(exc, "toClientDict") else {"error": "INSUFFICIENT_BALANCE", "message": str(exc)}
|
||||
return JSONResponse(status_code=402, content={"detail": payload})
|
||||
|
||||
|
|
@ -528,6 +526,19 @@ app.add_middleware(
|
|||
ProactiveTokenRefreshMiddleware, enabled=True, check_interval_minutes=5
|
||||
)
|
||||
|
||||
# CORS must be registered LAST so it wraps the whole stack: every response (errors, CSRF 403,
|
||||
# rate limits) still gets Access-Control-Allow-Origin for browser cross-origin calls.
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=getAllowedOrigins(),
|
||||
allow_origin_regex=CORS_ORIGIN_REGEX,
|
||||
allow_credentials=True,
|
||||
allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
||||
allow_headers=["*"],
|
||||
expose_headers=["*"],
|
||||
max_age=86400,
|
||||
)
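As a side note on the registration-order comment above, a simplified illustration (not the project's real stack) of Starlette's rule: each add_middleware call wraps everything registered before it, so the middleware added last runs outermost and can decorate every response, including errors raised by inner middleware.

from fastapi import FastAPI
from starlette.middleware.base import BaseHTTPMiddleware

demo = FastAPI()

class AuditMiddleware(BaseHTTPMiddleware):      # added first -> innermost
    async def dispatch(self, request, call_next):
        return await call_next(request)

class HeaderMiddleware(BaseHTTPMiddleware):     # added last -> outermost
    async def dispatch(self, request, call_next):
        response = await call_next(request)
        response.headers["X-Outermost"] = "1"   # stamped on every response
        return response

demo.add_middleware(AuditMiddleware)
demo.add_middleware(HeaderMiddleware)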
|
||||
|
||||
# Include all routers
|
||||
|
||||
from modules.routes.routeAdmin import router as generalRouter
|
||||
|
|
@ -545,6 +556,9 @@ app.include_router(userRouter)
|
|||
from modules.routes.routeDataFiles import router as fileRouter
|
||||
app.include_router(fileRouter)
|
||||
|
||||
from modules.routes.routeDataSources import router as dataSourceRouter
|
||||
app.include_router(dataSourceRouter)
|
||||
|
||||
from modules.routes.routeDataPrompts import router as promptRouter
|
||||
app.include_router(promptRouter)
|
||||
|
||||
|
|
@ -560,9 +574,18 @@ app.include_router(msftRouter)
|
|||
from modules.routes.routeSecurityGoogle import router as googleRouter
|
||||
app.include_router(googleRouter)
|
||||
|
||||
from modules.routes.routeSecurityClickup import router as clickupRouter
|
||||
app.include_router(clickupRouter)
|
||||
|
||||
from modules.routes.routeClickup import router as clickupApiRouter
|
||||
app.include_router(clickupApiRouter)
|
||||
|
||||
from modules.routes.routeVoiceGoogle import router as voiceGoogleRouter
|
||||
app.include_router(voiceGoogleRouter)
|
||||
|
||||
from modules.routes.routeVoiceUser import router as voiceUserRouter
|
||||
app.include_router(voiceUserRouter)
|
||||
|
||||
from modules.routes.routeSecurityAdmin import router as adminSecurityRouter
|
||||
app.include_router(adminSecurityRouter)
|
||||
|
||||
|
|
|
|||
env_dev.env (11 changed lines)
|
|
@ -46,15 +46,20 @@ Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.ap
|
|||
Service_GOOGLE_DATA_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETDJhbGVQMHlFQzNPVFI1ZzBMa3pNMGlQUHhaQm10eVl1bFlSeTBybzlTOWE2MURXQ0hkRlo0NlNGbHQxWEl1OVkxQnVKYlhhOXR1cUF4T3k0WDdscktkY1oyYllRTmdDTWpfbUdwWGtSd1JvNlYxeTBJdEtaaS1vYnItcW0yaFM=
|
||||
Service_GOOGLE_DATA_REDIRECT_URI = http://localhost:8000/api/google/auth/connect/callback
|
||||
|
||||
# ClickUp OAuth (Connections / automation). Create an app in ClickUp: Settings → Apps → API; set the redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||
Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
|
||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
||||
|
||||
# Stripe Billing (both end with _SECRET for encryption script)
|
||||
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGWDkxSldfM0NCZ3dmbHY5cS1nQlI3UWZ4ZWRrNVdUdEFKa25RckRiQWY0c1E5MjVsZzlfRkZEU0VFU2tNQ01qZnRNQ0pZVU9hVFN6OEU0RXhwdTl3algzLWJlSXRhYmZlMHltSC1XejlGWEU5TDF1LUlYNEh1aG9tRFI4YmlCYzUyei02U1dabWoyb0N2dVFSb1RhWTNnQjBCZkFjV0FfOWdYdDVpX1k5R2pYM1R6SHRiaE10V1l1dnQybjVHWDRiQUJLM0UxRDZnczhJZGFsc3JhOU82QT09
|
||||
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGcHNWTWpBWkFHRExtdU01N3RyZzNsMjhUS3NiVTNCZmMwN2NEcFZ6UkQ1a2I0aUkyNU4wR2dUdHJXYmtkaEFRUnFpcThObHBEQmJkdEFnT1FXeUxOTlU3UDFNRzl6LWdpRFpYdExvY3FTTG9MTkswdEhrVkNKQVFucnBjSnhLNm4=
|
||||
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
|
||||
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
|
||||
STRIPE_API_VERSION = 2026-01-28.clover
|
||||
|
||||
# AI configuration
|
||||
Connector_AiOpenai_API_SECRET = DEV_ENC:Z0FBQUFBQnBaSnM4TWFRRmxVQmNQblVIYmc1Y0Q3aW9zZUtDWlNWdGZjbFpncGp2NHN2QjkxMWxibUJnZDBId252MWk5TXN3Yk14ajFIdi1CTkx2ZWx2QzF5OFR6LUx5azQ3dnNLaXJBOHNxc0tlWmtZcTFVelF4eXBSM2JkbHd2eTM0VHNXdHNtVUprZWtPVzctNlJsZHNmM20tU1N6Q1Q2cHFYSi1tNlhZNDNabTVuaEVGWmIydEhadTcyMlBURmw2aUJxOF9GTzR0dTZiNGZfOFlHaVpPZ1A1LXhhOEFtN1J5TEVNNWtMcGpyNkMzSl8xRnZsaTF1WTZrOUZmb0cxVURjSGFLS2dIYTQyZEJtTm90bEYxVWxNNXVPdTVjaVhYbXhxT3JsVDM5VjZMVFZKSE1tZnM9
|
||||
Connector_AiAnthropic_API_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpENmFBWG16STFQUVZxNzZZRzRLYTA4X3lRanF1VkF4cU45OExNMzlsQmdISGFxTUxud1dXODBKcFhMVG9KNjdWVnlTTFFROVc3NDlsdlNHLUJXeG41NDBHaXhHR0VHVWl5UW9RNkVWbmlhakRKVW5pM0R4VHk0LUw0TV9LdkljNHdBLXJua21NQkl2b3l4UkVkMGN1YjBrMmJEeWtMay1jbmxrYWJNbUV0aktCXzU1djR2d2RSQXZORTNwcG92ZUVvVGMtQzQzTTVncEZTRGRtZUFIZWQ0dz09
|
||||
Connector_AiPerplexity_API_SECRET = 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
|
||||
Connector_AiPerplexity_API_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5ZmdDZ3hrSElrMnQzNFAtel9wX191VjVzN2g1LWZoa0V1YklubEdmMEJDdEZiR1RWeVZrM3V3enBHX3p6WUtTS0kwYkFyVEF0Nm8zX05CelVQcFJUc0lwVW5iNFczc1p1WWJ2WFBmd0lpLUxxWndEeUh0b2hGUHVpN19vb19nMTBnV1A1VmNpWERVX05lQ29VS20wTjZ3PT0=
|
||||
Connector_AiTavily_API_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEQTdnUHMwd2pIaXNtMmtCTFREd0pyQXRKb1F5eGtHSnkyOGZiUnlBOFc0b3Vzcndrc3ViRm1nMDJIOEZKYWxqdWNkZGh5N0Z4R0JlQmxXSG5pVnJUR2VYckZhMWNMZ1FNeXJ3enJLVlpiblhOZTNleUg3ZzZyUzRZanFSeDlVMkI=
|
||||
Connector_AiPrivateLlm_API_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGRHM5eFdUVmVZU1R1cHBwN1RlMUx4T0NlLTJLUFFVX3J2OElDWFpuZmJHVmp4Z3BNNWMwZUVVZUd2TFhRSjVmVkVlcFlVRWtybXh0ZHloZ01ZcnVvX195YjdlWVdEcjZSWFFTTlNBWUlaTlNoLWhqVFBIb0thVlBiaWhjYjFQOFY=
|
||||
Connector_AiMistral_API_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGeEQxYUIxOHhia0JlQWpWQ2dWQWZzY3l6SWwyUnJoR1hRQWloX2lxb2lGNkc4UnA4U2tWNjJaYzB1d1hvNG9fWUp1N3V4OW9FMGhaWVhjSlVwWEc1X2loVDBSZDEtdHdfcTA5QkcxQTR4OHc4RkRzclJrU2d1RFZpNDJkRDRURlE=
|
||||
|
|
|
|||
env_int.env (11 changed lines)
|
|
@ -46,15 +46,20 @@ Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.ap
|
|||
Service_GOOGLE_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
|
||||
Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/connect/callback
|
||||
|
||||
# ClickUp OAuth (Connections / automation). Create an app in ClickUp: Settings → Apps → API; set the redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||
Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
|
||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
||||
|
||||
# Stripe Billing (both end with _SECRET for encryption script)
|
||||
STRIPE_SECRET_KEY_SECRET = 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
|
||||
STRIPE_WEBHOOK_SECRET = INT_ENC:Z0FBQUFBQnBudkpGamJBNW91VUdEaThWRTFiTWpyb3NqSDJJcGtjNkhUVVZqVElxUWExY05KcllSYVk1SkRuS1NjYWpZUk1uU29nb2pzdXUxRzBsOEgyRWtmUEw3dUF4ejFIXzNwTVZRM1R1bVVhTUs4ZHJMT0V4Xy1pcHVfWlBaQV9wVXo5MGlQYXA=
|
||||
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
|
||||
STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
|
||||
STRIPE_API_VERSION = 2026-01-28.clover
|
||||
|
||||
# AI configuration
|
||||
Connector_AiOpenai_API_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4MENkQ2xJVmE5WFZKUkh2SHJFby1YVXN3ZmVxRkptS3ZWRmlwdU93ZEJjSjlMV2NGbU5mS3NCdmFfcmFYTEJNZXFIQ3ozTWE4ZC1pemlQNk9wbjU1d3BPS0ZCTTZfOF8yWmVXMWx0TU1DamlJLVFhSTJXclZsY3hMVWlPcXVqQWtMdER4T252NHZUWEhUOTdIN1VGR3ltazEweXFqQ0lvb0hYWmxQQnpxb0JwcFNhRDNGWXdoRTVJWm9FalZpTUF5b1RqZlRaYnVKYkp0NWR5Vko1WWJ0Wmg2VWJzYXZ0Z3Q4UkpsTldDX2dsekhKMmM4YjRoa2RwemMwYVQwM2cyMFlvaU5mOTVTWGlROU8xY2ZVRXlxZzJqWkxURWlGZGI2STZNb0NpdEtWUnM9
|
||||
Connector_AiAnthropic_API_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjT1ZlRWVJdVZMT3ljSFJDcFdxRFBRVkZhS204NnN5RDBlQ0tpenhTM0FFVktuWW9mWHNwRWx2dHB0eDBSZ0JFQnZKWlp6c01pVGREWHd1eGpERnU0Q2xhaks1clQ1ZXVsdnd2ZzhpNXNQS1BhY3FjSkdkVEhHalNaRGR4emhpakZncnpDQUVxOHVXQzVUWmtQc0FsYmFwTF9TSG5FOUFtWk5Ick1NcHFvY2s1T1c2WXlRUFFJZnh6TWhuaVpMYmppcDR0QUx0a0R6RXlwbGRYb1R4dzJkUT09
|
||||
Connector_AiPerplexity_API_SECRET = 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
|
||||
Connector_AiPerplexity_API_SECRET = INT_ENC:Z0FBQUFBQnB5dkd6UkhtU3lhYmZMSlo0bklQZ2s3UTFBSkprZTNwWkg5Q2lVa0wtenhxWXpva21xVDVMRjdKSmhpTmxWS05IUTRoRHdCbktSRVVjcVFnY1RfV0N2S2dyV0dTMlhxQlRFVm41RkFTWVQzQThuVkZwdlNuVC05QlVRVXB6Qjk3akNpYmY1MFR6R1ByMzlIMllRZlRRYVVRN2ZBPT0=
|
||||
Connector_AiTavily_API_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkdkJMTDY0akhXNzZDWHVYSEt1cDZoOWEzSktneHZEV2JndTNmWlNSMV9KbFNIZmQzeVlrNE5qUEIwcUlBSGM1a0hOZ3J6djIyOVhnZzI3M1dIUkdicl9FVXF3RGktMmlEYmhnaHJfWTdGUkktSXVUSGdQMC1vSEV6VE8zR2F1SVk=
|
||||
Connector_AiPrivateLlm_API_SECRET = INT_ENC:Z0FBQUFBQnBudkpGSjZ1NWh0aWc1R3Z4MHNaeS1HamtUbndhcUZFZDlqUDhjSmg5eHFfdlVkU0RsVkJ2UVRaMWs3aWhraG5jSlc0YkxNWHVmR2JoSW5ENFFCdkJBM0VienlKSnhzNnBKbTJOUTFKczRfWlQ3bWpmUkRTT1I1OGNUSTlQdExacGRpeXg=
|
||||
Connector_AiMistral_API_SECRET = INT_ENC:Z0FBQUFBQnBudkpGZTNtZ1E4TWIxSEU1OUlreUpxZkJIR0Vxcm9xRHRUbnBxbTQ1cXlkbnltWkJVdTdMYWZ4c3Fsam42TERWUTVhNzZFMU9xVjdyRGFCYml6bmZsZFd2YmJzemlrSWN6Q3o3X0NXX2xXNUQteTNONHdKYzJ5YVpLLWdhU2JhSTJQZnI=
|
||||
|
|
|
|||

env_prod.env
@ -46,15 +46,20 @@ Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.ap
|
|||
Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||
Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/connect/callback
|
||||
|
||||
# ClickUp OAuth (Connections / automation). Create an app in ClickUp: Settings → Apps → API; set the redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
||||
|
||||
# Stripe Billing (both end with _SECRET for encryption script)
|
||||
STRIPE_SECRET_KEY_SECRET = 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
|
||||
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||
STRIPE_API_VERSION = 2026-01-28.clover
|
||||
|
||||
# AI configuration
|
||||
Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
|
||||
Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3TnhYdlhSLW5RbXJyMHFXX0V0bHhuTDlTaFJsRDl2dTdIUTFtVFAwTE8tY3hLbzNSMnVTLXd3RUZualN3MGNzc1kwOTIxVUN2WW1rYi1TendFRVVBSVNqRFVjckEzNExyTGNaUkJLMmozazUwemI1cnhrcEtZVXJrWkdaVFFramp3MWZ6RmY2aGlRMXVEYjM2M3ZlbmxMdnNCRDM1QWR0Wmd6MWVnS1I1c01nV3hRLXg3d2NTZXVfTi1Wdm16UnRyNGsyRTZ0bG9TQ1g1OFB5Z002bmQ3QT09
|
||||
Connector_AiPerplexity_API_SECRET = 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
|
||||
Connector_AiPerplexity_API_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6NG5CTm9QOFZRV1BIVC0tV2RKTGtCQWFOUXlpRnhEdjN1U2x3VUdDamtIZV9CQzQ5ZmRmcUh3ZUVUa0NxbGhlenVVdWtaYjdpcnhvUlNFLXZfOWh2dWFZai0xUGU5cWpuYmpnRVRWakh0RVNUUTFyX0w5V0NXVWFrQlZuOTd5TkI0eVRoQ0ZBSm9HYUlYamoyY1FCMmlBPT0=
|
||||
Connector_AiTavily_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3NmItcDh6V0JpcE5Jc0NlUWZqcmllRHB5eDlNZmVnUlNVenhNTm5xWExzbjJqdE1GZ0hTSUYtb2dvdWNhTnlQNmVWQ2NGVDgwZ0MwMWZBMlNKWEhzdlF3TlZzTXhCZWM4Z1Uwb18tSTRoU1JBVTVkSkJHOTJwX291b3dPaVphVFg=
|
||||
Connector_AiPrivateLlm_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGanZ6U3pzZWkwXzVPWGtIQ040XzFrTXc5QWRnazdEeEktaUJ0akJmNnEzbWUzNHczLTJfc2dIdzBDY0FTaXZYcDhxNFdNbTNtbEJTb2VRZ0ZYd05hdlNLR1h6SUFzVml2Z1FLY1BjTl90UWozUGxtak1URnhhZmNDRWFTb0dKVUo=
|
||||
Connector_AiMistral_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGc2tQc2lvMk1YZk01Q1dob1U5cnR0dG03WWE3WkpoOWo0SEpvLU9Rc2lCNDExdy1wZExaN3lpT2FEQkxnaHRmWmZUUUZUUUJmblZreGlpaFpOdnFhbzlEd1RsVVJtX216cmhxTm5BcTN2eUZ2T054cDE5bmlEamJ3NGR6MVpFQnA=
|
||||
|
|
|
|||
|
|
@ -18,7 +18,9 @@ from typing import List, Dict, Any, Optional, AsyncGenerator, Union
|
|||
from modules.datamodels.datamodelAi import AiModel, AiModelCall, AiModelResponse
|
||||
|
||||
|
||||
_RETRY_AFTER_PATTERN = _re.compile(r"try again in (\d+(?:\.\d+)?)\s*s", _re.IGNORECASE)
|
||||
_RETRY_AFTER_PATTERN = _re.compile(
|
||||
r"(?:try again in|retry after)\s+(\d+(?:\.\d+)?)\s*s", _re.IGNORECASE
|
||||
)
|
||||
|
||||
|
||||
def _parseRetryAfterSeconds(message: str) -> float:
|
||||
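A quick, illustrative check of what the widened pattern accepts, covering both the "try again in Ns" and "retry after N s" wordings:

import re

pattern = re.compile(r"(?:try again in|retry after)\s+(\d+(?:\.\d+)?)\s*s", re.IGNORECASE)

for message in ("Rate limit reached, please try again in 2.5s.",
                "429 Too Many Requests: Retry after 30 s"):
    match = pattern.search(message)
    print(float(match.group(1)) if match else None)   # 2.5, then 30.0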
|
|
|
|||
|
|
@ -7,9 +7,9 @@ Connects to the private-llm service running on-premise with Ollama backend.
|
|||
Provides OCR and Vision capabilities via local AI models.
|
||||
|
||||
Models:
|
||||
- poweron-ocr-general: Text extraction and OCR (deepseek backend)
|
||||
- poweron-vision-general: General vision tasks (qwen2.5vl backend)
|
||||
- poweron-vision-deep: Deep vision analysis (granite3.2 backend)
|
||||
- poweron-text-general: Text (qwen2.5); NEUTRALIZATION_TEXT + data/plan ops
|
||||
- poweron-vision-general: Vision (qwen2.5vl); IMAGE_ANALYSE + NEUTRALIZATION_IMAGE
|
||||
- poweron-vision-deep: Vision (granite3.2); IMAGE_ANALYSE + NEUTRALIZATION_IMAGE
|
||||
|
||||
Pricing (CHF per call):
|
||||
- Text models: CHF 0.010
|
||||
|
|
@ -22,7 +22,7 @@ import time
|
|||
from typing import List, Optional, Dict, Any
|
||||
from fastapi import HTTPException
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from .aicoreBase import BaseConnectorAi
|
||||
from .aicoreBase import BaseConnectorAi, RateLimitExceededException
|
||||
from modules.datamodels.datamodelAi import (
|
||||
AiModel,
|
||||
PriorityEnum,
|
||||
|
|
@ -245,6 +245,7 @@ class AiPrivateLlm(BaseConnectorAi):
|
|||
(OperationTypeEnum.DATA_ANALYSE, 8),
|
||||
(OperationTypeEnum.DATA_GENERATE, 8),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.NEUTRALIZATION_TEXT, 9),
|
||||
),
|
||||
version="qwen2.5:7b",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: PRICE_TEXT_PER_CALL
|
||||
|
|
@ -270,6 +271,7 @@ class AiPrivateLlm(BaseConnectorAi):
|
|||
processingMode=ProcessingModeEnum.ADVANCED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 9),
|
||||
(OperationTypeEnum.NEUTRALIZATION_IMAGE, 9),
|
||||
),
|
||||
version="qwen2.5vl:7b",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: PRICE_VISION_PER_CALL
|
||||
|
|
@ -295,6 +297,7 @@ class AiPrivateLlm(BaseConnectorAi):
|
|||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 9),
|
||||
(OperationTypeEnum.NEUTRALIZATION_IMAGE, 9),
|
||||
),
|
||||
version="granite3.2-vision",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: PRICE_VISION_PER_CALL
|
||||
|
|
@ -367,6 +370,9 @@ class AiPrivateLlm(BaseConnectorAi):
|
|||
|
||||
if response.status_code != 200:
|
||||
errorMessage = f"Private-LLM API error: {response.status_code} - {response.text}"
|
||||
if response.status_code == 429:
|
||||
logger.warning(errorMessage)
|
||||
raise RateLimitExceededException(errorMessage)
|
||||
logger.error(errorMessage)
|
||||
raise HTTPException(status_code=500, detail=errorMessage)
|
||||
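A hedged sketch of how a caller could react to the RateLimitExceededException raised here, reusing the retry-after parsing added earlier (the callModel name and the retry policy are assumptions, not the project's actual code):

import asyncio

async def call_with_backoff(connector, request, attempts: int = 3):
    for attempt in range(attempts):
        try:
            return await connector.callModel(request)           # method name assumed
        except RateLimitExceededException as exc:
            delay = _parseRetryAfterSeconds(str(exc)) or 2.0 ** attempt
            await asyncio.sleep(delay)
    raise RuntimeError("rate limit not cleared after retries")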
|
||||
|
|
@ -458,6 +464,9 @@ class AiPrivateLlm(BaseConnectorAi):
|
|||
|
||||
if response.status_code != 200:
|
||||
errorMessage = f"Private-LLM API error: {response.status_code} - {response.text}"
|
||||
if response.status_code == 429:
|
||||
logger.warning(errorMessage)
|
||||
raise RateLimitExceededException(errorMessage)
|
||||
logger.error(errorMessage)
|
||||
raise HTTPException(status_code=500, detail=errorMessage)
|
||||
|
||||
|
|
|
|||
|
|
@ -35,6 +35,8 @@ class CSRFMiddleware(BaseHTTPMiddleware):
|
|||
"/api/google/auth/login/callback",
|
||||
"/api/google/auth/connect",
|
||||
"/api/google/auth/connect/callback",
|
||||
"/api/clickup/auth/connect",
|
||||
"/api/clickup/auth/connect/callback",
|
||||
"/api/billing/webhook/stripe", # Stripe webhook (auth via Stripe-Signature)
|
||||
}
|
||||
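A simplified sketch (helper name hypothetical, path set abridged) of why these entries are exempt: webhook and OAuth-callback callers cannot send a CSRF token, so those paths are skipped and authenticated by other means, for example the Stripe-Signature header on the billing webhook.

EXEMPT_PATHS = {
    "/api/billing/webhook/stripe",
    "/api/clickup/auth/connect",
    "/api/clickup/auth/connect/callback",
}

def requires_csrf_token(method: str, path: str) -> bool:
    # State-changing methods need a token unless the path is explicitly exempt.
    return method in {"POST", "PUT", "PATCH", "DELETE"} and path not in EXEMPT_PATHS

assert not requires_csrf_token("POST", "/api/billing/webhook/stripe")
assert requires_csrf_token("POST", "/api/files/upload")   # hypothetical non-exempt path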
|
||||
|
|
@ -86,12 +88,15 @@ class CSRFMiddleware(BaseHTTPMiddleware):
|
|||
content={"detail": "Invalid CSRF token format"}
|
||||
)
|
||||
|
||||
# Additional CSRF validation could be added here:
|
||||
# - Check token against session
|
||||
# - Validate token expiration
|
||||
# - Verify token origin
|
||||
|
||||
try:
|
||||
return await call_next(request)
|
||||
except Exception as exc:
|
||||
logger.error("Unhandled exception in %s %s: %s", request.method, request.url.path, exc)
|
||||
from fastapi.responses import JSONResponse
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content={"detail": "Internal server error"},
|
||||
)
|
||||
|
||||
def _is_valid_csrf_token(self, token: str) -> bool:
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -181,7 +181,7 @@ class TokenManager:
|
|||
# Only allow a new refresh if at least 10 minutes passed since the token was created/refreshed
|
||||
try:
|
||||
nowTs = getUtcTimestamp()
|
||||
createdTs = parseTimestamp(oldToken.createdAt, default=0.0)
|
||||
createdTs = parseTimestamp(oldToken.sysCreatedAt, default=0.0)
|
||||
secondsSinceLastRefresh = nowTs - createdTs
|
||||
if secondsSinceLastRefresh < 10 * 60:
|
||||
logger.info(
|
||||
|
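Restating the guard above with concrete numbers (illustrative; both timestamps are epoch seconds, as returned by getUtcTimestamp and parseTimestamp):

REFRESH_MIN_INTERVAL_SECONDS = 10 * 60

def refresh_allowed(now_ts: float, created_ts: float) -> bool:
    return (now_ts - created_ts) >= REFRESH_MIN_INTERVAL_SECONDS

print(refresh_allowed(1_700_000_500.0, 1_700_000_000.0))   # False: only 500 s elapsed
print(refresh_allowed(1_700_000_700.0, 1_700_000_000.0))   # True: 700 s elapsed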
|
|
|||
|
|
@ -5,13 +5,14 @@ import re
|
|||
import psycopg2
|
||||
import psycopg2.extras
|
||||
import logging
|
||||
from typing import List, Dict, Any, Optional, Union, get_origin, get_args, Type
|
||||
from typing import List, Dict, Any, Optional, Union, get_origin, get_args, Type, Set, Tuple
|
||||
import uuid
|
||||
from pydantic import BaseModel, Field
|
||||
import threading
|
||||
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.datamodels.datamodelUam import User, AccessLevel, UserPermissions
|
||||
from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext
|
||||
|
||||
|
|
@ -20,7 +21,7 @@ logger = logging.getLogger(__name__)
|
|||
# No mapping needed - table name = Pydantic model name exactly
|
||||
|
||||
|
||||
class SystemTable(BaseModel):
|
||||
class SystemTable(PowerOnModel):
|
||||
"""Data model for system table entries"""
|
||||
|
||||
table_name: str = Field(
|
||||
|
|
@ -157,6 +158,88 @@ def _parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context:
|
|||
logger.warning(f"Could not parse JSONB field {fieldName}, keeping as string ({context})")
|
||||
|
||||
|
||||
# Legacy column names (historical _* internal names and old camelCase audit fields) -> PowerOn sys* columns.
|
||||
# Order matters: more specific / underscore names first; first successful copy wins per cell via IS NULL on target.
|
||||
_LEGACY_FIELD_TO_SYS: Tuple[Tuple[str, str], ...] = (
|
||||
("_createdAt", "sysCreatedAt"),
|
||||
("_createdBy", "sysCreatedBy"),
|
||||
("_modifiedAt", "sysModifiedAt"),
|
||||
("_modifiedBy", "sysModifiedBy"),
|
||||
("createdAt", "sysCreatedAt"),
|
||||
("creationDate", "sysCreatedAt"),
|
||||
("updatedAt", "sysModifiedAt"),
|
||||
("lastModified", "sysModifiedAt"),
|
||||
)
|
||||
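To make the first-copy-wins rule concrete, one hypothetical generated statement (table name illustrative): because the sys* target is only written where it is still NULL, a later mapping such as createdAt -> sysCreatedAt cannot overwrite a value already copied from _createdAt.

backfill_sql = (
    'UPDATE "Mandate" '
    'SET "sysCreatedAt" = "_createdAt"::double precision '
    'WHERE "sysCreatedAt" IS NULL AND "_createdAt" IS NOT NULL'
)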
|
||||
|
||||
def _quotePgIdent(name: str) -> str:
|
||||
return '"' + str(name).replace('"', '""') + '"'
|
||||
|
||||
|
||||
def _resolveColumnCaseInsensitive(cols: Set[str], logicalName: str) -> Optional[str]:
|
||||
"""Match information_schema column_name to logical CamelCase (PG folds unquoted legacy names to lowercase)."""
|
||||
if not logicalName or not cols:
|
||||
return None
|
||||
for c in cols:
|
||||
if c.lower() == logicalName.lower():
|
||||
return c
|
||||
return None
|
||||
|
||||
|
||||
def _pgColumnDataType(cursor, tablePg: str, colPg: str) -> Optional[str]:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT data_type FROM information_schema.columns
|
||||
WHERE table_schema = 'public' AND table_name = %s AND column_name = %s
|
||||
""",
|
||||
(tablePg, colPg),
|
||||
)
|
||||
row = cursor.fetchone()
|
||||
return row["data_type"] if row else None
|
||||
|
||||
|
||||
def _legacySourceToSysSqlExpr(srcIdent: str, srcType: Optional[str], tgtType: Optional[str]) -> str:
|
||||
"""Build RHS for UPDATE sys* = expr from legacy _* column (handles text/timestamp -> double precision)."""
|
||||
s = _quotePgIdent(srcIdent)
|
||||
sl = (srcType or "").lower()
|
||||
tl = (tgtType or "").lower()
|
||||
if "double" in tl or tl == "real" or tl == "numeric":
|
||||
if any(x in sl for x in ("double precision", "real", "numeric", "integer", "bigint", "smallint")):
|
||||
return f"{s}::double precision"
|
||||
if "timestamp" in sl or sl == "date":
|
||||
return f"EXTRACT(EPOCH FROM {s}::timestamptz)"
|
||||
if "text" in sl or "character" in sl or sl == "uuid":
|
||||
return (
|
||||
f"CASE WHEN trim({s}::text) ~ '^[+-]?[0-9]+(\\.[0-9]*)?([eE][+-]?[0-9]+)?$' "
|
||||
f"THEN trim({s}::text)::double precision "
|
||||
f"ELSE EXTRACT(EPOCH FROM trim({s}::text)::timestamptz) END"
|
||||
)
|
||||
return s
|
||||
return s
|
||||
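Illustrative outputs of the helper above for a double-precision target (source column name hypothetical):

# Timestamp source -> epoch extraction:
print(_legacySourceToSysSqlExpr("_createdAt", "timestamp with time zone", "double precision"))
#   EXTRACT(EPOCH FROM "_createdAt"::timestamptz)

# Numeric source -> plain cast:
print(_legacySourceToSysSqlExpr("_createdAt", "double precision", "double precision"))
#   "_createdAt"::double precision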
|
||||
|
||||
def _listPublicBaseTableNames(cursor) -> List[str]:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT table_name FROM information_schema.tables
|
||||
WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
|
||||
ORDER BY table_name
|
||||
"""
|
||||
)
|
||||
return [row["table_name"] for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def _listTableColumnNames(cursor, tableName: str) -> Set[str]:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT column_name FROM information_schema.columns
|
||||
WHERE table_schema = 'public' AND table_name = %s
|
||||
""",
|
||||
(tableName,),
|
||||
)
|
||||
return {row["column_name"] for row in cursor.fetchall()}
|
||||
|
||||
|
||||
# Cache connectors by (host, database, port) to avoid duplicate inits for same database.
|
||||
# Thread safety: _connector_cache_lock protects cache access. userId is request-scoped via
|
||||
# contextvars to avoid races when concurrent requests share the same connector.
|
||||
|
|
@ -178,7 +261,7 @@ def _get_cached_connector(
|
|||
userId: str = None,
|
||||
) -> "DatabaseConnector":
|
||||
"""Return cached DatabaseConnector for same (host, database, port) to avoid duplicate PostgreSQL inits.
|
||||
Uses contextvars for userId so concurrent requests sharing the same connector get correct _createdBy/_modifiedBy.
|
||||
Uses contextvars for userId so concurrent requests sharing the same connector get correct sysCreatedBy/sysModifiedBy.
|
||||
"""
|
||||
port = int(dbPort) if dbPort is not None else 5432
|
||||
key = (dbHost, dbDatabase, port)
|
||||
|
|
@ -327,8 +410,10 @@ class DatabaseConnector:
|
|||
id SERIAL PRIMARY KEY,
|
||||
table_name VARCHAR(255) UNIQUE NOT NULL,
|
||||
initial_id VARCHAR(255) NOT NULL,
|
||||
_createdAt DOUBLE PRECISION,
|
||||
_modifiedAt DOUBLE PRECISION
|
||||
"sysCreatedAt" DOUBLE PRECISION,
|
||||
"sysCreatedBy" VARCHAR(255),
|
||||
"sysModifiedAt" DOUBLE PRECISION,
|
||||
"sysModifiedBy" VARCHAR(255)
|
||||
)
|
||||
""")
|
||||
conn.close()
|
||||
|
|
@ -371,6 +456,63 @@ class DatabaseConnector:
|
|||
logger.warning(f"Connection lost, reconnecting: {e}")
|
||||
self._connect()
|
||||
|
||||
def migrateLegacyUnderscoreSysColumns(self) -> int:
|
||||
"""
|
||||
Scan all public base tables on this connection's database. Where both a legacy
|
||||
source column (any case: _createdAt, createdAt, creationDate, …) and the matching
|
||||
sys* column exist, UPDATE sys* from legacy where sys* IS NULL AND legacy IS NOT NULL.
|
||||
Idempotent; run after schema adds sys* columns (see _ensureTableExists).
|
||||
"""
|
||||
self._ensure_connection()
|
||||
total = 0
|
||||
try:
|
||||
with self.connection.cursor() as cursor:
|
||||
tableNames = _listPublicBaseTableNames(cursor)
|
||||
for table in tableNames:
|
||||
with self.connection.cursor() as cursor:
|
||||
cols = _listTableColumnNames(cursor, table)
|
||||
for legacyLogical, sysLogical in _LEGACY_FIELD_TO_SYS:
|
||||
src = _resolveColumnCaseInsensitive(cols, legacyLogical)
|
||||
tgt = _resolveColumnCaseInsensitive(cols, sysLogical)
|
||||
if not src or not tgt or src == tgt:
|
||||
continue
|
||||
try:
|
||||
with self.connection.cursor() as cursor:
|
||||
srcType = _pgColumnDataType(cursor, table, src)
|
||||
tgtType = _pgColumnDataType(cursor, table, tgt)
|
||||
expr = _legacySourceToSysSqlExpr(src, srcType, tgtType)
|
||||
tq = _quotePgIdent(table)
|
||||
tr = _quotePgIdent(tgt)
|
||||
sr = _quotePgIdent(src)
|
||||
sql = (
|
||||
f"UPDATE {tq} SET {tr} = {expr} "
|
||||
f"WHERE {tr} IS NULL AND {sr} IS NOT NULL"
|
||||
)
|
||||
cursor.execute(sql)
|
||||
n = cursor.rowcount
|
||||
self.connection.commit()
|
||||
total += n
|
||||
except Exception as e:
|
||||
try:
|
||||
self.connection.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
logger.debug(
|
||||
f"migrateLegacyUnderscoreSysColumns skip {self.dbDatabase}.{table} "
|
||||
f"{src}->{tgt}: {e}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"migrateLegacyUnderscoreSysColumns failed on {self.dbDatabase}: {e}")
|
||||
try:
|
||||
self.connection.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
if total:
|
||||
logger.info(
|
||||
f"migrateLegacyUnderscoreSysColumns: {total} cell(s) in {self.dbDatabase}"
|
||||
)
|
||||
return total
|
||||
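An illustrative call (connector construction elided); because every generated UPDATE is guarded by IS NULL, re-running the backfill is safe:

migrated = connector.migrateLegacyUnderscoreSysColumns()
print(f"backfilled {migrated} legacy audit cell(s)")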
|
||||
def _initializeSystemTable(self):
|
||||
"""Initializes the system table if it doesn't exist yet."""
|
||||
try:
|
||||
|
|
@ -416,7 +558,7 @@ class DatabaseConnector:
|
|||
for table_name, initial_id in data.items():
|
||||
cursor.execute(
|
||||
"""
|
||||
INSERT INTO "_system" ("table_name", "initial_id", "_modifiedAt")
|
||||
INSERT INTO "_system" ("table_name", "initial_id", "sysModifiedAt")
|
||||
VALUES (%s, %s, %s)
|
||||
""",
|
||||
(table_name, initial_id, getUtcTimestamp()),
|
||||
|
|
@ -448,8 +590,10 @@ class DatabaseConnector:
|
|||
CREATE TABLE "{self._systemTableName}" (
|
||||
"table_name" VARCHAR(255) PRIMARY KEY,
|
||||
"initial_id" VARCHAR(255),
|
||||
"_createdAt" DOUBLE PRECISION,
|
||||
"_modifiedAt" DOUBLE PRECISION
|
||||
"sysCreatedAt" DOUBLE PRECISION,
|
||||
"sysCreatedBy" VARCHAR(255),
|
||||
"sysModifiedAt" DOUBLE PRECISION,
|
||||
"sysModifiedBy" VARCHAR(255)
|
||||
)
|
||||
""")
|
||||
logger.info("System table created successfully")
|
||||
|
|
@ -464,9 +608,15 @@ class DatabaseConnector:
|
|||
)
|
||||
existing_columns = [row["column_name"] for row in cursor.fetchall()]
|
||||
|
||||
if "_modifiedAt" not in existing_columns:
|
||||
for sys_col, sys_sql in [
|
||||
("sysCreatedAt", "DOUBLE PRECISION"),
|
||||
("sysCreatedBy", "VARCHAR(255)"),
|
||||
("sysModifiedAt", "DOUBLE PRECISION"),
|
||||
("sysModifiedBy", "VARCHAR(255)"),
|
||||
]:
|
||||
if sys_col not in existing_columns:
|
||||
cursor.execute(
|
||||
f'ALTER TABLE "{self._systemTableName}" ADD COLUMN "_modifiedAt" DOUBLE PRECISION'
|
||||
f'ALTER TABLE "{self._systemTableName}" ADD COLUMN "{sys_col}" {sys_sql}'
|
||||
)
|
||||
|
||||
return True
|
||||
|
|
@ -484,6 +634,7 @@ class DatabaseConnector:
|
|||
|
||||
try:
|
||||
self._ensure_connection()
|
||||
schemaTouched = False
|
||||
|
||||
with self.connection.cursor() as cursor:
|
||||
# Check if table exists by querying information_schema with case-insensitive search
|
||||
|
|
@ -502,6 +653,7 @@ class DatabaseConnector:
|
|||
logger.info(
|
||||
f"Created table '{table}' with columns from Pydantic model"
|
||||
)
|
||||
schemaTouched = True
|
||||
else:
|
||||
# Table exists: ensure all columns from model are present (simple additive migration)
|
||||
try:
|
||||
|
|
@ -518,11 +670,7 @@ class DatabaseConnector:
|
|||
|
||||
# Desired columns based on model
|
||||
model_fields = _get_model_fields(model_class)
|
||||
desired_columns = (
|
||||
set(["id"])
|
||||
| set(model_fields.keys())
|
||||
| {"_createdAt", "_modifiedAt", "_createdBy", "_modifiedBy"}
|
||||
)
|
||||
desired_columns = set(["id"]) | set(model_fields.keys())
|
||||
|
||||
# Add missing columns
|
||||
for col in sorted(desired_columns - existing_columns):
|
||||
|
|
@ -530,12 +678,6 @@ class DatabaseConnector:
|
|||
if col in ["id"]:
|
||||
continue # primary key exists already
|
||||
sql_type = model_fields.get(col)
|
||||
if col in ["_createdAt"]:
|
||||
sql_type = "DOUBLE PRECISION"
|
||||
elif col in ["_modifiedAt"]:
|
||||
sql_type = "DOUBLE PRECISION"
|
||||
elif col in ["_createdBy", "_modifiedBy"]:
|
||||
sql_type = "VARCHAR(255)"
|
||||
if not sql_type:
|
||||
sql_type = "TEXT"
|
||||
try:
|
||||
|
|
@ -545,6 +687,7 @@ class DatabaseConnector:
|
|||
logger.info(
|
||||
f"Added missing column '{col}' ({sql_type}) to '{table}'"
|
||||
)
|
||||
schemaTouched = True
|
||||
except Exception as add_err:
|
||||
logger.warning(
|
||||
f"Could not add column '{col}' to '{table}': {add_err}"
|
||||
|
|
@ -555,6 +698,23 @@ class DatabaseConnector:
|
|||
)
|
||||
|
||||
self.connection.commit()
|
||||
if schemaTouched:
|
||||
try:
|
||||
n = self.migrateLegacyUnderscoreSysColumns()
|
||||
if n:
|
||||
logger.info(
|
||||
"After schema change on %s.%s: legacy -> sys* migration wrote %s cell(s)",
|
||||
self.dbDatabase,
|
||||
table,
|
||||
n,
|
||||
)
|
||||
except Exception as mig_err:
|
||||
logger.error(
|
||||
"migrateLegacyUnderscoreSysColumns failed after schema change %s.%s: %s",
|
||||
self.dbDatabase,
|
||||
table,
|
||||
mig_err,
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error ensuring table {table} exists: {e}")
|
||||
|
|
@ -594,16 +754,6 @@ class DatabaseConnector:
|
|||
if field_name != "id": # Skip id, already defined
|
||||
columns.append(f'"{field_name}" {sql_type}')
|
||||
|
||||
# Add metadata columns
|
||||
columns.extend(
|
||||
[
|
||||
'"_createdAt" DOUBLE PRECISION',
|
||||
'"_modifiedAt" DOUBLE PRECISION',
|
||||
'"_createdBy" VARCHAR(255)',
|
||||
'"_modifiedBy" VARCHAR(255)',
|
||||
]
|
||||
)
|
||||
|
||||
# Create table
|
||||
sql = f'CREATE TABLE IF NOT EXISTS "{table}" ({", ".join(columns)})'
|
||||
cursor.execute(sql)
|
||||
|
|
@ -626,11 +776,7 @@ class DatabaseConnector:
|
|||
"""Save record to normalized table with explicit columns."""
|
||||
# Get columns from Pydantic model instead of database schema
|
||||
fields = _get_model_fields(model_class)
|
||||
columns = (
|
||||
["id"]
|
||||
+ [field for field in fields.keys() if field != "id"]
|
||||
+ ["_createdAt", "_createdBy", "_modifiedAt", "_modifiedBy"]
|
||||
)
|
||||
columns = ["id"] + [field for field in fields.keys() if field != "id"]
|
||||
|
||||
if not columns:
|
||||
logger.error(f"No columns found for table {table}")
|
||||
|
|
@ -648,7 +794,7 @@ class DatabaseConnector:
|
|||
value = filtered_record.get(col)
|
||||
|
||||
# Handle timestamp fields - store as Unix timestamps (floats) for consistency
|
||||
if col in ["_createdAt", "_modifiedAt"] and value is not None:
|
||||
if col in ["sysCreatedAt", "sysModifiedAt"] and value is not None:
|
||||
if isinstance(value, str):
|
||||
# Try to parse string as timestamp
|
||||
try:
|
||||
|
|
@ -690,7 +836,7 @@ class DatabaseConnector:
|
|||
[
|
||||
f'"{col}" = EXCLUDED."{col}"'
|
||||
for col in columns[1:]
|
||||
if col not in ["_createdAt", "_createdBy"]
|
||||
if col not in ["sysCreatedAt", "sysCreatedBy"]
|
||||
]
|
||||
)
|
||||
|
||||
|
|
@ -723,6 +869,10 @@ class DatabaseConnector:
|
|||
logger.error(f"Error loading record {recordId} from table {table}: {e}")
|
||||
return None
|
||||
|
||||
def getRecord(self, model_class: type, recordId: str) -> Optional[Dict[str, Any]]:
|
||||
"""Load one row by primary key (routes / services; wraps _loadRecord)."""
|
||||
return self._loadRecord(model_class, str(recordId))
|
||||
|
||||
def _saveRecord(
|
||||
self, model_class: type, recordId: str, record: Dict[str, Any]
|
||||
) -> bool:
|
||||
|
|
@ -742,17 +892,19 @@ class DatabaseConnector:
|
|||
if effective_user_id is None:
|
||||
effective_user_id = self.userId
|
||||
currentTime = getUtcTimestamp()
|
||||
# Set _createdAt and _createdBy if this is a new record (record doesn't have _createdAt)
|
||||
if "_createdAt" not in record:
|
||||
record["_createdAt"] = currentTime
|
||||
# Set sysCreatedAt/sysCreatedBy on first persist; always refresh modified fields.
|
||||
# Treat None and 0 as unset (legacy rows / bad defaults); model_dump often has sysCreatedAt=None.
|
||||
createdTs = record.get("sysCreatedAt")
|
||||
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
||||
record["sysCreatedAt"] = currentTime
|
||||
if effective_user_id:
|
||||
record["_createdBy"] = effective_user_id
|
||||
elif "_createdBy" not in record or not record.get("_createdBy"):
|
||||
record["sysCreatedBy"] = effective_user_id
|
||||
elif not record.get("sysCreatedBy"):
|
||||
if effective_user_id:
|
||||
record["_createdBy"] = effective_user_id
|
||||
record["_modifiedAt"] = currentTime
|
||||
record["sysCreatedBy"] = effective_user_id
|
||||
record["sysModifiedAt"] = currentTime
|
||||
if effective_user_id:
|
||||
record["_modifiedBy"] = effective_user_id
|
||||
record["sysModifiedBy"] = effective_user_id
|
||||
|
||||
with self.connection.cursor() as cursor:
|
||||
self._save_record(cursor, table, recordId, record, model_class)
|
||||
|
|
@ -840,6 +992,26 @@ class DatabaseConnector:
|
|||
logger.error(f"Error removing initial ID for table {table}: {e}")
|
||||
return False
|
||||
|
||||
def buildRbacWhereClause(
|
||||
self,
|
||||
permissions: UserPermissions,
|
||||
currentUser: User,
|
||||
table: str,
|
||||
mandateId: Optional[str] = None,
|
||||
featureInstanceId: Optional[str] = None,
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Delegate to interfaceRbac.buildRbacWhereClause (tests and call sites use connector as entry)."""
|
||||
from modules.interfaces.interfaceRbac import buildRbacWhereClause as _buildRbacWhereClause
|
||||
|
||||
return _buildRbacWhereClause(
|
||||
permissions,
|
||||
currentUser,
|
||||
table,
|
||||
self,
|
||||
mandateId=mandateId,
|
||||
featureInstanceId=featureInstanceId,
|
||||
)
|
||||
|
||||
def updateContext(self, userId: str) -> None:
|
||||
"""Updates the context of the database connector.
|
||||
Sets both instance userId and contextvar for request-scoped use when connector is shared.
|
||||
|
|
@ -992,10 +1164,6 @@ class DatabaseConnector:
|
|||
Returns (where_clause, order_clause, limit_clause, values, count_values).
|
||||
"""
|
||||
fields = _get_model_fields(model_class)
|
||||
fields["_createdAt"] = "DOUBLE PRECISION"
|
||||
fields["_modifiedAt"] = "DOUBLE PRECISION"
|
||||
fields["_createdBy"] = "TEXT"
|
||||
fields["_modifiedBy"] = "TEXT"
|
||||
validColumns = set(fields.keys())
|
||||
where_parts: List[str] = []
|
||||
values: List[Any] = []
|
||||
|
|
@ -1026,6 +1194,9 @@ class DatabaseConnector:
|
|||
continue
|
||||
colType = fields.get(key, "TEXT")
|
||||
logger.debug(f"_buildPaginationClauses: filter key='{key}' val={val!r} type(val)={type(val).__name__} colType={colType}")
|
||||
if val is None:
|
||||
where_parts.append(f'"{key}" IS NULL')
|
||||
continue
|
||||
if isinstance(val, dict):
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
|
|
@ -1190,10 +1361,6 @@ class DatabaseConnector:
|
|||
"""
|
||||
table = model_class.__name__
|
||||
fields = _get_model_fields(model_class)
|
||||
fields["_createdAt"] = "DOUBLE PRECISION"
|
||||
fields["_modifiedAt"] = "DOUBLE PRECISION"
|
||||
fields["_createdBy"] = "TEXT"
|
||||
fields["_modifiedBy"] = "TEXT"
|
||||
|
||||
if column not in fields:
|
||||
return []
|
||||
|
|
|
|||
|
|
@ -52,6 +52,12 @@ class ConnectorResolver:
|
|||
except ImportError:
|
||||
logger.debug("FtpConnector not available (stub)")
|
||||
|
||||
try:
|
||||
from modules.connectors.providerClickup.connectorClickup import ClickupConnector
|
||||
ConnectorResolver._providerRegistry["clickup"] = ClickupConnector
|
||||
except ImportError:
|
||||
logger.warning("ClickupConnector not available")
|
||||
|
||||
async def resolve(self, connectionId: str) -> ProviderConnector:
|
||||
"""Resolve connectionId to a ProviderConnector with a fresh access token."""
|
||||
connection = await self._loadConnection(connectionId)
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ from typing import Optional
|
|||
import logging
|
||||
import aiohttp
|
||||
from modules.datamodels.datamodelTickets import TicketBase, TicketFieldAttribute
|
||||
from modules.serviceCenter.services.serviceClickup.mainServiceClickup import clickup_authorization_header
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -30,7 +31,7 @@ class ConnectorTicketClickup(TicketBase):
|
|||
|
||||
def _headers(self) -> dict:
|
||||
return {
|
||||
"Authorization": self.apiToken,
|
||||
"Authorization": clickup_authorization_header(self.apiToken),
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -18,6 +18,11 @@ from modules.shared.configuration import APP_CONFIG
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Gemini-TTS speaker IDs from voices.list use short names (e.g. "Kore") and require model_name + prompt.
|
||||
_GEMINI_TTS_DEFAULT_MODEL = "gemini-2.5-flash-tts"
|
||||
_GEMINI_TTS_NEUTRAL_PROMPT = "Say the following"
|
||||
|
||||
|
||||
class ConnectorGoogleSpeech:
|
||||
"""
|
||||
Google Cloud Speech-to-Text and Translation connector.
|
||||
|
|
@ -902,6 +907,13 @@ class ConnectorGoogleSpeech:
|
|||
"error": f"Validation error: {e}"
|
||||
}
|
||||
|
||||
def _isGeminiTtsSpeakerVoiceName(self, voiceName: str) -> bool:
|
||||
"""True when voice name is a Gemini-TTS speaker id (no BCP-47 prefix like en-US-...)."""
|
||||
if not voiceName or not isinstance(voiceName, str):
|
||||
return False
|
||||
stripped = voiceName.strip()
|
||||
return bool(stripped) and "-" not in stripped
|
||||
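Expected behaviour of the heuristic above (illustrative calls; speech stands for an already constructed ConnectorGoogleSpeech instance):

print(speech._isGeminiTtsSpeakerVoiceName("Kore"))             # True: Gemini-TTS speaker id
print(speech._isGeminiTtsSpeakerVoiceName("en-US-Neural2-F"))  # False: classic BCP-47 voice name
print(speech._isGeminiTtsSpeakerVoiceName("  "))               # False: blank after strip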
|
||||
async def textToSpeech(self, text: str, languageCode: str = "de-DE", voiceName: str = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Convert text to speech using Google Cloud Text-to-Speech.
|
||||
|
|
@ -917,9 +929,6 @@ class ConnectorGoogleSpeech:
|
|||
try:
|
||||
logger.info(f"Converting text to speech: '{text[:50]}...' in {languageCode}")
|
||||
|
||||
# Set up the synthesis input
|
||||
synthesisInput = texttospeech.SynthesisInput(text=text)
|
||||
|
||||
# Build the voice request
|
||||
selectedVoice = voiceName or self._getDefaultVoice(languageCode)
|
||||
|
||||
|
|
@ -931,10 +940,23 @@ class ConnectorGoogleSpeech:
|
|||
|
||||
logger.info(f"Using TTS voice: {selectedVoice} for language: {languageCode}")
|
||||
|
||||
if self._isGeminiTtsSpeakerVoiceName(selectedVoice):
|
||||
synthesisInput = texttospeech.SynthesisInput(
|
||||
text=text,
|
||||
prompt=_GEMINI_TTS_NEUTRAL_PROMPT,
|
||||
)
|
||||
voice = texttospeech.VoiceSelectionParams(
|
||||
language_code=languageCode,
|
||||
name=selectedVoice,
|
||||
ssml_gender=texttospeech.SsmlVoiceGender.NEUTRAL
|
||||
model_name=_GEMINI_TTS_DEFAULT_MODEL,
|
||||
ssml_gender=texttospeech.SsmlVoiceGender.NEUTRAL,
|
||||
)
|
||||
else:
|
||||
synthesisInput = texttospeech.SynthesisInput(text=text)
|
||||
voice = texttospeech.VoiceSelectionParams(
|
||||
language_code=languageCode,
|
||||
name=selectedVoice,
|
||||
ssml_gender=texttospeech.SsmlVoiceGender.NEUTRAL,
|
||||
)
|
||||
|
||||
# Select the type of audio file to return
|
||||
|
|
@ -1059,7 +1081,8 @@ class ConnectorGoogleSpeech:
|
|||
"language_codes": list(voice.language_codes) if voice.language_codes else [],
|
||||
"gender": gender,
|
||||
"ssml_gender": voice.ssml_gender.name if voice.ssml_gender else "NEUTRAL",
|
||||
"natural_sample_rate_hertz": voice.natural_sample_rate_hertz
|
||||
"natural_sample_rate_hertz": voice.natural_sample_rate_hertz,
|
||||
"geminiTts": self._isGeminiTtsSpeakerVoiceName(voice.name or ""),
|
||||
}
|
||||
|
||||
# Include any additional fields if available from Google API
|
||||
|
|
|
|||
modules/connectors/providerClickup/__init__.py (new file, 7 lines)
|
|
@ -0,0 +1,7 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp provider connector."""
|
||||
|
||||
from .connectorClickup import ClickupConnector
|
||||
|
||||
__all__ = ["ClickupConnector"]
|
||||
modules/connectors/providerClickup/connectorClickup.py (new file, 268 lines)
|
|
@ -0,0 +1,268 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp ProviderConnector — virtual paths for teams → lists → tasks (table rows).
|
||||
|
||||
Path convention (leading slash, no trailing slash except root):
|
||||
/ — authorized workspaces (teams)
|
||||
/team/{teamId} — spaces in the workspace
|
||||
/team/{teamId}/space/{spaceId} — folders + folderless lists
|
||||
/team/{teamId}/space/{spaceId}/folder/{folderId} — lists in folder
|
||||
/team/{teamId}/list/{listId} — tasks in list (rows)
|
||||
/team/{teamId}/list/{listId}/task/{taskId} — single task (download = JSON)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from modules.connectors.connectorProviderBase import (
|
||||
ProviderConnector,
|
||||
ServiceAdapter,
|
||||
DownloadResult,
|
||||
)
|
||||
from modules.datamodels.datamodelDataSource import ExternalEntry
|
||||
from modules.serviceCenter.services.serviceClickup.mainServiceClickup import ClickupService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# type metadata for ExternalEntry.metadata["cuType"]
|
||||
_CU_TEAM = "team"
|
||||
_CU_SPACE = "space"
|
||||
_CU_FOLDER = "folder"
|
||||
_CU_LIST = "list"
|
||||
_CU_TASK = "task"
|
||||
|
||||
|
||||
def _norm(path: str) -> str:
|
||||
p = (path or "").strip() or "/"
|
||||
if not p.startswith("/"):
|
||||
p = "/" + p
|
||||
if p != "/" and p.endswith("/"):
|
||||
p = p.rstrip("/")
|
||||
return p
|
||||
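A few illustrative normalizations from the helper above:

assert _norm("") == "/"
assert _norm("team/123/") == "/team/123"
assert _norm("/team/123/list/9/") == "/team/123/list/9"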
|
||||
|
||||
class ClickupListsAdapter(ServiceAdapter):
|
||||
"""Maps ClickUp hierarchy + list tasks to browse/download/upload/search."""
|
||||
|
||||
def __init__(self, access_token: str):
|
||||
self._token = access_token
|
||||
# Minimal service instance for API calls (no ServiceCenter context)
|
||||
self._svc = ClickupService(context=None, get_service=lambda _: None)
|
||||
self._svc.setAccessToken(access_token)
|
||||
|
||||
async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
|
||||
p = _norm(path)
|
||||
out: List[ExternalEntry] = []
|
||||
|
||||
if p == "/":
|
||||
data = await self._svc.getAuthorizedTeams()
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
logger.warning(f"ClickUp browse root: {data.get('error')}")
|
||||
return []
|
||||
teams = data.get("teams", []) if isinstance(data, dict) else []
|
||||
for t in teams:
|
||||
tid = str(t.get("id", ""))
|
||||
name = t.get("name") or tid
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{tid}",
|
||||
isFolder=True,
|
||||
metadata={"cuType": _CU_TEAM, "id": tid, "raw": t},
|
||||
)
|
||||
)
|
||||
return out
|
||||
|
||||
m = re.match(r"^/team/([^/]+)$", p)
|
||||
if m:
|
||||
team_id = m.group(1)
|
||||
data = await self._svc.getSpaces(team_id)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
return []
|
||||
spaces = data.get("spaces", []) if isinstance(data, dict) else []
|
||||
for s in spaces:
|
||||
sid = str(s.get("id", ""))
|
||||
name = s.get("name") or sid
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{team_id}/space/{sid}",
|
||||
isFolder=True,
|
||||
metadata={"cuType": _CU_SPACE, "id": sid, "raw": s},
|
||||
)
|
||||
)
|
||||
return out
|
||||
|
||||
m = re.match(r"^/team/([^/]+)/space/([^/]+)$", p)
|
||||
if m:
|
||||
team_id, space_id = m.group(1), m.group(2)
|
||||
folders_r = await self._svc.getFolders(space_id)
|
||||
lists_r = await self._svc.getFolderlessLists(space_id)
|
||||
if isinstance(folders_r, dict) and not folders_r.get("error"):
|
||||
for f in folders_r.get("folders", []) or []:
|
||||
fid = str(f.get("id", ""))
|
||||
name = f.get("name") or fid
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{team_id}/space/{space_id}/folder/{fid}",
|
||||
isFolder=True,
|
||||
metadata={"cuType": _CU_FOLDER, "id": fid, "raw": f},
|
||||
)
|
||||
)
|
||||
if isinstance(lists_r, dict) and not lists_r.get("error"):
|
||||
for lst in lists_r.get("lists", []) or []:
|
||||
lid = str(lst.get("id", ""))
|
||||
name = lst.get("name") or lid
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{team_id}/list/{lid}",
|
||||
isFolder=True,
|
||||
metadata={"cuType": _CU_LIST, "id": lid, "raw": lst},
|
||||
)
|
||||
)
|
||||
return out
|
||||
|
||||
m = re.match(r"^/team/([^/]+)/space/([^/]+)/folder/([^/]+)$", p)
|
||||
if m:
|
||||
team_id, _space_id, folder_id = m.group(1), m.group(2), m.group(3)
|
||||
data = await self._svc.getListsInFolder(folder_id)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
return []
|
||||
for lst in data.get("lists", []) or []:
|
||||
lid = str(lst.get("id", ""))
|
||||
name = lst.get("name") or lid
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{team_id}/list/{lid}",
|
||||
isFolder=True,
|
||||
metadata={"cuType": _CU_LIST, "id": lid, "raw": lst},
|
||||
)
|
||||
)
|
||||
return out
|
||||
|
||||
m = re.match(r"^/team/([^/]+)/list/([^/]+)$", p)
|
||||
if m:
|
||||
team_id, list_id = m.group(1), m.group(2)
|
||||
page = 0
|
||||
while True:
|
||||
data = await self._svc.getTasksInList(list_id, page=page)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
break
|
||||
tasks = data.get("tasks", []) if isinstance(data, dict) else []
|
||||
for task in tasks:
|
||||
tid = str(task.get("id", ""))
|
||||
name = task.get("name") or tid
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{team_id}/list/{list_id}/task/{tid}",
|
||||
isFolder=False,
|
||||
metadata={
|
||||
"cuType": _CU_TASK,
|
||||
"id": tid,
|
||||
"task": task,
|
||||
},
|
||||
)
|
||||
)
|
||||
if len(tasks) < 100:
|
||||
break
|
||||
page += 1
|
||||
return out
|
||||
|
||||
m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
|
||||
if m:
|
||||
team_id, list_id, task_id = m.group(1), m.group(2), m.group(3)
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=f"task-{task_id}.json",
|
||||
path=p,
|
||||
isFolder=False,
|
||||
metadata={"cuType": _CU_TASK, "id": task_id, "listId": list_id, "teamId": team_id},
|
||||
)
|
||||
)
|
||||
return out
|
||||
|
||||
logger.warning(f"ClickUp browse: unsupported path {p}")
|
||||
return []
|
||||
|
||||
async def download(self, path: str) -> Any:
|
||||
p = _norm(path)
|
||||
m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
|
||||
if not m:
|
||||
return b""
|
||||
task_id = m.group(3)
|
||||
data = await self._svc.getTask(task_id)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
return json.dumps(data).encode("utf-8")
|
||||
payload = json.dumps(data, indent=2).encode("utf-8")
|
||||
return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")
|
||||
|
||||
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
||||
"""Upload attachment to a task. Path must be .../list/{listId}/task/{taskId}."""
|
||||
p = _norm(path)
|
||||
m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
|
||||
if not m:
|
||||
return {"error": "Path must be /team/{teamId}/list/{listId}/task/{taskId} for upload"}
|
||||
task_id = m.group(3)
|
||||
return await self._svc.uploadTaskAttachment(task_id, data, fileName)
|
||||
|
||||
async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
|
||||
base = _norm(path or "/")
|
||||
team_id: Optional[str] = None
|
||||
mt = re.match(r"^/team/([^/]+)", base)
|
||||
if mt:
|
||||
team_id = mt.group(1)
|
||||
if not team_id:
|
||||
teams = await self._svc.getAuthorizedTeams()
|
||||
if not isinstance(teams, dict) or teams.get("error"):
|
||||
return []
|
||||
tl = teams.get("teams") or []
|
||||
if not tl:
|
||||
return []
|
||||
team_id = str(tl[0].get("id", ""))
|
||||
|
||||
out: List[ExternalEntry] = []
|
||||
page = 0
|
||||
while True:
|
||||
data = await self._svc.searchTeamTasks(team_id, query=query, page=page)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
break
|
||||
tasks = data.get("tasks", []) if isinstance(data, dict) else []
|
||||
for task in tasks:
|
||||
tid = str(task.get("id", ""))
|
||||
name = task.get("name") or tid
|
||||
list_obj = task.get("list") or {}
|
||||
lid = str(list_obj.get("id", "")) if list_obj else ""
|
||||
if not lid:
|
||||
continue
|
||||
out.append(
|
||||
ExternalEntry(
|
||||
name=name,
|
||||
path=f"/team/{team_id}/list/{lid}/task/{tid}",
|
||||
isFolder=False,
|
||||
metadata={"cuType": _CU_TASK, "id": tid, "task": task},
|
||||
)
|
||||
)
|
||||
if len(tasks) < 25:
|
||||
break
|
||||
page += 1
|
||||
return out
|
||||
|
||||
|
||||
class ClickupConnector(ProviderConnector):
|
||||
"""One ClickUp connection → clickup virtual file service."""
|
||||
|
||||
def getAvailableServices(self) -> List[str]:
|
||||
return ["clickup"]
|
||||
|
||||
def getServiceAdapter(self, service: str) -> ServiceAdapter:
|
||||
if service != "clickup":
|
||||
raise ValueError(f"ClickUp only supports 'clickup' service, got '{service}'")
|
||||
return ClickupListsAdapter(self.accessToken)
|
||||
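A hedged end-to-end sketch of how the new connector is meant to be used; the connection id is hypothetical and the resolver wiring is assumed from the ConnectorResolver registration shown earlier:

async def list_workspaces(resolver) -> None:
    connector = await resolver.resolve("conn-clickup-1")    # hypothetical connection id
    adapter = connector.getServiceAdapter("clickup")
    for entry in await adapter.browse("/"):                 # root lists authorized workspaces
        print(entry.path, entry.name, entry.metadata.get("cuType"))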
|
|
@ -22,6 +22,10 @@ class OperationTypeEnum(str, Enum):
|
|||
IMAGE_ANALYSE = "imageAnalyse"
|
||||
IMAGE_GENERATE = "imageGenerate"
|
||||
|
||||
# Neutralization (dedicated model selection; text vs vision backends)
|
||||
NEUTRALIZATION_TEXT = "neutralizationText"
|
||||
NEUTRALIZATION_IMAGE = "neutralizationImage"
|
||||
|
||||
# Web Operations
|
||||
WEB_SEARCH_DATA = "webSearch" # Returns list of URLs only
|
||||
WEB_CRAWL = "webCrawl" # Web crawl for a given URL
|
||||
|
|
@ -168,6 +172,8 @@ class AiCallRequest(BaseModel):
|
|||
contentParts: Optional[List['ContentPart']] = None # Content parts for model-aware chunking
|
||||
messages: Optional[List[Dict[str, Any]]] = Field(default=None, description="OpenAI-style messages for multi-turn agent conversations")
|
||||
tools: Optional[List[Dict[str, Any]]] = Field(default=None, description="Tool definitions for native function calling")
|
||||
toolChoice: Optional[Any] = Field(default=None, description="Tool choice: 'auto', 'none', or specific tool (passed through to model call)")
|
||||
requireNeutralization: Optional[bool] = Field(default=None, description="Per-request neutralization override: True=force, False=skip, None=use config")
|
||||
|
||||
|
||||
class AiCallResponse(BaseModel):
|
||||
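A hedged construction example for the extended request model; only the fields added above are shown, and any other required fields of AiCallRequest are omitted here and may be needed in practice:

request = AiCallRequest(
    messages=[{"role": "user", "content": "Summarise the open ClickUp tasks"}],
    tools=[{"type": "function",
            "function": {"name": "listTasks", "parameters": {"type": "object", "properties": {}}}}],
    toolChoice="auto",
    requireNeutralization=False,
)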
|
|
|
|||
modules/datamodels/datamodelBase.py (new file, 68 lines)
|
|
@@ -0,0 +1,68 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Base Pydantic model with system-managed fields (DB + API + UI metadata)."""

from typing import Optional

from pydantic import BaseModel, Field

from modules.shared.attributeUtils import registerModelLabels


class PowerOnModel(BaseModel):
    sysCreatedAt: Optional[float] = Field(
        default=None,
        description="Record creation timestamp (UTC, set by system)",
        json_schema_extra={
            "frontend_type": "timestamp",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
    sysCreatedBy: Optional[str] = Field(
        default=None,
        description="User ID who created this record (set by system)",
        json_schema_extra={
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
    sysModifiedAt: Optional[float] = Field(
        default=None,
        description="Record last modification timestamp (UTC, set by system)",
        json_schema_extra={
            "frontend_type": "timestamp",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
    sysModifiedBy: Optional[str] = Field(
        default=None,
        description="User ID who last modified this record (set by system)",
        json_schema_extra={
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )


registerModelLabels(
    "PowerOnModel",
    {"en": "Base Record", "de": "Basisdatensatz"},
    {
        "sysCreatedAt": {"en": "Created At", "de": "Erstellt am", "fr": "Cree le"},
        "sysCreatedBy": {"en": "Created By", "de": "Erstellt von", "fr": "Cree par"},
        "sysModifiedAt": {"en": "Modified At", "de": "Geaendert am", "fr": "Modifie le"},
        "sysModifiedBy": {"en": "Modified By", "de": "Geaendert von", "fr": "Modifie par"},
    },
)
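To see what the new base class buys, a minimal subclass sketch (MyRecord is invented; the sys* values are normally stamped by the interface layer, not by callers):

    class MyRecord(PowerOnModel):
        name: str

    record = MyRecord(name="demo")
    print(record.sysCreatedAt)  # None until the system sets it
    print("sysModifiedBy" in MyRecord.model_json_schema()["properties"])  # True, inherited audit field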
@@ -6,24 +6,12 @@ from typing import List, Dict, Any, Optional
from enum import Enum
from datetime import date, datetime, timezone
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid


class BillingModelEnum(str, Enum):
    """Billing model types (prepaid only; legacy UNLIMITED in DB maps to PREPAY_MANDATE)."""
    PREPAY_MANDATE = "PREPAY_MANDATE"  # Prepaid budget shared by all users in mandate
    PREPAY_USER = "PREPAY_USER"  # Prepaid budget per user within mandate


# Only used by initRootMandateBilling (root mandate PREPAY_USER + starting credit in Settings).
DEFAULT_USER_CREDIT_CHF = 5.0


class AccountTypeEnum(str, Enum):
    """Account type for billing accounts."""
    MANDATE = "MANDATE"  # Account for entire mandate
    USER = "USER"  # Account for specific user within mandate

# End-customer price for storage above plan-included volume (CHF per GB per month).
STORAGE_PRICE_PER_GB_CHF = 0.50


class TransactionTypeEnum(str, Enum):

@@ -39,6 +27,8 @@ class ReferenceTypeEnum(str, Enum):
    PAYMENT = "PAYMENT"  # Payment/top-up
    ADMIN = "ADMIN"  # Admin adjustment
    SYSTEM = "SYSTEM"  # System credit (e.g., initial credit)
    STORAGE = "STORAGE"  # Metered storage overage (prepay pool)
    SUBSCRIPTION = "SUBSCRIPTION"  # AI budget credit from subscription plan


class PeriodTypeEnum(str, Enum):

@@ -48,14 +38,13 @@ class PeriodTypeEnum(str, Enum):
    YEAR = "YEAR"


class BillingAccount(BaseModel):
class BillingAccount(PowerOnModel):
    """Billing account for mandate or user-mandate combination."""
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()), description="Primary key"
    )
    mandateId: str = Field(..., description="Foreign key to Mandate")
    userId: Optional[str] = Field(None, description="Foreign key to User (only for PREPAY_USER)")
    accountType: AccountTypeEnum = Field(..., description="Account type: MANDATE or USER")
    userId: Optional[str] = Field(None, description="Foreign key to User (None = mandate pool account, set = user audit account)")
    balance: float = Field(default=0.0, description="Current balance in CHF")
    warningThreshold: float = Field(default=0.0, description="Warning threshold in CHF")
    lastWarningAt: Optional[datetime] = Field(None, description="Last warning sent timestamp")

@@ -69,7 +58,6 @@ registerModelLabels(
        "id": {"en": "ID", "de": "ID"},
        "mandateId": {"en": "Mandate ID", "de": "Mandanten-ID"},
        "userId": {"en": "User ID", "de": "Benutzer-ID"},
        "accountType": {"en": "Account Type", "de": "Kontotyp"},
        "balance": {"en": "Balance (CHF)", "de": "Guthaben (CHF)"},
        "warningThreshold": {"en": "Warning Threshold (CHF)", "de": "Warnschwelle (CHF)"},
        "lastWarningAt": {"en": "Last Warning", "de": "Letzte Warnung"},

@@ -78,7 +66,7 @@ registerModelLabels(
)


class BillingTransaction(BaseModel):
class BillingTransaction(PowerOnModel):
    """Single billing transaction (credit, debit, adjustment)."""
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()), description="Primary key"

@@ -129,30 +117,43 @@ registerModelLabels(


class BillingSettings(BaseModel):
    """Billing settings per mandate."""
    """Billing settings per mandate. Only PREPAY_MANDATE model."""
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()), description="Primary key"
    )
    mandateId: str = Field(..., description="Foreign key to Mandate (UNIQUE)")
    billingModel: BillingModelEnum = Field(..., description="Billing model")

    # Configuration
    defaultUserCredit: float = Field(
        default=0.0,
        description="Automatic initial credit (CHF) for PREPAY_USER only when a user is newly added to the root mandate; other mandates use 0 on join.",
    )
    warningThresholdPercent: float = Field(default=10.0, description="Warning threshold as percentage")

    # Stripe
    stripeCustomerId: Optional[str] = Field(None, description="Stripe Customer ID (cus_xxx) — one per mandate")

    # Notifications (e.g. mandate owner / finance — also used when PREPAY_MANDATE pool is exhausted)
    # Auto-Recharge for AI budget
    autoRechargeEnabled: bool = Field(default=False, description="Auto-buy AI budget when low")
    rechargeAmountCHF: float = Field(default=10.0, description="Amount per auto-recharge (CHF, prepaid via Stripe)")
    rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month")
    rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month")
    monthResetAt: Optional[datetime] = Field(None, description="When rechargesThisMonth was last reset")

    # Notifications
    notifyEmails: List[str] = Field(
        default_factory=list,
        description="Email addresses for billing alerts (mandate pool exhausted, warnings, etc.)",
        description="Email addresses for billing alerts (pool exhausted, warnings, etc.)",
    )
    notifyOnWarning: bool = Field(default=True, description="Send email when warning threshold is reached")

    # Storage overage (high-watermark within subscription period; resets on new period)
    storageHighWatermarkMB: float = Field(
        default=0.0, description="Peak indexed data volume MB this billing period"
    )
    storagePeriodStartAt: Optional[datetime] = Field(
        None, description="Subscription billing period start used for storage reset"
    )
    storageBilledUpToMB: float = Field(
        default=0.0,
        description="Overage MB already debited this period (above plan-included volume)",
    )


registerModelLabels(
    "BillingSettings",

@@ -160,18 +161,22 @@ registerModelLabels(
    {
        "id": {"en": "ID", "de": "ID"},
        "mandateId": {"en": "Mandate ID", "de": "Mandanten-ID"},
        "billingModel": {"en": "Billing Model", "de": "Abrechnungsmodell"},
        "defaultUserCredit": {
            "en": "Root start credit (CHF)",
            "de": "Startguthaben nur Root-Mandant (CHF)",
        },
        "warningThresholdPercent": {"en": "Warning Threshold (%)", "de": "Warnschwelle (%)"},
        "stripeCustomerId": {"en": "Stripe Customer ID", "de": "Stripe-Kunden-ID"},
        "autoRechargeEnabled": {"en": "Auto-Recharge", "de": "Auto-Nachladung"},
        "rechargeAmountCHF": {"en": "Recharge Amount (CHF)", "de": "Nachladebetrag (CHF)"},
        "rechargeMaxPerMonth": {"en": "Max Recharges/Month", "de": "Max. Nachladungen/Monat"},
        "notifyEmails": {
            "en": "Billing notification emails (owner / admin)",
            "de": "E-Mails für Billing-Alerts (Inhaber/Admin)",
            "de": "E-Mails fuer Billing-Alerts (Inhaber/Admin)",
        },
        "notifyOnWarning": {"en": "Notify on Warning", "de": "Bei Warnung benachrichtigen"},
        "storageHighWatermarkMB": {"en": "Storage peak (MB)", "de": "Speicher-Peak (MB)"},
        "storagePeriodStartAt": {"en": "Storage period start", "de": "Speicher-Periodenbeginn"},
        "storageBilledUpToMB": {
            "en": "Storage billed overage (MB)",
            "de": "Speicher abgerechneter Überhang (MB)",
        },
    },
)

@@ -238,7 +243,6 @@ class BillingBalanceResponse(BaseModel):
    """Response model for balance endpoint."""
    mandateId: str
    mandateName: str
    billingModel: BillingModelEnum
    balance: float
    currency: str = "CHF"
    warningThreshold: float

@@ -269,20 +273,8 @@ class BillingCheckResult(BaseModel):
    reason: Optional[str] = None
    currentBalance: Optional[float] = None
    requiredAmount: Optional[float] = None
    billingModel: Optional[BillingModelEnum] = None
    upgradeRequired: Optional[bool] = None
    subscriptionUiPath: Optional[str] = None
    userAction: Optional[str] = None


def parseBillingModelFromStoredValue(raw: Optional[str]) -> BillingModelEnum:
    """Map DB string to enum. Legacy UNLIMITED / unknown values become PREPAY_MANDATE."""
    if raw is None or (isinstance(raw, str) and raw.strip() == ""):
        return BillingModelEnum.PREPAY_MANDATE
    s = str(raw).strip().upper()
    if s == "UNLIMITED":
        return BillingModelEnum.PREPAY_MANDATE
    try:
        return BillingModelEnum(raw)
    except ValueError:
        return BillingModelEnum.PREPAY_MANDATE
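A quick sanity check of the legacy mapping above (values other than UNLIMITED are illustrative):

    # Legacy and empty DB values collapse to the mandate pool model.
    assert parseBillingModelFromStoredValue(None) is BillingModelEnum.PREPAY_MANDATE
    assert parseBillingModelFromStoredValue("UNLIMITED") is BillingModelEnum.PREPAY_MANDATE
    assert parseBillingModelFromStoredValue("PREPAY_USER") is BillingModelEnum.PREPAY_USER
    # Unknown strings also fall back rather than raising.
    assert parseBillingModelFromStoredValue("SOMETHING_ELSE") is BillingModelEnum.PREPAY_MANDATE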
@@ -5,12 +5,13 @@
from typing import List, Dict, Any, Optional
from enum import Enum
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid


class ChatLog(BaseModel):
class ChatLog(PowerOnModel):
    """Log entries for chat workflows. User-owned, no mandate context."""
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()), description="Primary key"

@@ -56,7 +57,7 @@ registerModelLabels(
)


class ChatDocument(BaseModel):
class ChatDocument(PowerOnModel):
    """Documents attached to chat messages. User-owned, no mandate context."""
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()), description="Primary key"

@@ -163,7 +164,7 @@ registerModelLabels(
)


class ChatMessage(BaseModel):
class ChatMessage(PowerOnModel):
    """Messages in chat workflows. User-owned, no mandate context."""
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()), description="Primary key"

@@ -260,7 +261,7 @@ registerModelLabels(
)


class ChatWorkflow(BaseModel):
class ChatWorkflow(PowerOnModel):
    """Chat workflow container. User-owned, no mandate context."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    featureInstanceId: Optional[str] = Field(None, description="Feature instance ID for multi-tenancy isolation", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
@@ -8,16 +8,18 @@ Google Drive folder, FTP directory, etc.) for agent-accessible data containers.

from typing import Dict, Any, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid


class DataSource(BaseModel):
class DataSource(PowerOnModel):
    """Configured external data source linked to a UserConnection."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
    connectionId: str = Field(description="FK to UserConnection")
    sourceType: str = Field(description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder")
    sourceType: str = Field(
        description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)"
    )
    path: str = Field(description="External path (e.g. '/sites/MySite/Documents/Reports')")
    label: str = Field(description="User-visible label (often the last path segment)")
    displayPath: Optional[str] = Field(

@@ -29,7 +31,21 @@ class DataSource(BaseModel):
    userId: str = Field(default="", description="Owner user ID")
    autoSync: bool = Field(default=False, description="Automatically sync on schedule")
    lastSynced: Optional[float] = Field(default=None, description="Last sync timestamp")
    createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
    scope: str = Field(
        default="personal",
        description="Data visibility scope: personal, featureInstance, mandate, global",
        json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
            {"value": "personal", "label": {"en": "Personal", "de": "Persönlich"}},
            {"value": "featureInstance", "label": {"en": "Feature Instance", "de": "Feature-Instanz"}},
            {"value": "mandate", "label": {"en": "Mandate", "de": "Mandant"}},
            {"value": "global", "label": {"en": "Global", "de": "Global"}},
        ]}
    )
    neutralize: bool = Field(
        default=False,
        description="Whether this data source should be neutralized before AI processing",
        json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
    )


registerModelLabels(

@@ -47,7 +63,8 @@ registerModelLabels(
        "userId": {"en": "User ID", "de": "Benutzer-ID", "fr": "ID utilisateur"},
        "autoSync": {"en": "Auto Sync", "de": "Auto-Sync", "fr": "Synchro auto"},
        "lastSynced": {"en": "Last Synced", "de": "Letzter Sync", "fr": "Dernier sync"},
        "createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
        "scope": {"en": "Scope", "de": "Sichtbarkeit"},
        "neutralize": {"en": "Neutralize", "de": "Neutralisieren"},
    },
)
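The scope/neutralize pair introduced here recurs on several models in this commit; a small illustration of a scoped source record (all values invented, any required fields outside the hunks shown are omitted):

    # Hypothetical record; connectionId/path refer to no real connection.
    src = DataSource(
        connectionId="conn-42",
        sourceType="clickupList",
        path="/team/123/list/456",
        label="456",
        userId="user-1",
        scope="mandate",      # visible to every user of the mandate
        neutralize=True,      # content is neutralized before AI processing
    )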
@@ -6,14 +6,14 @@ A FeatureDataSource links a FeatureInstance table (DATA_OBJECT) to a workspace
so the agent can query structured feature data (e.g. TrusteePosition rows).
"""

from typing import Optional
from typing import Dict, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid


class FeatureDataSource(BaseModel):
class FeatureDataSource(PowerOnModel):
    """A feature-instance table attached as data source in the AI workspace."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
    featureInstanceId: str = Field(description="FK to FeatureInstance")

@@ -24,7 +24,25 @@ class FeatureDataSource(BaseModel):
    mandateId: str = Field(default="", description="Mandate scope")
    userId: str = Field(default="", description="Owner user ID")
    workspaceInstanceId: str = Field(description="Workspace instance where this source is used")
    createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
    scope: str = Field(
        default="personal",
        description="Data visibility scope: personal, featureInstance, mandate, global",
        json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
            {"value": "personal", "label": {"en": "Personal", "de": "Persönlich"}},
            {"value": "featureInstance", "label": {"en": "Feature Instance", "de": "Feature-Instanz"}},
            {"value": "mandate", "label": {"en": "Mandate", "de": "Mandant"}},
            {"value": "global", "label": {"en": "Global", "de": "Global"}},
        ]}
    )
    neutralize: bool = Field(
        default=False,
        description="Whether this data source should be neutralized before AI processing",
        json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
    )
    recordFilter: Optional[Dict[str, str]] = Field(
        default=None,
        description="Record-level filter applied when querying this table, e.g. {'sessionId': 'abc-123'}",
    )


registerModelLabels(

@@ -40,6 +58,5 @@ registerModelLabels(
        "mandateId": {"en": "Mandate", "de": "Mandant", "fr": "Mandat"},
        "userId": {"en": "User", "de": "Benutzer", "fr": "Utilisateur"},
        "workspaceInstanceId": {"en": "Workspace", "de": "Workspace", "fr": "Espace de travail"},
        "createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
    },
)
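recordFilter narrows the attached table to matching rows; a sketch under that reading (the filter key comes from the field description above, the ids are invented, and required FeatureDataSource fields not visible in these hunks are omitted):

    # Hypothetical: attach only one session's rows to a workspace.
    fds = FeatureDataSource(
        featureInstanceId="fi-7",
        workspaceInstanceId="ws-3",
        recordFilter={"sessionId": "abc-123"},
        scope="featureInstance",
    )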
@@ -5,11 +5,12 @@
import uuid
from typing import Optional, Dict, Any
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.datamodels.datamodelUtils import TextMultilingual


class Feature(BaseModel):
class Feature(PowerOnModel):
    """
    Feature definition (global, e.g. 'trustee', 'chatbot').
    Features are the functionalities available on the platform.

@@ -40,7 +41,7 @@ registerModelLabels(
)


class FeatureInstance(BaseModel):
class FeatureInstance(PowerOnModel):
    """
    Instance of a feature within a mandate.
    A mandate can have multiple instances of the same feature.
@@ -4,18 +4,17 @@

from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid


class FileFolder(BaseModel):
class FileFolder(PowerOnModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    name: str = Field(description="Folder name", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True})
    parentId: Optional[str] = Field(default=None, description="Parent folder ID (null = root)", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
    mandateId: Optional[str] = Field(default=None, description="Mandate context", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    featureInstanceId: Optional[str] = Field(default=None, description="Feature instance context", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp", json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False})


registerModelLabels(

@@ -27,6 +26,5 @@ registerModelLabels(
        "parentId": {"en": "Parent Folder", "fr": "Dossier parent"},
        "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
        "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance"},
        "createdAt": {"en": "Created At", "fr": "Créé le"},
    },
)
@@ -3,15 +3,14 @@
"""File-related datamodels: FileItem, FilePreview, FileData."""

from typing import Dict, Any, List, Optional, Union
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
import base64


class FileItem(BaseModel):
    model_config = ConfigDict(extra='allow')  # Preserve system fields (_createdBy, _createdAt, etc.)
class FileItem(PowerOnModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    mandateId: Optional[str] = Field(default="", description="ID of the mandate this file belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    featureInstanceId: Optional[str] = Field(default="", description="ID of the feature instance this file belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"})

@@ -19,11 +18,25 @@ class FileItem(BaseModel):
    mimeType: str = Field(description="MIME type of the file", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    fileHash: str = Field(description="Hash of the file", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    fileSize: int = Field(description="Size of the file in bytes", json_schema_extra={"frontend_type": "integer", "frontend_readonly": True, "frontend_required": False})
    creationDate: float = Field(default_factory=getUtcTimestamp, description="Date when the file was created (UTC timestamp in seconds)", json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False})
    tags: Optional[List[str]] = Field(default=None, description="Tags for categorization and search", json_schema_extra={"frontend_type": "tags", "frontend_readonly": False, "frontend_required": False})
    folderId: Optional[str] = Field(default=None, description="ID of the parent folder", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
    description: Optional[str] = Field(default=None, description="User-provided description of the file", json_schema_extra={"frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False})
    status: Optional[str] = Field(default=None, description="Processing status: pending, extracted, embedding, indexed, failed", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    scope: str = Field(
        default="personal",
        description="Data visibility scope: personal, featureInstance, mandate, global",
        json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
            {"value": "personal", "label": {"en": "Personal", "de": "Persönlich"}},
            {"value": "featureInstance", "label": {"en": "Feature Instance", "de": "Feature-Instanz"}},
            {"value": "mandate", "label": {"en": "Mandate", "de": "Mandant"}},
            {"value": "global", "label": {"en": "Global", "de": "Global"}},
        ]}
    )
    neutralize: bool = Field(
        default=False,
        description="Whether this file should be neutralized before AI processing",
        json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
    )

registerModelLabels(
    "FileItem",

@@ -36,11 +49,12 @@ registerModelLabels(
        "mimeType": {"en": "MIME Type", "fr": "Type MIME"},
        "fileHash": {"en": "File Hash", "fr": "Hash du fichier"},
        "fileSize": {"en": "File Size", "fr": "Taille du fichier"},
        "creationDate": {"en": "Creation Date", "fr": "Date de création"},
        "tags": {"en": "Tags", "fr": "Tags"},
        "folderId": {"en": "Folder ID", "fr": "ID du dossier"},
        "description": {"en": "Description", "fr": "Description"},
        "status": {"en": "Status", "fr": "Statut"},
        "scope": {"en": "Scope", "de": "Sichtbarkeit"},
        "neutralize": {"en": "Neutralize", "de": "Neutralisieren"},
    },
)

@@ -71,7 +85,7 @@ registerModelLabels(
    },
)

class FileData(BaseModel):
class FileData(PowerOnModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
    data: str = Field(description="File data content")
    base64Encoded: bool = Field(description="Whether the data is base64 encoded")
@@ -9,11 +9,11 @@ import uuid
import secrets
from typing import Optional, List
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp


class Invitation(BaseModel):
class Invitation(PowerOnModel):
    """
    Invitation token for new users.
    Enables self-service onboarding to mandates and feature instances.

@@ -56,15 +56,6 @@ class Invitation(BaseModel):
        description="Email address to send invitation link (optional)",
        json_schema_extra={"frontend_type": "email", "frontend_readonly": False, "frontend_required": False}
    )
    createdBy: str = Field(
        description="User ID of the person who created the invitation",
        json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
    )
    createdAt: float = Field(
        default_factory=getUtcTimestamp,
        description="When the invitation was created (UTC timestamp)",
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
    )
    expiresAt: float = Field(
        description="When the invitation expires (UTC timestamp)",
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}

@@ -121,8 +112,6 @@ registerModelLabels(
        "roleIds": {"en": "Roles", "de": "Rollen", "fr": "Rôles"},
        "targetUsername": {"en": "Target Username", "de": "Ziel-Benutzername", "fr": "Nom d'utilisateur cible"},
        "email": {"en": "Email (optional)", "de": "E-Mail (optional)", "fr": "Email (optionnel)"},
        "createdBy": {"en": "Created By", "de": "Erstellt von", "fr": "Créé par"},
        "createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
        "expiresAt": {"en": "Expires At", "de": "Gültig bis", "fr": "Expire le"},
        "usedBy": {"en": "Used By", "de": "Verwendet von", "fr": "Utilisé par"},
        "usedAt": {"en": "Used At", "de": "Verwendet am", "fr": "Utilisé le"},
@@ -3,8 +3,10 @@
"""Knowledge Store data models: FileContentIndex, ContentChunk, WorkflowMemory.

These models support the 3-tier RAG architecture:
- Shared Layer: mandateId-scoped, isShared=True
- Instance Layer: userId + featureInstanceId-scoped
- Personal Layer: scope=personal, userId-scoped
- Instance Layer: scope=featureInstance, featureInstanceId-scoped
- Mandate Layer: scope=mandate, mandateId-scoped (visible to all mandate users)
- Global Layer: scope=global (sysAdmin only)
- Workflow Layer: workflowId-scoped (WorkflowMemory)

Vector fields use json_schema_extra={"db_type": "vector(1536)"} for pgvector.
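The pgvector convention mentioned in the docstring, spelled out as a field declaration (illustrative only; ExampleChunk is not a real model in this commit):

    from typing import List, Optional
    from pydantic import Field
    from modules.datamodels.datamodelBase import PowerOnModel

    class ExampleChunk(PowerOnModel):
        text: str
        embedding: Optional[List[float]] = Field(
            default=None,
            description="1536-dim embedding stored in a pgvector column",
            # Picked up by the DB layer to create a vector(1536) column instead of JSON.
            json_schema_extra={"db_type": "vector(1536)"},
        )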
@@ -12,19 +14,19 @@ Vector fields use json_schema_extra={"db_type": "vector(1536)"} for pgvector.

from typing import Dict, Any, List, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid


class FileContentIndex(BaseModel):
class FileContentIndex(PowerOnModel):
    """Structural index of a file's content objects. Created without AI.
    Lives in the Instance Layer; optionally promoted to Shared Layer via isShared."""
    Scope is mirrored from FileItem (poweron_management) at indexing time."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key (typically = fileId)")
    userId: str = Field(description="Owner user ID")
    featureInstanceId: str = Field(default="", description="Feature instance scope")
    mandateId: str = Field(default="", description="Mandate scope")
    isShared: bool = Field(default=False, description="Visible in Shared Layer for all mandate users")
    fileName: str = Field(description="Original file name")
    mimeType: str = Field(description="MIME type of the file")
    containerPath: Optional[str] = Field(default=None, description="Path within a container (e.g. 'archive.zip/folder/report.pdf')")

@@ -34,6 +36,18 @@ class FileContentIndex(BaseModel):
    objectSummary: List[Dict[str, Any]] = Field(default_factory=list, description="Compact summary per content object")
    extractedAt: float = Field(default_factory=getUtcTimestamp, description="Extraction timestamp")
    status: str = Field(default="pending", description="Processing status: pending, extracted, embedding, indexed, failed")
    scope: str = Field(
        default="personal",
        description="Data visibility scope: personal, featureInstance, mandate, global",
    )
    neutralizationStatus: Optional[str] = Field(
        default=None,
        description="Neutralization status: completed, failed, skipped, None = not required",
    )
    isNeutralized: bool = Field(
        default=False,
        description="True if content was neutralized before indexing",
    )


registerModelLabels(

@@ -44,7 +58,6 @@ registerModelLabels(
        "userId": {"en": "User ID", "fr": "ID utilisateur"},
        "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance"},
        "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
        "isShared": {"en": "Shared", "fr": "Partagé"},
        "fileName": {"en": "File Name", "fr": "Nom de fichier"},
        "mimeType": {"en": "MIME Type", "fr": "Type MIME"},
        "containerPath": {"en": "Container Path", "fr": "Chemin du conteneur"},

@@ -54,11 +67,14 @@ registerModelLabels(
        "objectSummary": {"en": "Object Summary", "fr": "Résumé des objets"},
        "extractedAt": {"en": "Extracted At", "fr": "Extrait le"},
        "status": {"en": "Status", "fr": "Statut"},
        "scope": {"en": "Scope", "de": "Sichtbarkeit"},
        "neutralizationStatus": {"en": "Neutralization Status", "de": "Neutralisierungsstatus"},
        "isNeutralized": {"en": "Is Neutralized", "de": "Neutralisiert"},
    },
)


class ContentChunk(BaseModel):
class ContentChunk(PowerOnModel):
    """Persisted content chunk with embedding vector. Reusable across workflows.
    Scalar content object (or chunk thereof) with pgvector embedding."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")

@@ -96,7 +112,7 @@ registerModelLabels(
)


class RoundMemory(BaseModel):
class RoundMemory(PowerOnModel):
    """Persistent per-round memory for agent tool results, file refs, and decisions.

    Stored after each agent round so that RAG can retrieve relevant context

@@ -120,7 +136,6 @@ class RoundMemory(BaseModel):
        description="Embedding of summary for semantic retrieval",
        json_schema_extra={"db_type": "vector(1536)"},
    )
    createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")


registerModelLabels(

@@ -136,12 +151,11 @@ registerModelLabels(
        "fullData": {"en": "Full Data", "fr": "Données complètes"},
        "fileIds": {"en": "File IDs", "fr": "IDs de fichier"},
        "embedding": {"en": "Embedding", "fr": "Vecteur d'embedding"},
        "createdAt": {"en": "Created At", "fr": "Créé le"},
    },
)


class WorkflowMemory(BaseModel):
class WorkflowMemory(PowerOnModel):
    """Workflow-scoped key-value cache for entities and facts.
    Extracted during agent rounds, persisted for cross-round and cross-workflow reuse."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")

@@ -151,7 +165,6 @@ class WorkflowMemory(BaseModel):
    key: str = Field(description="Key identifier (e.g. 'entity:companyName')")
    value: str = Field(description="Extracted value")
    source: str = Field(default="extraction", description="Origin: extraction, tool, conversation, summary")
    createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
    embedding: Optional[List[float]] = Field(
        default=None, description="Optional embedding for semantic lookup",
        json_schema_extra={"db_type": "vector(1536)"}

@@ -169,7 +182,6 @@ registerModelLabels(
        "key": {"en": "Key", "fr": "Clé"},
        "value": {"en": "Value", "fr": "Valeur"},
        "source": {"en": "Source", "fr": "Source"},
        "createdAt": {"en": "Created At", "fr": "Créé le"},
        "embedding": {"en": "Embedding", "fr": "Vecteur d'embedding"},
    },
)
@ -9,10 +9,11 @@ Rollen werden über Junction Tables verknüpft für saubere CASCADE DELETE.
|
|||
|
||||
import uuid
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
|
||||
|
||||
class UserMandate(BaseModel):
|
||||
class UserMandate(PowerOnModel):
|
||||
"""
|
||||
User-Mitgliedschaft in einem Mandanten.
|
||||
Kein User gehört direkt zu einem Mandanten - Zugehörigkeit wird über dieses Model gesteuert.
|
||||
|
|
@ -50,7 +51,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class FeatureAccess(BaseModel):
|
||||
class FeatureAccess(PowerOnModel):
|
||||
"""
|
||||
User-Zugriff auf eine Feature-Instanz.
|
||||
Definiert welche User auf welche Feature-Instanzen zugreifen können.
|
||||
|
|
@ -88,7 +89,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class UserMandateRole(BaseModel):
|
||||
class UserMandateRole(PowerOnModel):
|
||||
"""
|
||||
Junction Table: UserMandate zu Role.
|
||||
Ermöglicht CASCADE DELETE auf Datenbankebene.
|
||||
|
|
@ -119,7 +120,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class FeatureAccessRole(BaseModel):
|
||||
class FeatureAccessRole(PowerOnModel):
|
||||
"""
|
||||
Junction Table: FeatureAccess zu Role.
|
||||
Ermöglicht CASCADE DELETE auf Datenbankebene.
|
||||
|
|
|
|||
|
|
@ -6,8 +6,8 @@ import uuid
|
|||
from typing import Optional
|
||||
from enum import Enum
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
|
||||
|
||||
class MessagingChannel(str, Enum):
|
||||
|
|
@ -26,7 +26,7 @@ class DeliveryStatus(str, Enum):
|
|||
FAILED = "failed"
|
||||
|
||||
|
||||
class MessagingSubscription(BaseModel):
|
||||
class MessagingSubscription(PowerOnModel):
|
||||
"""Data model for messaging subscriptions"""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
@ -64,26 +64,6 @@ class MessagingSubscription(BaseModel):
|
|||
description="Whether the subscription is enabled",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
|
||||
)
|
||||
creationDate: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="When the subscription was created (UTC timestamp in seconds)",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
lastModified: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="When the subscription was last modified (UTC timestamp in seconds)",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
createdBy: Optional[str] = Field(
|
||||
default=None,
|
||||
description="User ID who created the subscription",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
modifiedBy: Optional[str] = Field(
|
||||
default=None,
|
||||
description="User ID who last modified the subscription",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
|
||||
model_config = ConfigDict(use_enum_values=True)
|
||||
|
||||
|
|
@ -100,10 +80,6 @@ registerModelLabels(
|
|||
"description": {"en": "Description", "fr": "Description"},
|
||||
"isSystemSubscription": {"en": "System Subscription", "fr": "Abonnement système"},
|
||||
"enabled": {"en": "Enabled", "fr": "Activé"},
|
||||
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
|
||||
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
|
||||
"createdBy": {"en": "Created By", "fr": "Créé par"},
|
||||
"modifiedBy": {"en": "Modified By", "fr": "Modifié par"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -155,16 +131,6 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
description="Whether this registration is enabled",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
|
||||
)
|
||||
creationDate: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="When the registration was created (UTC timestamp in seconds)",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
lastModified: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="When the registration was last modified (UTC timestamp in seconds)",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
|
||||
model_config = ConfigDict(use_enum_values=True)
|
||||
|
||||
|
|
@ -181,8 +147,6 @@ registerModelLabels(
|
|||
"channel": {"en": "Channel", "fr": "Canal"},
|
||||
"channelConfig": {"en": "Channel Config", "fr": "Configuration du canal"},
|
||||
"enabled": {"en": "Enabled", "fr": "Activé"},
|
||||
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
|
||||
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -248,11 +212,6 @@ class MessagingDelivery(BaseModel):
|
|||
description="When the delivery was sent (UTC timestamp in seconds)",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
creationDate: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="When the delivery record was created (UTC timestamp in seconds)",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
|
||||
model_config = ConfigDict(use_enum_values=True)
|
||||
|
||||
|
|
@ -270,7 +229,6 @@ registerModelLabels(
|
|||
"status": {"en": "Status", "fr": "Statut"},
|
||||
"errorMessage": {"en": "Error Message", "fr": "Message d'erreur"},
|
||||
"sentAt": {"en": "Sent At", "fr": "Envoyé le"},
|
||||
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -349,4 +307,3 @@ class MessagingSubscriptionExecutionResult(BaseModel):
|
|||
description="Error message if execution failed",
|
||||
json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
model_config = ConfigDict(extra="allow") # Allow additional fields for custom results
|
||||
|
|
|
|||
|
|
@ -9,8 +9,8 @@ import uuid
|
|||
from typing import Optional, List
|
||||
from enum import Enum
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
|
||||
|
||||
class NotificationType(str, Enum):
|
||||
|
|
@ -43,7 +43,7 @@ class NotificationAction(BaseModel):
|
|||
)
|
||||
|
||||
|
||||
class UserNotification(BaseModel):
|
||||
class UserNotification(PowerOnModel):
|
||||
"""
|
||||
In-app notification for a user.
|
||||
Supports actionable notifications with accept/decline buttons.
|
||||
|
|
@ -137,11 +137,6 @@ class UserNotification(BaseModel):
|
|||
)
|
||||
|
||||
# Timestamps
|
||||
createdAt: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="When the notification was created (UTC timestamp)",
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
readAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="When the notification was read (UTC timestamp)",
|
||||
|
|
@ -177,7 +172,6 @@ registerModelLabels(
|
|||
"actions": {"en": "Actions", "de": "Aktionen", "fr": "Actions"},
|
||||
"actionTaken": {"en": "Action Taken", "de": "Durchgeführte Aktion", "fr": "Action effectuée"},
|
||||
"actionResult": {"en": "Action Result", "de": "Aktions-Ergebnis", "fr": "Résultat de l'action"},
|
||||
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
|
||||
"readAt": {"en": "Read At", "de": "Gelesen am", "fr": "Lu le"},
|
||||
"actionedAt": {"en": "Actioned At", "de": "Bearbeitet am", "fr": "Traité le"},
|
||||
"expiresAt": {"en": "Expires At", "de": "Gültig bis", "fr": "Expire le"},
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import uuid
|
|||
from typing import Optional
|
||||
from enum import Enum
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.datamodels.datamodelUtils import TextMultilingual
|
||||
from modules.datamodels.datamodelUam import AccessLevel
|
||||
|
|
@ -25,7 +26,7 @@ class AccessRuleContext(str, Enum):
|
|||
RESOURCE = "RESOURCE" # System resources (AI models, actions, etc.)
|
||||
|
||||
|
||||
class Role(BaseModel):
|
||||
class Role(PowerOnModel):
|
||||
"""
|
||||
Data model for RBAC roles.
|
||||
|
||||
|
|
@ -90,7 +91,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class AccessRule(BaseModel):
|
||||
class AccessRule(PowerOnModel):
|
||||
"""
|
||||
Data model for access control rules.
|
||||
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ Multi-Tenant Design:
|
|||
|
||||
from typing import Optional, Any
|
||||
from pydantic import BaseModel, Field, ConfigDict, model_validator
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
from .datamodelUam import AuthAuthority
|
||||
|
|
@ -30,7 +31,7 @@ class TokenPurpose(str, Enum):
|
|||
DATA_CONNECTION = "dataConnection"
|
||||
|
||||
|
||||
class Token(BaseModel):
|
||||
class Token(PowerOnModel):
|
||||
"""
|
||||
Authentication Token model.
|
||||
|
||||
|
|
@ -55,9 +56,6 @@ class Token(BaseModel):
|
|||
description="When the token expires (UTC timestamp in seconds)"
|
||||
)
|
||||
tokenRefresh: Optional[str] = None
|
||||
createdAt: Optional[float] = Field(
|
||||
None, description="When the token was created (UTC timestamp in seconds)"
|
||||
)
|
||||
status: TokenStatus = Field(
|
||||
default=TokenStatus.ACTIVE, description="Token status: active/revoked"
|
||||
)
|
||||
|
|
@ -106,7 +104,6 @@ registerModelLabels(
|
|||
"tokenType": {"en": "Token Type", "de": "Token-Typ", "fr": "Type de jeton"},
|
||||
"expiresAt": {"en": "Expires At", "de": "Läuft ab am", "fr": "Expire le"},
|
||||
"tokenRefresh": {"en": "Refresh Token", "de": "Refresh-Token", "fr": "Jeton de rafraîchissement"},
|
||||
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
|
||||
"status": {"en": "Status", "de": "Status", "fr": "Statut"},
|
||||
"revokedAt": {"en": "Revoked At", "de": "Widerrufen am", "fr": "Révoqué le"},
|
||||
"revokedBy": {"en": "Revoked By", "de": "Widerrufen von", "fr": "Révoqué par"},
|
||||
|
|
@ -116,7 +113,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class AuthEvent(BaseModel):
|
||||
class AuthEvent(PowerOnModel):
|
||||
"""Authentication event for audit logging."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the auth event", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
userId: str = Field(description="ID of the user this event belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ from typing import Dict, List, Optional
|
|||
from enum import Enum
|
||||
from datetime import datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
import uuid
|
||||
|
||||
|
|
@ -30,6 +31,7 @@ OPERATIVE_STATUSES = {SubscriptionStatusEnum.ACTIVE, SubscriptionStatusEnum.TRIA
|
|||
|
||||
ALLOWED_TRANSITIONS = {
|
||||
(SubscriptionStatusEnum.PENDING, SubscriptionStatusEnum.ACTIVE),
|
||||
(SubscriptionStatusEnum.PENDING, SubscriptionStatusEnum.TRIALING),
|
||||
(SubscriptionStatusEnum.PENDING, SubscriptionStatusEnum.SCHEDULED),
|
||||
(SubscriptionStatusEnum.PENDING, SubscriptionStatusEnum.EXPIRED),
|
||||
(SubscriptionStatusEnum.SCHEDULED, SubscriptionStatusEnum.ACTIVE),
|
||||
|
|
@ -70,6 +72,8 @@ class SubscriptionPlan(BaseModel):
|
|||
maxUsers: Optional[int] = Field(None, description="Hard cap on active users (None = unlimited)")
|
||||
maxFeatureInstances: Optional[int] = Field(None, description="Hard cap on active feature instances (None = unlimited)")
|
||||
trialDays: Optional[int] = Field(None, description="Trial duration in days (only for trial plans)")
|
||||
maxDataVolumeMB: Optional[int] = Field(None, description="Soft-limit for data volume in MB per mandate (None = unlimited)")
|
||||
budgetAiCHF: float = Field(default=0.0, description="AI budget (CHF) included in subscription price per billing period")
|
||||
successorPlanKey: Optional[str] = Field(None, description="Plan to transition to when trial ends")
|
||||
|
||||
|
||||
|
|
@ -84,6 +88,8 @@ registerModelLabels(
|
|||
"pricePerFeatureInstanceCHF": {"en": "Price per Instance (CHF)", "de": "Preis pro Instanz (CHF)"},
|
||||
"maxUsers": {"en": "Max Users", "de": "Max. Benutzer", "fr": "Max. utilisateurs"},
|
||||
"maxFeatureInstances": {"en": "Max Instances", "de": "Max. Instanzen", "fr": "Max. instances"},
|
||||
"maxDataVolumeMB": {"en": "Data Volume (MB)", "de": "Datenvolumen (MB)"},
|
||||
"budgetAiCHF": {"en": "AI Budget (CHF)", "de": "AI-Budget (CHF)"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -122,7 +128,7 @@ registerModelLabels(
|
|||
# Instance: MandateSubscription
|
||||
# ============================================================================
|
||||
|
||||
class MandateSubscription(BaseModel):
|
||||
class MandateSubscription(PowerOnModel):
|
||||
"""A subscription instance bound to a specific mandate.
|
||||
See wiki/concepts/Subscription-State-Machine.md for state transitions."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
|
||||
|
|
@ -182,20 +188,24 @@ BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
|
|||
autoRenew=False,
|
||||
maxUsers=None,
|
||||
maxFeatureInstances=None,
|
||||
maxDataVolumeMB=None,
|
||||
budgetAiCHF=0.0,
|
||||
),
|
||||
"TRIAL_7D": SubscriptionPlan(
|
||||
planKey="TRIAL_7D",
|
||||
selectableByUser=False,
|
||||
title={"en": "Free Trial (7 days)", "de": "Gratis-Testphase (7 Tage)", "fr": "Essai gratuit (7 jours)"},
|
||||
description={
|
||||
"en": "Try the platform for 7 days — 1 user, up to 3 feature instances.",
|
||||
"de": "Plattform 7 Tage testen — 1 User, bis zu 3 Feature-Instanzen.",
|
||||
"en": "Try the platform for 7 days — 1 user, up to 3 feature instances, 5 CHF AI budget included.",
|
||||
"de": "Plattform 7 Tage testen — 1 User, bis zu 3 Feature-Instanzen, 5 CHF AI-Budget inklusive.",
|
||||
},
|
||||
billingPeriod=BillingPeriodEnum.NONE,
|
||||
autoRenew=False,
|
||||
maxUsers=1,
|
||||
maxFeatureInstances=3,
|
||||
trialDays=7,
|
||||
maxDataVolumeMB=500,
|
||||
budgetAiCHF=5.0,
|
||||
successorPlanKey="STANDARD_MONTHLY",
|
||||
),
|
||||
"STANDARD_MONTHLY": SubscriptionPlan(
|
||||
|
|
@ -203,24 +213,28 @@ BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
|
|||
selectableByUser=True,
|
||||
title={"en": "Standard (Monthly)", "de": "Standard (Monatlich)", "fr": "Standard (Mensuel)"},
|
||||
description={
|
||||
"en": "Usage-based billing per active user and feature instance, billed monthly.",
|
||||
"de": "Nutzungsbasierte Abrechnung pro aktivem User und Feature-Instanz, monatlich.",
|
||||
"en": "Usage-based billing per active user and feature instance, billed monthly. Includes 10 CHF AI budget.",
|
||||
"de": "Nutzungsbasierte Abrechnung pro aktivem User und Feature-Instanz, monatlich. Inkl. 10 CHF AI-Budget.",
|
||||
},
|
||||
billingPeriod=BillingPeriodEnum.MONTHLY,
|
||||
pricePerUserCHF=90.0,
|
||||
pricePerFeatureInstanceCHF=150.0,
|
||||
pricePerUserCHF=79.0,
|
||||
pricePerFeatureInstanceCHF=119.0,
|
||||
maxDataVolumeMB=1024,
|
||||
budgetAiCHF=10.0,
|
||||
),
|
||||
"STANDARD_YEARLY": SubscriptionPlan(
|
||||
planKey="STANDARD_YEARLY",
|
||||
selectableByUser=True,
|
||||
title={"en": "Standard (Yearly)", "de": "Standard (Jährlich)", "fr": "Standard (Annuel)"},
|
||||
description={
|
||||
"en": "Usage-based billing per active user and feature instance, billed yearly.",
|
||||
"de": "Nutzungsbasierte Abrechnung pro aktivem User und Feature-Instanz, jährlich.",
|
||||
"en": "Usage-based billing per active user and feature instance, billed yearly. Includes 120 CHF AI budget.",
|
||||
"de": "Nutzungsbasierte Abrechnung pro aktivem User und Feature-Instanz, jährlich. Inkl. 120 CHF AI-Budget.",
|
||||
},
|
||||
billingPeriod=BillingPeriodEnum.YEARLY,
|
||||
pricePerUserCHF=1080.0,
|
||||
pricePerFeatureInstanceCHF=1800.0,
|
||||
pricePerUserCHF=948.0,
|
||||
pricePerFeatureInstanceCHF=1428.0,
|
||||
maxDataVolumeMB=1024,
|
||||
budgetAiCHF=120.0,
|
||||
),
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -10,9 +10,10 @@ Multi-Tenant Design:
|
|||
"""
|
||||
|
||||
import uuid
|
||||
from typing import Optional, List
|
||||
from typing import Optional, List, Dict, Any
|
||||
from enum import Enum
|
||||
from pydantic import BaseModel, Field, EmailStr, field_validator, computed_field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
|
||||
|
|
@ -21,6 +22,7 @@ class AuthAuthority(str, Enum):
|
|||
LOCAL = "local"
|
||||
GOOGLE = "google"
|
||||
MSFT = "msft"
|
||||
CLICKUP = "clickup"
|
||||
|
||||
class ConnectionStatus(str, Enum):
|
||||
ACTIVE = "active"
|
||||
|
|
@ -59,7 +61,7 @@ class UserPermissions(BaseModel):
|
|||
)
|
||||
|
||||
|
||||
class Mandate(BaseModel):
|
||||
class Mandate(PowerOnModel):
|
||||
"""
|
||||
Mandate (Mandant/Tenant) model.
|
||||
Ein Mandant ist ein isolierter Bereich für Daten und Berechtigungen.
|
||||
|
|
@ -88,6 +90,11 @@ class Mandate(BaseModel):
|
|||
description="Whether this is a system mandate (e.g. root mandate). Cannot be deleted.",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
deletedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp when the mandate was soft-deleted. After 30 days, hard-delete is triggered.",
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
|
||||
@field_validator('isSystem', mode='before')
|
||||
@classmethod
|
||||
|
|
@ -97,7 +104,6 @@ class Mandate(BaseModel):
|
|||
return False
|
||||
return v
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
"Mandate",
|
||||
{"en": "Mandate", "de": "Mandant", "fr": "Mandat"},
|
||||
|
|
@ -107,11 +113,12 @@ registerModelLabels(
|
|||
"label": {"en": "Label", "de": "Label", "fr": "Libellé"},
|
||||
"enabled": {"en": "Enabled", "de": "Aktiviert", "fr": "Activé"},
|
||||
"isSystem": {"en": "System Mandate", "de": "System-Mandant", "fr": "Mandat système"},
|
||||
"deletedAt": {"en": "Deleted at", "de": "Gelöscht am", "fr": "Supprimé le"},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class UserConnection(BaseModel):
|
||||
class UserConnection(PowerOnModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the connection", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
userId: str = Field(description="ID of the user this connection belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
authority: AuthAuthority = Field(description="Authentication authority", json_schema_extra={"frontend_type": "select", "frontend_readonly": True, "frontend_required": False, "frontend_options": "/api/connections/authorities/options"})
|
||||
|
|
@ -141,7 +148,12 @@ class UserConnection(BaseModel):
|
|||
@property
|
||||
def displayLabel(self) -> str:
|
||||
"""Human-readable label for display in dropdowns"""
|
||||
authorityLabels = {"msft": "Microsoft", "google": "Google", "local": "Local"}
|
||||
authorityLabels = {
|
||||
"msft": "Microsoft",
|
||||
"google": "Google",
|
||||
"local": "Local",
|
||||
"clickup": "ClickUp",
|
||||
}
|
||||
return f"{authorityLabels.get(self.authority.value, self.authority.value)}: {self.externalUsername}"
|
||||
|
||||
|
||||
|
|
@ -168,7 +180,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
class User(PowerOnModel):
|
||||
"""
|
||||
User model.
|
||||
|
||||
|
|
@ -255,6 +267,11 @@ class User(BaseModel):
|
|||
description="Primary authentication authority",
|
||||
json_schema_extra={"frontend_type": "select", "frontend_readonly": True, "frontend_required": False, "frontend_options": "/api/connections/authorities/options"}
|
||||
)
|
||||
roleLabels: List[str] = Field(
|
||||
default_factory=list,
|
||||
description="Role labels (from DB or enriched when loading users)",
|
||||
json_schema_extra={"frontend_type": "multiselect", "frontend_readonly": True, "frontend_visible": False, "frontend_required": False},
|
||||
)
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
|
|
@ -269,6 +286,7 @@ registerModelLabels(
|
|||
"enabled": {"en": "Enabled", "de": "Aktiviert", "fr": "Activé"},
|
||||
"isSysAdmin": {"en": "System Admin", "de": "System-Admin", "fr": "Admin système"},
|
||||
"authenticationAuthority": {"en": "Auth Authority", "de": "Authentifizierung", "fr": "Autorité d'authentification"},
|
||||
"roleLabels": {"en": "Role Labels", "de": "Rollen-Labels", "fr": "Libellés de rôles"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -289,3 +307,65 @@ registerModelLabels(
|
|||
"resetTokenExpires": {"en": "Reset Token Expires", "de": "Token läuft ab", "fr": "Expiration du jeton"},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
||||
"""
|
||||
Coerce ttsVoiceMap payloads to Dict[str, str].
|
||||
|
||||
UI/clients may send per-locale objects like {"voiceName": "de-DE-Chirp3-HD-Achird"};
|
||||
storage and model field type are locale -> voice id string.
|
||||
"""
|
||||
if value is None:
|
||||
return None
|
||||
if not isinstance(value, dict):
|
||||
return None
|
||||
out: Dict[str, str] = {}
|
||||
for rawKey, rawVal in value.items():
|
||||
key = str(rawKey)
|
||||
if rawVal is None:
|
||||
continue
|
||||
if isinstance(rawVal, str):
|
||||
out[key] = rawVal
|
||||
elif isinstance(rawVal, dict):
|
||||
vn = rawVal.get("voiceName")
|
||||
if vn is not None and str(vn).strip() != "":
|
||||
out[key] = str(vn).strip()
|
||||
else:
|
||||
out[key] = str(rawVal)
|
||||
return out if out else None
|
||||
|
||||
|
||||
class UserVoicePreferences(PowerOnModel):
|
||||
"""User-level voice/language preferences, shared across all features."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
|
||||
userId: str = Field(description="User ID")
|
||||
mandateId: Optional[str] = Field(default=None, description="Mandate scope (None = global for user)")
|
||||
sttLanguage: str = Field(default="de-DE", description="Speech-to-text language code")
|
||||
ttsLanguage: str = Field(default="de-DE", description="Text-to-speech language code")
|
||||
ttsVoice: Optional[str] = Field(default=None, description="Preferred TTS voice identifier")
|
||||
ttsVoiceMap: Optional[Dict[str, str]] = Field(default=None, description="Language-to-voice mapping")
|
||||
translationSourceLanguage: Optional[str] = Field(default=None, description="Source language for translations")
|
||||
translationTargetLanguage: Optional[str] = Field(default=None, description="Target language for translations")
|
||||
|
||||
@field_validator("ttsVoiceMap", mode="before")
|
||||
@classmethod
|
||||
def _validateTtsVoiceMap(cls, value: Any) -> Optional[Dict[str, str]]:
|
||||
return _normalizeTtsVoiceMap(value)
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
"UserVoicePreferences",
|
||||
{"en": "Voice Preferences", "de": "Spracheinstellungen", "fr": "Préférences vocales"},
|
||||
{
|
||||
"id": {"en": "ID", "de": "ID", "fr": "ID"},
|
||||
"userId": {"en": "User ID", "de": "Benutzer-ID", "fr": "ID utilisateur"},
|
||||
"mandateId": {"en": "Mandate ID", "de": "Mandanten-ID", "fr": "ID du mandat"},
|
||||
"sttLanguage": {"en": "STT Language", "de": "STT-Sprache", "fr": "Langue STT"},
|
||||
"ttsLanguage": {"en": "TTS Language", "de": "TTS-Sprache", "fr": "Langue TTS"},
|
||||
"ttsVoice": {"en": "TTS Voice", "de": "TTS-Stimme", "fr": "Voix TTS"},
|
||||
"ttsVoiceMap": {"en": "Voice Map", "de": "Stimmen-Zuordnung", "fr": "Carte des voix"},
|
||||
"translationSourceLanguage": {"en": "Translation Source", "de": "Übersetzung Quelle", "fr": "Langue source"},
|
||||
"translationTargetLanguage": {"en": "Translation Target", "de": "Übersetzung Ziel", "fr": "Langue cible"},
|
||||
},
|
||||
)
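A minimal sketch of the ttsVoiceMap coercion above, assuming the UserVoicePreferences model as defined in this file; the voice names and user id are illustrative only:

# Hedged example — per-locale UI payloads are flattened by the mode="before" validator.
prefs = UserVoicePreferences(
    userId="u-123",  # hypothetical user id
    ttsVoiceMap={
        "de-DE": {"voiceName": "de-DE-Chirp3-HD-Achird"},  # object payload from the UI
        "en-US": "en-US-Neural2-A",                         # already a plain voice id
        "fr-FR": None,                                      # dropped by the normalizer
    },
)
assert prefs.ttsVoiceMap == {
    "de-DE": "de-DE-Chirp3-HD-Achird",
    "en-US": "en-US-Neural2-A",
}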
|
||||
|
|
|
|||
|
|
@ -3,13 +3,13 @@
|
|||
"""Utility datamodels: Prompt, TextMultilingual."""
|
||||
|
||||
from typing import Dict, Optional
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
import uuid
|
||||
|
||||
|
||||
class Prompt(BaseModel):
|
||||
model_config = ConfigDict(extra='allow') # Preserve system fields (_createdBy, _createdAt, etc.)
|
||||
class Prompt(PowerOnModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
mandateId: str = Field(default="", description="ID of the mandate this prompt belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
isSystem: bool = Field(default=False, description="System prompt visible to all users (read-only for non-SysAdmin)", json_schema_extra={"frontend_type": "boolean", "frontend_readonly": True, "frontend_required": False})
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,7 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Voice settings datamodel — re-exported from workspace feature for backward compatibility."""
"""Voice settings datamodel — re-exported from UAM for central voice preferences."""

from modules.features.workspace.datamodelFeatureWorkspace import VoiceSettings
from modules.datamodels.datamodelUam import UserVoicePreferences

__all__ = ["VoiceSettings"]
__all__ = ["UserVoicePreferences"]
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
from typing import List, Dict, Any, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.datamodels.datamodelUtils import TextMultilingual
|
||||
import uuid
|
||||
|
|
@ -48,7 +49,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class AutomationTemplate(BaseModel):
|
||||
class AutomationTemplate(PowerOnModel):
|
||||
"""Automation-Vorlage ohne scharfe Placeholder-Werte (DB-persistiert).
|
||||
|
||||
System-Templates (isSystem=True): Nur durch SysAdmin aenderbar. Alle User koennen lesen.
|
||||
|
|
@ -82,9 +83,6 @@ class AutomationTemplate(BaseModel):
|
|||
description="Feature instance ID (null for system templates, set for instance-scoped templates)",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
# System fields (_createdAt, _createdBy, etc.) are set automatically by the DB connector
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
"AutomationTemplate",
|
||||
{"en": "Automation Template", "ge": "Automation-Vorlage", "fr": "Modèle d'automatisation"},
|
||||
|
|
|
|||
|
|
@ -22,6 +22,13 @@ from modules.shared.configuration import APP_CONFIG
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _automationDefinitionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Strip connector/enrichment keys; only fields defined on AutomationDefinition."""
|
||||
allowed = AutomationDefinition.model_fields.keys()
|
||||
return {k: v for k, v in (data or {}).items() if k in allowed}
|
||||
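For illustration only (the raw keys below are assumptions, not taken from this file): keys that are not declared fields of AutomationDefinition — such as connector system fields or enrichment values — are dropped by the helper above.

# Hedged sketch of _automationDefinitionPayload behaviour.
raw = {"id": "a-1", "sysCreatedBy": "u-9", "mandateName": "Root"}
payload = _automationDefinitionPayload(raw)
# Assuming "id" is a declared AutomationDefinition field, payload keeps it and
# drops sysCreatedBy / mandateName, which are not model fields.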
|
||||
|
||||
# Singleton factory for Automation instances
|
||||
_automationInterfaces = {}
|
||||
|
||||
|
|
@ -100,7 +107,7 @@ class AutomationObjects:
|
|||
if recordId:
|
||||
record = self.db.getRecordset(model, recordFilter={"id": recordId})
|
||||
if record:
|
||||
return record[0].get("_createdBy") == self.userId
|
||||
return record[0].get("sysCreatedBy") == self.userId
|
||||
else:
|
||||
return False # Record not found = no access
|
||||
return True # No recordId needed (e.g., for CREATE)
|
||||
|
|
@ -130,7 +137,7 @@ class AutomationObjects:
|
|||
featureInstanceIds = set()
|
||||
|
||||
for automation in automations:
|
||||
createdBy = automation.get("_createdBy")
|
||||
createdBy = automation.get("sysCreatedBy")
|
||||
if createdBy:
|
||||
userIds.add(createdBy)
|
||||
|
||||
|
|
@ -186,8 +193,8 @@ class AutomationObjects:
|
|||
# Enrich each automation with the fetched data
|
||||
# SECURITY: Never show a fallback name — if lookup fails, show empty string
|
||||
for automation in automations:
|
||||
createdBy = automation.get("_createdBy")
|
||||
automation["_createdByUserName"] = usersMap.get(createdBy, "") if createdBy else ""
|
||||
createdBy = automation.get("sysCreatedBy")
|
||||
automation["sysCreatedByUserName"] = usersMap.get(createdBy, "") if createdBy else ""
|
||||
|
||||
mandateId = automation.get("mandateId")
|
||||
automation["mandateName"] = mandatesMap.get(mandateId, "") if mandateId else ""
|
||||
|
|
@ -295,7 +302,7 @@ class AutomationObjects:
|
|||
|
||||
Args:
|
||||
automationId: ID of the automation to get
|
||||
includeSystemFields: If True, returns raw dict with system fields (_createdBy, etc).
|
||||
includeSystemFields: If True, returns raw dict with system fields (sysCreatedBy, etc).
|
||||
If False (default), returns Pydantic model without system fields.
|
||||
"""
|
||||
try:
|
||||
|
|
@ -330,7 +337,7 @@ class AutomationObjects:
|
|||
return AutomationWithSystemFields(automation)
|
||||
|
||||
# Clean metadata fields and return Pydantic model
|
||||
cleanedRecord = {k: v for k, v in automation.items() if not k.startswith("_")}
|
||||
cleanedRecord = _automationDefinitionPayload(automation)
|
||||
return AutomationDefinition(**cleanedRecord)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting automation definition: {str(e)}")
|
||||
|
|
@ -365,7 +372,7 @@ class AutomationObjects:
|
|||
|
||||
# Ensure database connector has correct userId context
|
||||
if not self.userId:
|
||||
logger.error(f"createAutomationDefinition: userId is not set! Cannot set _createdBy. currentUser={self.currentUser}")
|
||||
logger.error(f"createAutomationDefinition: userId is not set! Cannot set sysCreatedBy. currentUser={self.currentUser}")
|
||||
elif hasattr(self.db, 'updateContext'):
|
||||
try:
|
||||
self.db.updateContext(self.userId)
|
||||
|
|
@ -386,7 +393,7 @@ class AutomationObjects:
|
|||
self._notifyAutomationChanged()
|
||||
|
||||
# Clean metadata fields and return Pydantic model
|
||||
cleanedRecord = {k: v for k, v in createdAutomation.items() if not k.startswith("_")}
|
||||
cleanedRecord = _automationDefinitionPayload(createdAutomation)
|
||||
return AutomationDefinition(**cleanedRecord)
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating automation definition: {str(e)}")
|
||||
|
|
@ -446,7 +453,7 @@ class AutomationObjects:
|
|||
self._notifyAutomationChanged()
|
||||
|
||||
# Clean metadata fields and return Pydantic model
|
||||
cleanedRecord = {k: v for k, v in updatedAutomation.items() if not k.startswith("_")}
|
||||
cleanedRecord = _automationDefinitionPayload(updatedAutomation)
|
||||
return AutomationDefinition(**cleanedRecord)
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating automation definition: {str(e)}")
|
||||
|
|
@ -561,7 +568,7 @@ class AutomationObjects:
|
|||
# Collect unique user IDs
|
||||
userIds = set()
|
||||
for template in templates:
|
||||
createdBy = template.get("_createdBy")
|
||||
createdBy = template.get("sysCreatedBy")
|
||||
if createdBy:
|
||||
userIds.add(createdBy)
|
||||
|
||||
|
|
@ -585,8 +592,8 @@ class AutomationObjects:
|
|||
|
||||
# Apply to templates — SECURITY: no fallback, empty if not found
|
||||
for template in templates:
|
||||
createdBy = template.get("_createdBy")
|
||||
template["_createdByUserName"] = userNameMap.get(createdBy, "") if createdBy else ""
|
||||
createdBy = template.get("sysCreatedBy")
|
||||
template["sysCreatedByUserName"] = userNameMap.get(createdBy, "") if createdBy else ""
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not enrich templates with user names: {e}")
|
||||
|
||||
|
|
|
|||
|
|
@ -227,7 +227,7 @@ def getFeatureDefinition() -> Dict[str, Any]:
|
|||
"code": FEATURE_CODE,
|
||||
"label": FEATURE_LABEL,
|
||||
"icon": FEATURE_ICON,
|
||||
"autoCreateInstance": True, # Automatically create instance in root mandate during bootstrap
|
||||
"autoCreateInstance": False,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -77,8 +77,8 @@ def get_automations(
|
|||
|
||||
# If pagination was requested, result is PaginatedResult
|
||||
# If no pagination, result is List[Dict]
|
||||
# Note: Using JSONResponse to bypass Pydantic validation which would filter out _createdBy
|
||||
# The enriched fields (_createdByUserName, mandateName) are not in the Pydantic model
|
||||
# Note: Using JSONResponse to bypass Pydantic validation which would filter out sysCreatedBy
|
||||
# The enriched fields (sysCreatedByUserName, mandateName) are not in the Pydantic model
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
if paginationParams:
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
from typing import Dict, Any, List, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
import uuid
|
||||
|
||||
|
|
@ -36,6 +37,11 @@ class Automation2Workflow(BaseModel):
|
|||
description="Whether workflow is active",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False},
|
||||
)
|
||||
invocations: List[Dict[str, Any]] = Field(
|
||||
default_factory=list,
|
||||
description="Entry points / starts (manual, form, schedule, webhook, …) configured outside the canvas",
|
||||
json_schema_extra={"frontend_type": "textarea", "frontend_required": False},
|
||||
)
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
|
|
@ -48,11 +54,12 @@ registerModelLabels(
|
|||
"label": {"en": "Label", "de": "Bezeichnung", "fr": "Libellé"},
|
||||
"graph": {"en": "Graph", "de": "Graph", "fr": "Graphe"},
|
||||
"active": {"en": "Active", "de": "Aktiv", "fr": "Actif"},
|
||||
"invocations": {"en": "Starts / Entry points", "de": "Starts / Einstiegspunkte", "fr": "Points d'entrée"},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class Automation2WorkflowRun(BaseModel):
|
||||
class Automation2WorkflowRun(PowerOnModel):
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
description="Primary key",
|
||||
|
|
@ -98,7 +105,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class Automation2HumanTask(BaseModel):
|
||||
class Automation2HumanTask(PowerOnModel):
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
description="Primary key",
|
||||
|
|
|
|||
96
modules/features/automation2/entryPoints.py
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
"""
|
||||
Workflow entry points (Starts) — configuration outside the flow editor.
|
||||
|
||||
Kinds align with run envelope trigger.type where applicable.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
# On-demand (gear: Manueller Trigger, Formular)
|
||||
KINDS_ON_DEMAND = frozenset({"manual", "form", "api"})
|
||||
|
||||
# Always-on (gear: Zeitplan, Immer aktiv, plus legacy listener kinds)
|
||||
KINDS_ALWAYS_ON = frozenset({"schedule", "always_on", "email", "webhook", "event"})
|
||||
|
||||
ALL_KINDS = KINDS_ON_DEMAND | KINDS_ALWAYS_ON
|
||||
|
||||
|
||||
def category_for_kind(kind: str) -> str:
|
||||
if kind in KINDS_ALWAYS_ON:
|
||||
return "always_on"
|
||||
return "on_demand"
|
||||
|
||||
|
||||
def default_manual_entry_point() -> Dict[str, Any]:
|
||||
"""Single default manual start when a workflow has no invocations yet."""
|
||||
return {
|
||||
"id": str(uuid.uuid4()),
|
||||
"kind": "manual",
|
||||
"category": "on_demand",
|
||||
"enabled": True,
|
||||
"title": {
|
||||
"de": "Jetzt ausführen",
|
||||
"en": "Run now",
|
||||
"fr": "Exécuter",
|
||||
},
|
||||
"description": {},
|
||||
"config": {},
|
||||
}
|
||||
|
||||
|
||||
def _normalize_title(title: Any) -> Dict[str, str]:
|
||||
if isinstance(title, dict):
|
||||
return {k: str(v) for k, v in title.items() if v is not None}
|
||||
if isinstance(title, str) and title.strip():
|
||||
return {"de": title, "en": title, "fr": title}
|
||||
return {"de": "Start", "en": "Start", "fr": "Départ"}
|
||||
|
||||
|
||||
def normalize_invocation_entry(raw: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Validate and normalize a single entry point dict."""
|
||||
kind = (raw.get("kind") or "manual").strip()
|
||||
if kind not in ALL_KINDS:
|
||||
kind = "manual"
|
||||
cat = raw.get("category")
|
||||
if cat not in ("on_demand", "always_on"):
|
||||
cat = category_for_kind(kind)
|
||||
eid = raw.get("id") or str(uuid.uuid4())
|
||||
enabled = raw.get("enabled", True)
|
||||
if not isinstance(enabled, bool):
|
||||
enabled = bool(enabled)
|
||||
config = raw.get("config") if isinstance(raw.get("config"), dict) else {}
|
||||
desc = raw.get("description") if isinstance(raw.get("description"), dict) else {}
|
||||
return {
|
||||
"id": str(eid),
|
||||
"kind": kind,
|
||||
"category": cat,
|
||||
"enabled": enabled,
|
||||
"title": _normalize_title(raw.get("title")),
|
||||
"description": desc,
|
||||
"config": config,
|
||||
}
|
||||
|
||||
|
||||
def normalize_invocations_list(items: Optional[List[Any]]) -> List[Dict[str, Any]]:
|
||||
if not items:
|
||||
return [default_manual_entry_point()]
|
||||
out: List[Dict[str, Any]] = []
|
||||
for raw in items:
|
||||
if isinstance(raw, dict):
|
||||
out.append(normalize_invocation_entry(raw))
|
||||
if not out:
|
||||
return [default_manual_entry_point()]
|
||||
return out
|
||||
|
||||
|
||||
# Schedule / cron: wire an external job runner (APScheduler, Celery, system cron) to call
|
||||
# POST .../execute with entryPointId set to a schedule entry — no separate in-process scheduler here yet.
|
||||
|
||||
|
||||
def find_invocation(workflow: Dict[str, Any], entry_point_id: str) -> Optional[Dict[str, Any]]:
|
||||
for inv in workflow.get("invocations") or []:
|
||||
if isinstance(inv, dict) and inv.get("id") == entry_point_id:
|
||||
return inv
|
||||
return None
|
||||
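A short usage sketch for the helpers above (imported from modules.features.automation2.entryPoints, as elsewhere in this commit):

from modules.features.automation2.entryPoints import (
    find_invocation,
    normalize_invocations_list,
)

# An empty or missing invocations list yields a single default manual start.
invocations = normalize_invocations_list(None)
assert invocations[0]["kind"] == "manual"
assert invocations[0]["category"] == "on_demand"

# Known always-on kinds (schedule, webhook, email, …) are categorised accordingly;
# a plain string title is expanded to de/en/fr.
entry = normalize_invocations_list([{"kind": "schedule", "title": "Nightly"}])[0]
assert entry["category"] == "always_on"
assert entry["title"] == {"de": "Nightly", "en": "Nightly", "fr": "Nightly"}

# find_invocation resolves an entry point by id on a workflow dict.
workflow = {"invocations": invocations}
assert find_invocation(workflow, invocations[0]["id"]) is invocations[0]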
|
|
@ -30,6 +30,7 @@ from modules.features.automation2.datamodelFeatureAutomation2 import (
|
|||
Automation2WorkflowRun,
|
||||
Automation2HumanTask,
|
||||
)
|
||||
from modules.features.automation2.entryPoints import normalize_invocations_list
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
|
||||
|
|
@ -49,6 +50,83 @@ def getAutomation2Interface(
|
|||
)
|
||||
|
||||
|
||||
def getAllWorkflowsForScheduling() -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get all active Automation2 workflows that have a schedule entry point (primary invocation).
|
||||
Used by the scheduler to register cron jobs. Does not filter by mandate/instance.
|
||||
"""
|
||||
dbHost = APP_CONFIG.get("DB_HOST", "localhost")
|
||||
dbDatabase = "poweron_automation2"
|
||||
dbUser = APP_CONFIG.get("DB_USER")
|
||||
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
|
||||
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
|
||||
connector = DatabaseConnector(
|
||||
dbHost=dbHost,
|
||||
dbDatabase=dbDatabase,
|
||||
dbUser=dbUser,
|
||||
dbPassword=dbPassword,
|
||||
dbPort=dbPort,
|
||||
userId=None,
|
||||
)
|
||||
if not connector._ensureTableExists(Automation2Workflow):
|
||||
logger.warning("Automation2 schedule: table Automation2Workflow does not exist")
|
||||
return []
|
||||
# Don't filter by active in SQL: existing workflows may have active=NULL.
|
||||
# Treat NULL as active; skip only when active is explicitly False.
|
||||
records = connector.getRecordset(
|
||||
Automation2Workflow,
|
||||
recordFilter=None,
|
||||
)
|
||||
raw_count = len(records) if records else 0
|
||||
result = []
|
||||
for r in records or []:
|
||||
if r.get("active") is False:
|
||||
continue
|
||||
wf = dict(r)
|
||||
wf["invocations"] = normalize_invocations_list(wf.get("invocations"))
|
||||
invocations = wf.get("invocations") or []
|
||||
primary = invocations[0] if invocations else {}
|
||||
if not isinstance(primary, dict):
|
||||
primary = {}
|
||||
|
||||
# Cron comes from graph start node params (trigger.schedule)
|
||||
graph = wf.get("graph") or {}
|
||||
nodes = graph.get("nodes") or []
|
||||
cron = None
|
||||
for n in nodes:
|
||||
if n.get("type") == "trigger.schedule":
|
||||
params = n.get("parameters") or {}
|
||||
cron = params.get("cron")
|
||||
if cron:
|
||||
break
|
||||
|
||||
if not cron or not isinstance(cron, str) or not cron.strip():
|
||||
continue
|
||||
|
||||
# Prefer invocations; if graph has trigger.schedule but invocations say manual, still schedule
|
||||
if primary.get("kind") == "schedule" and primary.get("enabled", True):
|
||||
entry_point_id = primary.get("id")
|
||||
elif invocations and isinstance(invocations[0], dict) and invocations[0].get("id"):
|
||||
entry_point_id = invocations[0].get("id")
|
||||
else:
|
||||
entry_point_id = str(uuid.uuid4())
|
||||
|
||||
result.append({
|
||||
"workflowId": wf.get("id"),
|
||||
"mandateId": wf.get("mandateId"),
|
||||
"featureInstanceId": wf.get("featureInstanceId"),
|
||||
"entryPointId": entry_point_id,
|
||||
"cron": cron.strip(),
|
||||
"workflow": wf,
|
||||
})
|
||||
logger.info(
|
||||
"Automation2 schedule: DB has %d workflow(s), %d active with trigger.schedule+cron",
|
||||
raw_count,
|
||||
len(result),
|
||||
)
|
||||
return result
|
||||
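A rough sketch of how a schedule runner might consume getAllWorkflowsForScheduling(); the croniter package and the polling window are assumptions here, not taken from this commit, and the actual subAutomation2Schedule wiring may differ:

from datetime import datetime, timedelta
from croniter import croniter  # assumed dependency for cron parsing

def due_schedule_jobs(now: datetime, window: timedelta = timedelta(minutes=1)):
    """Yield job dicts whose cron expression fires within [now, now + window)."""
    for job in getAllWorkflowsForScheduling():
        next_fire = croniter(job["cron"], now).get_next(datetime)
        if next_fire < now + window:
            # job carries workflowId, mandateId, featureInstanceId, entryPointId, cron
            yield job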
|
||||
|
||||
class Automation2Objects:
|
||||
"""Interface for Automation2 database operations."""
|
||||
|
||||
|
|
@ -87,18 +165,26 @@ class Automation2Objects:
|
|||
# Workflow CRUD
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
def getWorkflows(self) -> List[Dict[str, Any]]:
|
||||
"""Get all workflows for this mandate and feature instance."""
|
||||
def getWorkflows(self, active: Optional[bool] = None) -> List[Dict[str, Any]]:
|
||||
"""Get all workflows for this mandate and feature instance.
|
||||
Optional active filter: True=only active, False=only inactive, None=all.
|
||||
"""
|
||||
if not self.db._ensureTableExists(Automation2Workflow):
|
||||
return []
|
||||
records = self.db.getRecordset(
|
||||
Automation2Workflow,
|
||||
recordFilter={
|
||||
rf: Dict[str, Any] = {
|
||||
"mandateId": self.mandateId,
|
||||
"featureInstanceId": self.featureInstanceId,
|
||||
},
|
||||
}
|
||||
if active is not None:
|
||||
rf["active"] = active
|
||||
records = self.db.getRecordset(
|
||||
Automation2Workflow,
|
||||
recordFilter=rf,
|
||||
)
|
||||
return [dict(r) for r in records] if records else []
|
||||
rows = [dict(r) for r in records] if records else []
|
||||
for wf in rows:
|
||||
wf["invocations"] = normalize_invocations_list(wf.get("invocations"))
|
||||
return rows
|
||||
|
||||
def getWorkflow(self, workflowId: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get a single workflow by ID."""
|
||||
|
|
@ -114,7 +200,9 @@ class Automation2Objects:
|
|||
)
|
||||
if not records:
|
||||
return None
|
||||
return dict(records[0])
|
||||
wf = dict(records[0])
|
||||
wf["invocations"] = normalize_invocations_list(wf.get("invocations"))
|
||||
return wf
|
||||
|
||||
def createWorkflow(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new workflow."""
|
||||
|
|
@ -122,8 +210,18 @@ class Automation2Objects:
|
|||
data["id"] = str(uuid.uuid4())
|
||||
data["mandateId"] = self.mandateId
|
||||
data["featureInstanceId"] = self.featureInstanceId
|
||||
if "active" not in data or data.get("active") is None:
|
||||
data["active"] = True
|
||||
data["invocations"] = normalize_invocations_list(data.get("invocations"))
|
||||
created = self.db.recordCreate(Automation2Workflow, data)
|
||||
return dict(created)
|
||||
out = dict(created)
|
||||
out["invocations"] = normalize_invocations_list(out.get("invocations"))
|
||||
try:
|
||||
from modules.shared.callbackRegistry import callbackRegistry
|
||||
callbackRegistry.trigger("automation2.workflow.changed")
|
||||
except Exception:
|
||||
pass
|
||||
return out
|
||||
|
||||
def updateWorkflow(self, workflowId: str, data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update an existing workflow."""
|
||||
|
|
@ -133,8 +231,17 @@ class Automation2Objects:
|
|||
# Don't overwrite mandateId/featureInstanceId
|
||||
data.pop("mandateId", None)
|
||||
data.pop("featureInstanceId", None)
|
||||
if "invocations" in data:
|
||||
data["invocations"] = normalize_invocations_list(data.get("invocations"))
|
||||
updated = self.db.recordModify(Automation2Workflow, workflowId, data)
|
||||
return dict(updated)
|
||||
out = dict(updated)
|
||||
out["invocations"] = normalize_invocations_list(out.get("invocations"))
|
||||
try:
|
||||
from modules.shared.callbackRegistry import callbackRegistry
|
||||
callbackRegistry.trigger("automation2.workflow.changed")
|
||||
except Exception:
|
||||
pass
|
||||
return out
|
||||
|
||||
def deleteWorkflow(self, workflowId: str) -> bool:
|
||||
"""Delete a workflow."""
|
||||
|
|
@ -142,6 +249,11 @@ class Automation2Objects:
|
|||
if not existing:
|
||||
return False
|
||||
self.db.recordDelete(Automation2Workflow, workflowId)
|
||||
try:
|
||||
from modules.shared.callbackRegistry import callbackRegistry
|
||||
callbackRegistry.trigger("automation2.workflow.changed")
|
||||
except Exception:
|
||||
pass
|
||||
return True
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
|
@ -209,6 +321,28 @@ class Automation2Objects:
|
|||
)
|
||||
return [dict(r) for r in records] if records else []
|
||||
|
||||
def getRecentCompletedRuns(self, limit: int = 20) -> List[Dict[str, Any]]:
|
||||
"""Get recently completed runs for workflows in this instance (for output display)."""
|
||||
if not self.db._ensureTableExists(Automation2WorkflowRun):
|
||||
return []
|
||||
workflows = self.getWorkflows()
|
||||
wf_ids = [w["id"] for w in workflows if w.get("id")]
|
||||
if not wf_ids:
|
||||
return []
|
||||
records = self.db.getRecordset(
|
||||
Automation2WorkflowRun,
|
||||
recordFilter={"status": "completed"},
|
||||
)
|
||||
if not records:
|
||||
return []
|
||||
runs = [dict(r) for r in records if r.get("workflowId") in wf_ids]
|
||||
wf_by_id = {w["id"]: w for w in workflows}
|
||||
for r in runs:
|
||||
wf = wf_by_id.get(r.get("workflowId"), {})
|
||||
r["workflowLabel"] = wf.get("label") or r.get("workflowId", "")
|
||||
runs.sort(key=lambda x: (x.get("_modifiedAt") or x.get("_createdAt") or 0), reverse=True)
|
||||
return runs[:limit]
|
||||
|
||||
def getRunsWaitingForEmail(self) -> List[Dict[str, Any]]:
|
||||
"""Get all paused runs waiting for a new email (for background poller)."""
|
||||
if not self.db._ensureTableExists(Automation2WorkflowRun):
|
||||
|
|
@ -289,21 +423,36 @@ class Automation2Objects:
|
|||
status: str = None,
|
||||
assigneeId: str = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get tasks with optional filters. AssigneeId filters to that user; None returns all."""
|
||||
"""Get tasks with optional filters.
|
||||
When assigneeId is set: returns tasks assigned to that user OR unassigned (so schedule tasks show up).
|
||||
When assigneeId is None: returns all tasks.
|
||||
"""
|
||||
if not self.db._ensureTableExists(Automation2HumanTask):
|
||||
return []
|
||||
rf = {}
|
||||
base_rf: Dict[str, Any] = {}
|
||||
if workflowId:
|
||||
rf["workflowId"] = workflowId
|
||||
base_rf["workflowId"] = workflowId
|
||||
if runId:
|
||||
rf["runId"] = runId
|
||||
base_rf["runId"] = runId
|
||||
if status:
|
||||
rf["status"] = status
|
||||
base_rf["status"] = status
|
||||
if assigneeId:
|
||||
rf["assigneeId"] = assigneeId
|
||||
rf_assigned = {**base_rf, "assigneeId": assigneeId}
|
||||
rf_unassigned = {**base_rf, "assigneeId": None}
|
||||
records1 = self.db.getRecordset(Automation2HumanTask, recordFilter=rf_assigned)
|
||||
records2 = self.db.getRecordset(Automation2HumanTask, recordFilter=rf_unassigned)
|
||||
seen = set()
|
||||
items = []
|
||||
for r in (records1 or []) + (records2 or []):
|
||||
rec = dict(r)
|
||||
tid = rec.get("id")
|
||||
if tid and tid not in seen:
|
||||
seen.add(tid)
|
||||
items.append(rec)
|
||||
else:
|
||||
records = self.db.getRecordset(
|
||||
Automation2HumanTask,
|
||||
recordFilter=rf if rf else None,
|
||||
recordFilter=base_rf if base_rf else None,
|
||||
)
|
||||
items = [dict(r) for r in records] if records else []
|
||||
workflows = {w["id"]: w for w in self.getWorkflows()}
|
||||
|
|
|
|||
|
|
@ -19,6 +19,8 @@ REQUIRED_SERVICES = [
|
|||
{"serviceKey": "ai", "meta": {"usage": "AI nodes"}},
|
||||
{"serviceKey": "extraction", "meta": {"usage": "Workflow method actions"}},
|
||||
{"serviceKey": "sharepoint", "meta": {"usage": "SharePoint actions"}},
|
||||
{"serviceKey": "clickup", "meta": {"usage": "ClickUp actions"}},
|
||||
{"serviceKey": "generation", "meta": {"usage": "file.create document rendering"}},
|
||||
]
|
||||
FEATURE_LABEL = {"en": "Automation 2", "de": "Automatisierung 2", "fr": "Automatisation 2"}
|
||||
FEATURE_ICON = "mdi-sitemap"
|
||||
|
|
@ -60,12 +62,25 @@ RESOURCE_OBJECTS = [
|
|||
]
|
||||
|
||||
TEMPLATE_ROLES = [
|
||||
{
|
||||
"roleLabel": "automation2-viewer",
|
||||
"description": {
|
||||
"en": "Automation2 Viewer - View workflows (read-only)",
|
||||
"de": "Automation2 Betrachter - Workflows ansehen (nur lesen)",
|
||||
"fr": "Visualiseur Automation2 - Consulter les workflows (lecture seule)",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.automation2.workflows", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.automation2.workflows-tasks", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "automation2-user",
|
||||
"description": {
|
||||
"en": "Automation2 User - Use automation2 flow builder",
|
||||
"de": "Automation2 Benutzer - Flow-Builder nutzen",
|
||||
"fr": "Utilisateur Automation2 - Utiliser le flow builder"
|
||||
"fr": "Utilisateur Automation2 - Utiliser le flow builder",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.automation2.editor", "view": True},
|
||||
|
|
@ -75,7 +90,20 @@ TEMPLATE_ROLES = [
|
|||
{"context": "RESOURCE", "item": "resource.feature.automation2.node-types", "view": True},
|
||||
{"context": "RESOURCE", "item": "resource.feature.automation2.execute", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
|
||||
]
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "automation2-admin",
|
||||
"description": {
|
||||
"en": "Automation2 Admin - Full UI and API for the instance; data remains user-scoped (MY)",
|
||||
"de": "Automation2 Admin - Volle UI und API für die Instanz; Daten weiterhin benutzerspezifisch (MY)",
|
||||
"fr": "Administrateur Automation2 - UI et API complets pour l'instance; donnees limitees a l'utilisateur (MY)",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": None, "view": True},
|
||||
{"context": "RESOURCE", "item": None, "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
|
|
@ -157,6 +185,8 @@ class _Automation2ServiceHub:
|
|||
utils = None
|
||||
extraction = None
|
||||
sharepoint = None
|
||||
clickup = None
|
||||
generation = None
|
||||
|
||||
|
||||
async def onStart(eventUser) -> None:
|
||||
|
|
@ -175,7 +205,7 @@ def getFeatureDefinition() -> Dict[str, Any]:
|
|||
"code": FEATURE_CODE,
|
||||
"label": FEATURE_LABEL,
|
||||
"icon": FEATURE_ICON,
|
||||
"autoCreateInstance": True,
|
||||
"autoCreateInstance": False,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -3,18 +3,20 @@
|
|||
|
||||
from .triggers import TRIGGER_NODES
|
||||
from .flow import FLOW_NODES
|
||||
from .data import DATA_NODES
|
||||
from .input import INPUT_NODES
|
||||
from .ai import AI_NODES
|
||||
from .email import EMAIL_NODES
|
||||
from .sharepoint import SHAREPOINT_NODES
|
||||
from .clickup import CLICKUP_NODES
|
||||
from .file import FILE_NODES
|
||||
|
||||
STATIC_NODE_TYPES = (
|
||||
TRIGGER_NODES
|
||||
+ FLOW_NODES
|
||||
+ DATA_NODES
|
||||
+ INPUT_NODES
|
||||
+ AI_NODES
|
||||
+ EMAIL_NODES
|
||||
+ SHAREPOINT_NODES
|
||||
+ CLICKUP_NODES
|
||||
+ FILE_NODES
|
||||
)
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ AI_NODES = [
|
|||
"description": {"en": "Enter a prompt and AI does something", "de": "Prompt eingeben und KI führt aus", "fr": "Entrer une invite et l'IA exécute"},
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "description": {"en": "AI prompt", "de": "KI-Prompt", "fr": "Invite IA"}},
|
||||
{"name": "resultType", "type": "string", "required": False, "description": {"en": "Output format (txt, json, md, etc.)", "de": "Ausgabeformat", "fr": "Format de sortie"}, "default": "txt"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -85,7 +84,6 @@ AI_NODES = [
|
|||
"description": {"en": "Generate document from prompt", "de": "Dokument aus Prompt generieren", "fr": "Générer un document"},
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "description": {"en": "Generation prompt", "de": "Generierungs-Prompt", "fr": "Invite de génération"}},
|
||||
{"name": "format", "type": "string", "required": False, "description": {"en": "Output format", "de": "Ausgabeformat", "fr": "Format de sortie"}, "default": "docx"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
|
|||
227
modules/features/automation2/nodeDefinitions/clickup.py
Normal file
|
|
@ -0,0 +1,227 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp nodes — map to MethodClickup actions."""
|
||||
|
||||
CLICKUP_NODES = [
|
||||
{
|
||||
"id": "clickup.searchTasks",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Search tasks", "de": "Aufgaben suchen", "fr": "Rechercher tâches"},
|
||||
"description": {
|
||||
"en": "Search tasks in a workspace (team)",
|
||||
"de": "Aufgaben in einem Workspace suchen",
|
||||
"fr": "Rechercher des tâches dans un espace",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "teamId", "type": "string", "required": True, "description": {"en": "Workspace (team) ID", "de": "Team-/Workspace-ID", "fr": "ID équipe"}},
|
||||
{"name": "query", "type": "string", "required": True, "description": {"en": "Search query", "de": "Suchbegriff", "fr": "Requête"}},
|
||||
{"name": "page", "type": "number", "required": False, "description": {"en": "Page", "de": "Seite", "fr": "Page"}, "default": 0},
|
||||
{
|
||||
"name": "listId",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {
|
||||
"en": "If set, search this list via list API (not team search).",
|
||||
"de": "Wenn gesetzt: Suche in dieser Liste (Listen-API, nicht Team-Suche).",
|
||||
"fr": "Si défini : recherche dans cette liste (API liste).",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "includeClosed",
|
||||
"type": "boolean",
|
||||
"required": False,
|
||||
"default": False,
|
||||
"description": {
|
||||
"en": "With listId: include closed tasks.",
|
||||
"de": "Mit Liste: erledigte Aufgaben einbeziehen.",
|
||||
"fr": "Avec liste : inclure les tâches terminées.",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "fullTaskData",
|
||||
"type": "boolean",
|
||||
"required": False,
|
||||
"default": False,
|
||||
"description": {
|
||||
"en": "Return full ClickUp API JSON per task (very large). Default: slim fields only.",
|
||||
"de": "Vollständige ClickUp-Rohdaten pro Task (sehr groß). Standard: nur schlanke Felder.",
|
||||
"fr": "Réponse brute complète (très volumineuse). Par défaut : champs réduits.",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "matchNameOnly",
|
||||
"type": "boolean",
|
||||
"required": False,
|
||||
"default": True,
|
||||
"description": {
|
||||
"en": "Keep only tasks whose title contains the search query (default: on).",
|
||||
"de": "Nur Aufgaben, deren Titel den Suchbegriff enthält (Standard: an).",
|
||||
"fr": "Ne garder que les tâches dont le titre contient la requête (défaut : oui).",
|
||||
},
|
||||
},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-magnify", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "searchTasks",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"teamId": "teamId",
|
||||
"query": "query",
|
||||
"page": "page",
|
||||
"listId": "listId",
|
||||
"fullTaskData": "fullTaskData",
|
||||
"matchNameOnly": "matchNameOnly",
|
||||
"includeClosed": "includeClosed",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.listTasks",
|
||||
"category": "clickup",
|
||||
"label": {"en": "List tasks", "de": "Aufgaben auflisten", "fr": "Lister les tâches"},
|
||||
"description": {
|
||||
"en": "List tasks in a list (pick list path from browse)",
|
||||
"de": "Aufgaben einer Liste auflisten (Pfad aus Browse)",
|
||||
"fr": "Lister les tâches d'une liste",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "path", "type": "string", "required": True, "description": {"en": "Virtual path to list /team/.../list/...", "de": "Pfad zur Liste", "fr": "Chemin vers la liste"}},
|
||||
{"name": "page", "type": "number", "required": False, "description": {"en": "Page", "de": "Seite", "fr": "Page"}, "default": 0},
|
||||
{"name": "includeClosed", "type": "boolean", "required": False, "description": {"en": "Include closed", "de": "Erledigte einbeziehen", "fr": "Inclure terminées"}, "default": False},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "listTasks",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"path": "pathQuery",
|
||||
"page": "page",
|
||||
"includeClosed": "includeClosed",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.getTask",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Get task", "de": "Aufgabe abrufen", "fr": "Obtenir la tâche"},
|
||||
"description": {"en": "Get one task by ID or path", "de": "Eine Aufgabe abrufen", "fr": "Obtenir une tâche"},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "taskId", "type": "string", "required": False, "description": {"en": "Task ID", "de": "Task-ID", "fr": "ID tâche"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Or path .../task/{id}", "de": "Oder Pfad .../task/{id}", "fr": "Ou chemin .../task/{id}"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "getTask",
|
||||
"_paramMap": {"connectionId": "connectionReference", "taskId": "taskId", "path": "pathQuery"},
|
||||
},
|
||||
{
|
||||
"id": "clickup.createTask",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Create task", "de": "Aufgabe erstellen", "fr": "Créer une tâche"},
|
||||
"description": {"en": "Create a task in a list", "de": "Aufgabe in einer Liste erstellen", "fr": "Créer une tâche dans une liste"},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "teamId", "type": "string", "required": False, "description": {"en": "Workspace (team) for list picker", "de": "Workspace für Listen-Auswahl", "fr": "Équipe"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Optional path /team/.../list/...", "de": "Optional: Pfad zur Liste", "fr": "Chemin optionnel"}},
|
||||
{"name": "listId", "type": "string", "required": False, "description": {"en": "List ID", "de": "Listen-ID", "fr": "ID liste"}},
|
||||
{"name": "name", "type": "string", "required": True, "description": {"en": "Task name", "de": "Name", "fr": "Nom"}},
|
||||
{"name": "description", "type": "string", "required": False, "description": {"en": "Description", "de": "Beschreibung", "fr": "Description"}},
|
||||
{"name": "taskStatus", "type": "string", "required": False, "description": {"en": "Status (list status name)", "de": "Status (wie in der Liste)", "fr": "Statut"}},
|
||||
{"name": "taskPriority", "type": "string", "required": False, "description": {"en": "1–4 or empty", "de": "1–4 oder leer", "fr": "1–4"}},
|
||||
{"name": "taskDueDateMs", "type": "string", "required": False, "description": {"en": "Due date (Unix ms)", "de": "Fälligkeit (ms)", "fr": "Échéance (ms)"}},
|
||||
{"name": "taskAssigneeIds", "type": "object", "required": False, "description": {"en": "Assignee user ids", "de": "Zugewiesene (User-IDs)", "fr": "Assignés"}},
|
||||
{"name": "taskTimeEstimateMs", "type": "string", "required": False, "description": {"en": "Time estimate (ms)", "de": "Zeitschätzung (ms)", "fr": "Estimation (ms)"}},
|
||||
{"name": "taskTimeEstimateHours", "type": "string", "required": False, "description": {"en": "Time estimate (hours)", "de": "Zeitschätzung (Stunden)", "fr": "Heures"}},
|
||||
{"name": "customFieldValues", "type": "object", "required": False, "description": {"en": "Custom field id → value", "de": "Benutzerdefinierte Felder", "fr": "Champs personnalisés"}},
|
||||
{"name": "taskFields", "type": "string", "required": False, "description": {"en": "Extra JSON (advanced)", "de": "Zusätzliches JSON (fortgeschritten)", "fr": "JSON avancé"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "createTask",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"teamId": "teamId",
|
||||
"path": "pathQuery",
|
||||
"listId": "listId",
|
||||
"name": "name",
|
||||
"description": "description",
|
||||
"taskStatus": "taskStatus",
|
||||
"taskPriority": "taskPriority",
|
||||
"taskDueDateMs": "taskDueDateMs",
|
||||
"taskAssigneeIds": "taskAssigneeIds",
|
||||
"taskTimeEstimateMs": "taskTimeEstimateMs",
|
||||
"taskTimeEstimateHours": "taskTimeEstimateHours",
|
||||
"customFieldValues": "customFieldValues",
|
||||
"taskFields": "taskFields",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.updateTask",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Update task", "de": "Aufgabe aktualisieren", "fr": "Mettre à jour la tâche"},
|
||||
"description": {
|
||||
"en": "Update task fields (rows or JSON)",
|
||||
"de": "Felder der Aufgabe ändern (Zeilen oder JSON)",
|
||||
"fr": "Mettre à jour les champs (lignes ou JSON)",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "taskId", "type": "string", "required": False, "description": {"en": "Task ID", "de": "Task-ID", "fr": "ID tâche"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Or path to task", "de": "Oder Pfad", "fr": "Ou chemin"}},
|
||||
{
|
||||
"name": "taskUpdateEntries",
|
||||
"type": "object",
|
||||
"required": False,
|
||||
"description": {
|
||||
"en": "List of {fieldKey, value, customFieldId?}",
|
||||
"de": "Liste der zu ändernden Felder (fieldKey, value, optional customFieldId)",
|
||||
"fr": "Liste de champs à mettre à jour",
|
||||
},
|
||||
},
|
||||
{"name": "taskUpdate", "type": "string", "required": False, "description": {"en": "JSON body for API (optional if rows set)", "de": "JSON für API (optional wenn Zeilen gesetzt)", "fr": "Corps JSON"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "updateTask",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"taskId": "taskId",
|
||||
"path": "path",
|
||||
"taskUpdate": "taskUpdate",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.uploadAttachment",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Upload attachment", "de": "Anhang hochladen", "fr": "Téléverser pièce jointe"},
|
||||
"description": {"en": "Upload file to a task (upstream file)", "de": "Datei an Task anhängen", "fr": "Joindre un fichier à la tâche"},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "taskId", "type": "string", "required": False, "description": {"en": "Task ID", "de": "Task-ID", "fr": "ID tâche"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Or path to task", "de": "Oder Pfad", "fr": "Ou chemin"}},
|
||||
{"name": "fileName", "type": "string", "required": False, "description": {"en": "File name", "de": "Dateiname", "fr": "Nom du fichier"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-attachment", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "uploadAttachment",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"taskId": "taskId",
|
||||
"path": "path",
|
||||
"fileName": "fileName",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Data transformation node definitions.
|
||||
|
||||
DATA_NODES = [
|
||||
{
|
||||
"id": "data.setFields",
|
||||
"category": "data",
|
||||
"label": {"en": "Set Fields", "de": "Felder setzen", "fr": "Définir champs"},
|
||||
"description": {"en": "Set or override fields on payload", "de": "Felder setzen oder überschreiben", "fr": "Définir ou écraser des champs"},
|
||||
"parameters": [
|
||||
{"name": "fields", "type": "object", "required": True, "description": {"en": "Key-value pairs", "de": "Schlüssel-Wert-Paare", "fr": "Paires clé-valeur"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-pencil", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.filter",
|
||||
"category": "data",
|
||||
"label": {"en": "Filter", "de": "Filtern", "fr": "Filtrer"},
|
||||
"description": {"en": "Filter array by condition", "de": "Array nach Bedingung filtern", "fr": "Filtrer tableau par condition"},
|
||||
"parameters": [
|
||||
{"name": "condition", "type": "string", "required": True, "description": {"en": "Expression (e.g. item.active == true)", "de": "Bedingung", "fr": "Condition"}},
|
||||
{"name": "itemsPath", "type": "string", "required": False, "description": {"en": "Path to array", "de": "Pfad zum Array", "fr": "Chemin vers le tableau"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-filter", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.parseJson",
|
||||
"category": "data",
|
||||
"label": {"en": "Parse JSON", "de": "JSON parsen", "fr": "Parser JSON"},
|
||||
"description": {"en": "Parse JSON string to object", "de": "JSON-String in Objekt parsen", "fr": "Parser chaîne JSON en objet"},
|
||||
"parameters": [
|
||||
{"name": "jsonPath", "type": "string", "required": False, "description": {"en": "Path to JSON string (default: input)", "de": "Pfad zum JSON", "fr": "Chemin vers JSON"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-code-json", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.template",
|
||||
"category": "data",
|
||||
"label": {"en": "Template / Interpolation", "de": "Vorlage / Interpolation", "fr": "Modèle / Interpolation"},
|
||||
"description": {"en": "Text with {{placeholder}} substitution", "de": "Text mit {{platzhalter}}-Ersetzung", "fr": "Texte avec substitution {{placeholder}}"},
|
||||
"parameters": [
|
||||
{"name": "template", "type": "string", "required": True, "description": {"en": "Template (use {{path}} for values)", "de": "Vorlage", "fr": "Modèle"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-format-text", "color": "#673AB7"},
|
||||
},
|
||||
]
|
||||
60
modules/features/automation2/nodeDefinitions/file.py
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# File node definitions - create files from context (e.g. from AI nodes).
|
||||
|
||||
FILE_NODES = [
|
||||
{
|
||||
"id": "file.create",
|
||||
"category": "file",
|
||||
"label": {"en": "Create File", "de": "Datei erstellen", "fr": "Créer fichier"},
|
||||
"description": {
|
||||
"en": "Create a file from context (text/markdown from AI). Configurable format and style.",
|
||||
"de": "Erstellt eine Datei aus Kontext (Text/Markdown von KI). Format und Stil konfigurierbar.",
|
||||
"fr": "Crée un fichier à partir du contexte. Format et style configurables.",
|
||||
},
|
||||
"parameters": [
|
||||
{
|
||||
"name": "contentSources",
|
||||
"type": "json",
|
||||
"required": False,
|
||||
"description": {
|
||||
"en": "Array of context refs (e.g. AI, form). Concatenated in order. Empty = from connected node.",
|
||||
"de": "Liste von Kontext-Quellen (z.B. KI, Formular). Werden nacheinander zusammengefügt. Leer = vom verbundenen Node.",
|
||||
"fr": "Liste de sources de contexte. Concaténées dans l'ordre. Vide = du noeud connecté.",
|
||||
},
|
||||
"default": [],
|
||||
},
|
||||
{
|
||||
"name": "outputFormat",
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"description": {"en": "Output format", "de": "Ausgabeformat", "fr": "Format de sortie"},
|
||||
"default": "docx",
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Document title", "de": "Dokumenttitel", "fr": "Titre du document"},
|
||||
},
|
||||
{
|
||||
"name": "templateName",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Style preset: default, corporate, minimal", "de": "Stil-Vorlage", "fr": "Prését style"},
|
||||
},
|
||||
{
|
||||
"name": "language",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Language code (de, en, fr)", "de": "Sprachcode", "fr": "Code langue"},
|
||||
"default": "de",
|
||||
},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3"},
|
||||
"_method": "file",
|
||||
"_action": "create",
|
||||
"_paramMap": {},
|
||||
},
|
||||
]
|
||||
|
|
@ -12,6 +12,7 @@ FLOW_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 2,
|
||||
"outputLabels": {"en": ["Yes", "No"], "de": ["Ja", "Nein"], "fr": ["Oui", "Non"]},
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-source-branch", "color": "#FF9800"},
|
||||
},
|
||||
|
|
@ -29,19 +30,6 @@ FLOW_NODES = [
|
|||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.merge",
|
||||
"category": "flow",
|
||||
"label": {"en": "Merge", "de": "Zusammenführen", "fr": "Fusionner"},
|
||||
"description": {"en": "Merge multiple inputs", "de": "Mehrere Eingaben zusammenführen", "fr": "Fusionner plusieurs entrées"},
|
||||
"parameters": [
|
||||
{"name": "mode", "type": "string", "required": False, "description": {"en": "append | combine", "de": "Modus", "fr": "Mode"}},
|
||||
],
|
||||
"inputs": 2,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-merge", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.loop",
|
||||
"category": "flow",
|
||||
|
|
@ -55,28 +43,4 @@ FLOW_NODES = [
|
|||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-repeat", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.wait",
|
||||
"category": "flow",
|
||||
"label": {"en": "Wait / Delay", "de": "Warten / Verzögerung", "fr": "Attendre / Délai"},
|
||||
"description": {"en": "Pause for duration", "de": "Pause für Dauer", "fr": "Pause pour durée"},
|
||||
"parameters": [
|
||||
{"name": "seconds", "type": "number", "required": True, "description": {"en": "Seconds to wait", "de": "Sekunden", "fr": "Secondes"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-timer", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.stop",
|
||||
"category": "flow",
|
||||
"label": {"en": "Stop / Terminate", "de": "Stopp / Beenden", "fr": "Arrêter / Terminer"},
|
||||
"description": {"en": "Stop workflow execution", "de": "Workflow-Ausführung beenden", "fr": "Arrêter l'exécution"},
|
||||
"parameters": [],
|
||||
"inputs": 1,
|
||||
"outputs": 0,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-stop", "color": "#F44336"},
|
||||
},
|
||||
]
|
||||
|
|
|
|||
|
|
@ -12,7 +12,11 @@ INPUT_NODES = [
|
|||
"name": "fields",
|
||||
"type": "json",
|
||||
"required": True,
|
||||
"description": {"en": "Form fields: [{name, type, label, required, options?}]", "de": "Formularfelder", "fr": "Champs du formulaire"},
|
||||
"description": {
|
||||
"en": "Form fields: [{name, type, label, required, options?}]. type may include clickup_tasks with clickupConnectionId + clickupListId for a ClickUp task dropdown (value {add, rem}).",
|
||||
"de": "Formularfelder. type: u. a. clickup_tasks mit clickupConnectionId und clickupListId für ClickUp-Aufgaben-Dropdown (Wert wie Relationship-Feld).",
|
||||
"fr": "Champs du formulaire",
|
||||
},
|
||||
"default": [],
|
||||
},
|
||||
],
|
||||
|
|
@ -42,7 +46,8 @@ INPUT_NODES = [
|
|||
"label": {"en": "Upload", "de": "Upload", "fr": "Téléversement"},
|
||||
"description": {"en": "User uploads file(s)", "de": "Benutzer lädt Datei(en) hoch", "fr": "L'utilisateur téléverse des fichiers"},
|
||||
"parameters": [
|
||||
{"name": "accept", "type": "string", "required": False, "description": {"en": "MIME types (e.g. .pdf,image/*)", "de": "MIME-Typen", "fr": "Types MIME"}, "default": ""},
|
||||
{"name": "accept", "type": "string", "required": False, "description": {"en": "Accept string for file input (e.g. .pdf,image/*)", "de": "Accept-String für Dateiauswahl", "fr": "Chaîne accept"}, "default": ""},
|
||||
{"name": "allowedTypes", "type": "json", "required": False, "description": {"en": "Selected file types (from UI multi-select)", "de": "Ausgewählte Dateitypen", "fr": "Types sélectionnés"}, "default": []},
|
||||
{"name": "maxSize", "type": "number", "required": False, "description": {"en": "Max file size in MB", "de": "Max. Dateigröße in MB", "fr": "Taille max en Mo"}, "default": 10},
|
||||
{"name": "multiple", "type": "boolean", "required": False, "description": {"en": "Allow multiple files", "de": "Mehrere Dateien erlauben", "fr": "Autoriser plusieurs fichiers"}, "default": False},
|
||||
],
|
||||
|
|
|
|||
|
|
@ -1,12 +1,16 @@
# Copyright (c) 2025 Patrick Motsch
# Trigger node definitions - workflow entry points.
# Canvas start nodes — variant reflects workflow configuration (gear in editor).

TRIGGER_NODES = [
{
"id": "trigger.manual",
"category": "trigger",
"label": {"en": "Manual Trigger", "de": "Manueller Trigger", "fr": "Déclencheur manuel"},
"description": {"en": "Start workflow on button press", "de": "Startet den Workflow bei Knopfdruck", "fr": "Démarre le workflow sur clic"},
"label": {"en": "Start", "de": "Start", "fr": "Départ"},
"description": {
"en": "Manual, API, or background triggers (webhook, email, …).",
"de": "Manuell, API oder Hintergrund-Starts (Webhook, E-Mail, …).",
"fr": "Manuel, API ou déclencheurs en arrière-plan.",
},
"parameters": [],
"inputs": 0,
"outputs": 1,

@ -14,29 +18,47 @@ TRIGGER_NODES = [
"meta": {"icon": "mdi-play", "color": "#4CAF50"},
},
{
"id": "trigger.schedule",
"id": "trigger.form",
"category": "trigger",
"label": {"en": "Schedule", "de": "Zeitplan", "fr": "Planification"},
"description": {"en": "Run on a cron schedule", "de": "Läuft nach Cron-Zeitplan", "fr": "S'exécute selon un cron"},
"parameters": [
{"name": "cron", "type": "string", "required": True, "description": {"en": "Cron expression (e.g. 0 9 * * * for daily at 9)", "de": "Cron-Ausdruck", "fr": "Expression cron"}},
],
"inputs": 0,
"outputs": 1,
"executor": "trigger",
"meta": {"icon": "mdi-clock", "color": "#2196F3"},
"label": {"en": "Start (form)", "de": "Start (Formular)", "fr": "Départ (formulaire)"},
"description": {
"en": "Form fields are filled at run time; configure fields on this node.",
"de": "Felder werden beim Start befüllt; konfigurieren Sie die Felder auf dieser Node.",
"fr": "Les champs sont remplis au démarrage.",
},
{
"id": "trigger.formSubmit",
"category": "trigger",
"label": {"en": "Form Submit", "de": "Formular-Absendung", "fr": "Soumission formulaire"},
"description": {"en": "Start when form is submitted", "de": "Startet bei Formular-Absendung", "fr": "Démarre à la soumission du formulaire"},
"parameters": [
{"name": "formId", "type": "string", "required": True, "description": {"en": "Form identifier", "de": "Formular-ID", "fr": "Identifiant du formulaire"}},
{
"name": "formFields",
"type": "json",
"required": False,
"description": {"en": "Field definitions", "de": "Felddefinitionen", "fr": "Définitions"},
},
],
"inputs": 0,
"outputs": 1,
"executor": "trigger",
"meta": {"icon": "mdi-form-select", "color": "#9C27B0"},
},
{
"id": "trigger.schedule",
"category": "trigger",
"label": {"en": "Start (schedule)", "de": "Start (Zeitplan)", "fr": "Départ (planification)"},
"description": {
"en": "Cron expression for scheduled runs (configure on this node).",
"de": "Cron-Ausdruck für geplante Läufe.",
"fr": "Expression cron pour les exécutions planifiées.",
},
"parameters": [
{
"name": "cron",
"type": "string",
"required": False,
"description": {"en": "Cron expression", "de": "Cron-Ausdruck", "fr": "Expression cron"},
},
],
"inputs": 0,
"outputs": 1,
"executor": "trigger",
"meta": {"icon": "mdi-clock", "color": "#2196F3"},
},
]

@ -36,6 +36,11 @@ def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
out["label"] = node["label"].get(lang, node["label"].get("en", str(node["label"])))
if isinstance(node.get("description"), dict):
out["description"] = node["description"].get(lang, node["description"].get("en", str(node["description"])))
ol = node.get("outputLabels")
if isinstance(ol, dict) and ol:
first = next(iter(ol.values()), None)
if isinstance(first, (list, tuple)):
out["outputLabels"] = ol.get(lang, ol.get("en", list(first)))
params = []
for p in node.get("parameters", []):
pc = dict(p)

@ -61,8 +66,10 @@ def getNodeTypesForApi(
{"id": "flow", "label": {"en": "Flow", "de": "Ablauf", "fr": "Flux"}},
{"id": "data", "label": {"en": "Data", "de": "Daten", "fr": "Données"}},
{"id": "ai", "label": {"en": "AI", "de": "KI", "fr": "IA"}},
{"id": "file", "label": {"en": "File", "de": "Datei", "fr": "Fichier"}},
{"id": "email", "label": {"en": "Email", "de": "E-Mail", "fr": "Email"}},
{"id": "sharepoint", "label": {"en": "SharePoint", "de": "SharePoint", "fr": "SharePoint"}},
{"id": "clickup", "label": {"en": "ClickUp", "de": "ClickUp", "fr": "ClickUp"}},
]
return {"nodeTypes": localized, "categories": categories}

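A small sketch of the localization behaviour added above (not part of the commit): dict-valued description and outputLabels resolve to the requested language and fall back to English, mirroring _localizeNode.

# Sketch only - resolves localized node metadata the same way _localizeNode does.
node = {
    "label": {"en": "Start", "de": "Start", "fr": "Départ"},
    "description": {"en": "Manual, API, or background triggers.", "de": "Manuell, API oder Hintergrund-Starts."},
    "outputLabels": {"en": ["next"], "de": ["weiter"]},
}
lang = "de"
label = node["label"].get(lang, node["label"].get("en"))
description = node["description"].get(lang, node["description"].get("en"))
output_labels = node["outputLabels"].get(lang, node["outputLabels"].get("en"))
print(label, description, output_labels)  # Start  Manuell, API oder Hintergrund-Starts.  ['weiter']
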
@ -5,6 +5,8 @@ Automation2 routes - node-types, execute, workflows, runs, tasks, connections, b
"""

import logging
from typing import Any, Dict, Optional

from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPException
from fastapi.responses import JSONResponse
from modules.auth import limiter, getRequestContext, RequestContext

@ -13,9 +15,75 @@ from modules.features.automation2.mainAutomation2 import getAutomation2Services
from modules.features.automation2.nodeRegistry import getNodeTypesForApi
from modules.features.automation2.interfaceFeatureAutomation2 import getAutomation2Interface
from modules.workflows.automation2.executionEngine import executeGraph
from modules.workflows.automation2.runEnvelope import (
default_run_envelope,
merge_run_envelope,
normalize_run_envelope,
)
from modules.features.automation2.entryPoints import find_invocation

logger = logging.getLogger(__name__)


def _build_execute_run_envelope(
body: Dict[str, Any],
workflow: Optional[Dict[str, Any]],
user_id: Optional[str],
) -> Dict[str, Any]:
"""Build normalized run envelope from POST /execute body."""
if isinstance(body.get("runEnvelope"), dict):
env = normalize_run_envelope(body["runEnvelope"], user_id=user_id)
pl = body.get("payload")
if isinstance(pl, dict):
env = merge_run_envelope(env, {"payload": pl})
return env

entry_point_id = body.get("entryPointId")
if entry_point_id:
if not workflow:
raise HTTPException(
status_code=400,
detail="entryPointId requires a saved workflow (workflowId must refer to a stored workflow)",
)
inv = find_invocation(workflow, entry_point_id)
if not inv:
raise HTTPException(status_code=400, detail="entryPointId not found on workflow")
if not inv.get("enabled", True):
raise HTTPException(status_code=400, detail="entry point is disabled")
kind = inv.get("kind", "manual")
trig_map = {
"manual": "manual",
"form": "form",
"schedule": "schedule",
"always_on": "event",
"email": "email",
"webhook": "webhook",
"api": "api",
"event": "event",
}
trig = trig_map.get(kind, "manual")
title = inv.get("title") or {}
label = ""
if isinstance(title, dict):
label = title.get("en") or title.get("de") or ""
elif isinstance(title, str):
label = title
base = default_run_envelope(
trig,
entry_point_id=inv.get("id"),
entry_point_label=label or None,
)
pl = body.get("payload")
if isinstance(pl, dict):
base = merge_run_envelope(base, {"payload": pl})
return normalize_run_envelope(base, user_id=user_id)

env = normalize_run_envelope(None, user_id=user_id)
pl = body.get("payload")
if isinstance(pl, dict):
env = merge_run_envelope(env, {"payload": pl})
return env

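For orientation, a request body like the one below would take the entryPointId branch of _build_execute_run_envelope; the IDs are placeholders and the exact envelope shape depends on default_run_envelope, which is not part of this diff.

# Hypothetical POST /execute body (placeholder IDs):
body = {
    "workflowId": "wf-placeholder",
    "entryPointId": "ep-placeholder",
    "payload": {"customer": "ACME", "priority": "high"},
}
# The helper looks up the entry point on the stored workflow, maps its kind
# (e.g. "form" or "webhook") to a trigger type via trig_map, and merges
# body["payload"] into the normalized run envelope.
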
router = APIRouter(
prefix="/api/automation2",
tags=["Automation2"],

@ -55,6 +123,26 @@ def get_automation2_info(
}


@router.post("/{instanceId}/schedule-sync")
@limiter.limit("10/minute")
def post_schedule_sync(
request: Request,
instanceId: str = Path(..., description="Feature instance ID"),
context: RequestContext = Depends(getRequestContext),
) -> dict:
"""Manually trigger schedule sync (re-register cron jobs for all schedule workflows)."""
_validateInstanceAccess(instanceId, context)
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.workflows.automation2.subAutomation2Schedule import sync_automation2_schedule_events

root = getRootInterface()
event_user = root.getUserByUsername("event")
if not event_user:
return {"success": False, "error": "Event user not available", "synced": 0}
result = sync_automation2_schedule_events(event_user)
return {"success": True, **result}

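A client-side sketch of calling the new schedule-sync endpoint; host, instance ID, and auth header are assumptions, the path comes from the route above.

# Assumed usage (placeholders for host, instance ID, and token):
import httpx

resp = httpx.post(
    "https://example.invalid/api/automation2/INSTANCE_ID/schedule-sync",
    headers={"Authorization": "Bearer TOKEN"},
)
print(resp.json())  # {"success": True, ...fields returned by sync_automation2_schedule_events...}
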
@router.get("/{instanceId}/node-types")
|
||||
@limiter.limit("60/minute")
|
||||
def get_node_types(
|
||||
|
|
@ -109,6 +197,10 @@ async def post_execute(
|
|||
graph = body.get("graph") or body
|
||||
workflowId = body.get("workflowId")
|
||||
req_nodes = graph.get("nodes") or []
|
||||
workflow_for_envelope: Optional[Dict[str, Any]] = None
|
||||
if workflowId and not str(workflowId).startswith("transient-"):
|
||||
a2_pre = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
workflow_for_envelope = a2_pre.getWorkflow(workflowId)
|
||||
# When workflowId is set: prefer graph from request (current editor state) if it has nodes.
|
||||
# Only fall back to stored workflow graph when request graph is empty (e.g. resume from email).
|
||||
if workflowId and len(req_nodes) == 0:
|
||||
|
|
@ -117,6 +209,7 @@ async def post_execute(
|
|||
if wf and wf.get("graph"):
|
||||
graph = wf["graph"]
|
||||
logger.info("automation2 execute: loaded graph from workflow %s", workflowId)
|
||||
workflow_for_envelope = wf
|
||||
# Use transient workflowId when none provided (e.g. execute from editor without save)
|
||||
# Required for email.checkEmail pause/resume - run must be created
|
||||
if not workflowId:
|
||||
|
|
@ -132,6 +225,8 @@ async def post_execute(
|
|||
workflowId,
|
||||
mandateId,
|
||||
)
|
||||
run_env = _build_execute_run_envelope(body, workflow_for_envelope, userId)
|
||||
|
||||
a2_interface = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
result = await executeGraph(
|
||||
graph=graph,
|
||||
|
|
@ -141,6 +236,7 @@ async def post_execute(
|
|||
userId=userId,
|
||||
mandateId=mandateId,
|
||||
automation2_interface=a2_interface,
|
||||
run_envelope=run_env,
|
||||
)
|
||||
logger.info(
|
||||
"automation2 execute result: success=%s error=%s nodeOutputs_keys=%s failedNode=%s paused=%s",
|
||||
|
|
@ -239,6 +335,7 @@ async def list_connection_services(
|
|||
services = provider.getAvailableServices()
|
||||
_serviceLabels = {
|
||||
"sharepoint": "SharePoint",
|
||||
"clickup": "ClickUp",
|
||||
"outlook": "Outlook",
|
||||
"teams": "Teams",
|
||||
"onedrive": "OneDrive",
|
||||
|
|
@ -248,6 +345,7 @@ async def list_connection_services(
|
|||
}
|
||||
_serviceIcons = {
|
||||
"sharepoint": "sharepoint",
|
||||
"clickup": "folder",
|
||||
"outlook": "mail",
|
||||
"teams": "chat",
|
||||
"onedrive": "cloud",
|
||||
|
|
@ -342,15 +440,17 @@ def _get_node_label_from_graph(graph: dict, nodeId: str) -> str:
|
|||
def get_workflows(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
active: Optional[bool] = Query(None, description="Filter by active: true|false"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""List all workflows for this feature instance.
|
||||
Enriches each workflow with runCount, isRunning, stuckAtNodeId, stuckAtNodeLabel,
|
||||
createdAt, lastStartedAt.
|
||||
Query param active: filter by active status (true|false).
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
items = a2.getWorkflows()
|
||||
items = a2.getWorkflows(active=active)
|
||||
enriched = []
|
||||
for wf in items:
|
||||
wf_id = wf.get("id")
|
||||
|
|
@ -359,7 +459,7 @@ def get_workflows(
|
|||
active_run = None
|
||||
last_started_at = None
|
||||
for r in runs:
|
||||
ts = r.get("_createdAt")
|
||||
ts = r.get("sysCreatedAt")
|
||||
if ts and (last_started_at is None or ts > last_started_at):
|
||||
last_started_at = ts
|
||||
if r.get("status") in ("running", "paused"):
|
||||
|
|
@ -375,7 +475,7 @@ def get_workflows(
|
|||
"runStatus": active_run.get("status") if active_run else None,
|
||||
"stuckAtNodeId": stuck_at_node_id,
|
||||
"stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
|
||||
"createdAt": wf.get("_createdAt"),
|
||||
"createdAt": wf.get("sysCreatedAt"),
|
||||
"lastStartedAt": last_started_at,
|
||||
})
|
||||
return {"workflows": enriched}
|
||||
|
|
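Illustrative call for the new active filter on the workflows listing; host and auth are placeholders, the response fields match the enrichment above.

# Assumed usage (placeholders for host, instance ID, and token):
import httpx

resp = httpx.get(
    "https://example.invalid/api/automation2/INSTANCE_ID/workflows",
    params={"active": "true"},
    headers={"Authorization": "Bearer TOKEN"},
)
for wf in resp.json()["workflows"]:
    print(wf.get("id"), wf.get("runStatus"), wf.get("createdAt"), wf.get("lastStartedAt"))
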
@ -447,11 +547,163 @@ def delete_workflow(
return {"success": True}


@router.post("/{instanceId}/workflows/{workflowId}/webhooks/{entryPointId}")
@limiter.limit("60/minute")
async def post_workflow_webhook(
request: Request,
instanceId: str = Path(..., description="Feature instance ID"),
workflowId: str = Path(..., description="Workflow ID"),
entryPointId: str = Path(..., description="Entry point ID (kind must be webhook)"),
body: dict = Body(default_factory=dict),
context: RequestContext = Depends(getRequestContext),
) -> dict:
"""
Invoke a workflow via a webhook entry point. Optional shared secret in
X-Automation2-Webhook-Secret or X-Webhook-Secret when config.webhookSecret is set.
"""
mandateId = _validateInstanceAccess(instanceId, context)
userId = str(context.user.id) if context.user else None
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
wf = a2.getWorkflow(workflowId)
if not wf or not wf.get("graph"):
raise HTTPException(status_code=404, detail="Workflow not found")
inv = find_invocation(wf, entryPointId)
if not inv:
raise HTTPException(status_code=404, detail="Entry point not found")
if inv.get("kind") != "webhook":
raise HTTPException(status_code=400, detail="Entry point is not a webhook")
if not inv.get("enabled", True):
raise HTTPException(status_code=400, detail="Entry point is disabled")
cfg = inv.get("config") or {}
secret = cfg.get("webhookSecret")
if secret:
hdr = request.headers.get("X-Automation2-Webhook-Secret") or request.headers.get(
"X-Webhook-Secret"
)
if hdr != str(secret):
raise HTTPException(status_code=403, detail="Invalid webhook secret")

services = getAutomation2Services(
context.user,
mandateId=mandateId,
featureInstanceId=instanceId,
)
from modules.workflows.processing.shared.methodDiscovery import discoverMethods

discoverMethods(services)

title = inv.get("title") or {}
label = ""
if isinstance(title, dict):
label = title.get("en") or title.get("de") or ""
elif isinstance(title, str):
label = title
pl = body if isinstance(body, dict) else {}
base = default_run_envelope(
"webhook",
entry_point_id=inv.get("id"),
entry_point_label=label or None,
payload=pl,
raw={"httpBody": body},
)
run_env = normalize_run_envelope(base, user_id=userId)

result = await executeGraph(
graph=wf["graph"],
services=services,
workflowId=workflowId,
instanceId=instanceId,
userId=userId,
mandateId=mandateId,
automation2_interface=a2,
run_envelope=run_env,
)
return result

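Sketch of invoking the webhook entry point defined above, assuming a shared secret is configured; all IDs and the secret are placeholders, the header names come from the handler.

# Assumed usage (placeholder IDs and secret):
import httpx

resp = httpx.post(
    "https://example.invalid/api/automation2/INSTANCE_ID/workflows/WORKFLOW_ID/webhooks/ENTRY_POINT_ID",
    headers={"X-Automation2-Webhook-Secret": "shared-secret-placeholder"},
    json={"orderId": "1234", "status": "paid"},  # forwarded as run payload and raw httpBody
)
print(resp.status_code, resp.json())
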
@router.post("/{instanceId}/workflows/{workflowId}/forms/{entryPointId}/submit")
|
||||
@limiter.limit("60/minute")
|
||||
async def post_workflow_form_submit(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
workflowId: str = Path(..., description="Workflow ID"),
|
||||
entryPointId: str = Path(..., description="Entry point ID (kind must be form)"),
|
||||
body: dict = Body(default_factory=dict),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""Form-style submit: same as execute with trigger.type form and payload from body."""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
userId = str(context.user.id) if context.user else None
|
||||
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
wf = a2.getWorkflow(workflowId)
|
||||
if not wf or not wf.get("graph"):
|
||||
raise HTTPException(status_code=404, detail="Workflow not found")
|
||||
inv = find_invocation(wf, entryPointId)
|
||||
if not inv:
|
||||
raise HTTPException(status_code=404, detail="Entry point not found")
|
||||
if inv.get("kind") != "form":
|
||||
raise HTTPException(status_code=400, detail="Entry point is not a form")
|
||||
if not inv.get("enabled", True):
|
||||
raise HTTPException(status_code=400, detail="Entry point is disabled")
|
||||
|
||||
services = getAutomation2Services(
|
||||
context.user,
|
||||
mandateId=mandateId,
|
||||
featureInstanceId=instanceId,
|
||||
)
|
||||
from modules.workflows.processing.shared.methodDiscovery import discoverMethods
|
||||
|
||||
discoverMethods(services)
|
||||
|
||||
title = inv.get("title") or {}
|
||||
label = ""
|
||||
if isinstance(title, dict):
|
||||
label = title.get("en") or title.get("de") or ""
|
||||
elif isinstance(title, str):
|
||||
label = title
|
||||
pl = body if isinstance(body, dict) else {}
|
||||
base = default_run_envelope(
|
||||
"form",
|
||||
entry_point_id=inv.get("id"),
|
||||
entry_point_label=label or None,
|
||||
payload=pl,
|
||||
raw={"formBody": body},
|
||||
)
|
||||
run_env = normalize_run_envelope(base, user_id=userId)
|
||||
|
||||
result = await executeGraph(
|
||||
graph=wf["graph"],
|
||||
services=services,
|
||||
workflowId=workflowId,
|
||||
instanceId=instanceId,
|
||||
userId=userId,
|
||||
mandateId=mandateId,
|
||||
automation2_interface=a2,
|
||||
run_envelope=run_env,
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Runs and Resume
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
|
||||
@router.get("/{instanceId}/runs/completed")
|
||||
@limiter.limit("60/minute")
|
||||
def get_completed_runs(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
limit: int = Query(20, ge=1, le=50),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""Get recently completed runs with output (for Tasks page output section)."""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
runs = a2.getRecentCompletedRuns(limit=limit)
|
||||
return {"runs": runs}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/workflows/{workflowId}/runs")
|
||||
@limiter.limit("60/minute")
|
||||
def get_workflow_runs(
|
||||
|
|
@ -536,7 +788,7 @@ def get_tasks(
|
|||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""Get tasks - by default those assigned to current user, or all if no assignee filter.
|
||||
Enriches each task with workflowLabel and createdAt (_createdAt).
|
||||
Enriches each task with workflowLabel and createdAt (from sysCreatedAt).
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
|
|
@ -549,7 +801,7 @@ def get_tasks(
|
|||
enriched.append({
|
||||
**t,
|
||||
"workflowLabel": wf.get("label", t.get("workflowId", "")) if wf else t.get("workflowId", ""),
|
||||
"createdAt": t.get("_createdAt"),
|
||||
"createdAt": t.get("sysCreatedAt"),
|
||||
})
|
||||
return {"tasks": enriched}
|
||||
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import AccessLevel

from modules.datamodels.datamodelChat import UserInputRequest
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp

# =============================================================================

@ -27,7 +28,7 @@ from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
# =============================================================================


class ChatbotDocument(BaseModel):
class ChatbotDocument(PowerOnModel):
"""Documents attached to chatbot messages."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
messageId: str = Field(description="Foreign key to message")

@ -41,7 +42,7 @@ class ChatbotDocument(BaseModel):
actionId: Optional[str] = Field(None, description="ID of the action that created this document")


class ChatbotMessage(BaseModel):
class ChatbotMessage(PowerOnModel):
"""Messages in chatbot conversations. Must match bridge format in memory.py."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
conversationId: str = Field(description="Foreign key to conversation")

@ -64,7 +65,7 @@ class ChatbotMessage(BaseModel):
actionProgress: Optional[str] = Field(None, description="Action progress status")


class ChatbotLog(BaseModel):
class ChatbotLog(PowerOnModel):
"""Log entries for chatbot conversations."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
conversationId: str = Field(description="Foreign key to conversation")

@ -85,7 +86,7 @@ class ChatbotWorkflowModeEnum(str, Enum):
WORKFLOW_CHATBOT = "Chatbot"


class ChatbotConversation(BaseModel):
class ChatbotConversation(PowerOnModel):
"""Chatbot conversation container. Per feature-instance isolation via featureInstanceId."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
featureInstanceId: str = Field(description="Feature instance ID for per-instance isolation")

@ -328,9 +329,8 @@ class ChatObjects:
objectFields[fieldName] = value
else:
# Field not in model - treat as scalar if simple, otherwise filter out
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
# Underscore-prefixed keys (e.g. UI meta) pass through; sys* live on PowerOnModel subclasses
if fieldName.startswith("_"):
# Metadata fields should be passed through to connector
simpleFields[fieldName] = value
elif isinstance(value, (str, int, float, bool, type(None))):
simpleFields[fieldName] = value

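PowerOnModel itself is not shown in this commit; judging by the switch from _createdAt/createdAt reads to sysCreatedAt elsewhere in the diff, it presumably contributes the sys* audit fields that the per-model createdAt/updatedAt columns used to carry. A rough sketch of that assumption:

# Assumption only - PowerOnModel is not part of this diff. Sketch of the audit
# fields the migrated models appear to rely on (sysCreatedAt is what the routes
# above now read instead of _createdAt).
from typing import Optional
from pydantic import BaseModel, Field

class PowerOnModelSketch(BaseModel):
    sysCreatedAt: Optional[str] = Field(default=None, description="Set by the persistence layer")
    sysUpdatedAt: Optional[str] = Field(default=None, description="Assumed update counterpart; not confirmed by this diff")
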
@ -1222,11 +1222,9 @@ def _preflight_billing_check(services, mandateId: str, featureInstanceId: Option
balanceCheck = billingService.checkBalance(0.01)
if not balanceCheck.allowed:
mid = str(getattr(services, "mandateId", None) or mandateId or "")
from modules.datamodels.datamodelBilling import BillingModelEnum
from modules.serviceCenter.services.serviceBilling.billingExhaustedNotify import (
maybeEmailMandatePoolExhausted,
)
if balanceCheck.billingModel == BillingModelEnum.PREPAY_MANDATE:
u = getattr(services, "user", None)
ulabel = (
(getattr(u, "email", None) or getattr(u, "username", None) or str(getattr(u, "id", "")))

@ -7,6 +7,8 @@ Pydantic models for coaching contexts, sessions, messages, tasks, scores, and us
from typing import Optional, List, Dict, Any
from pydantic import BaseModel, Field
from enum import Enum

from modules.datamodels.datamodelBase import PowerOnModel
import uuid


@ -73,7 +75,7 @@ class CoachingScoreTrend(str, Enum):
# Database Models
# ============================================================================

class CoachingContext(BaseModel):
class CoachingContext(PowerOnModel):
"""A coaching context/dossier representing a topic the user is working on."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID (strict ownership)")

@ -91,11 +93,9 @@ class CoachingContext(BaseModel):
lastSessionAt: Optional[str] = Field(default=None)
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)


class CoachingSession(BaseModel):
class CoachingSession(PowerOnModel):
"""A single coaching conversation session within a context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")

@ -115,11 +115,9 @@ class CoachingSession(BaseModel):
emailSent: bool = Field(default=False)
startedAt: Optional[str] = Field(default=None)
endedAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)


class CoachingMessage(BaseModel):
class CoachingMessage(PowerOnModel):
"""A single message in a coaching session."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
sessionId: str = Field(description="FK to CoachingSession")

@ -130,10 +128,9 @@ class CoachingMessage(BaseModel):
contentType: CoachingMessageContentType = Field(default=CoachingMessageContentType.TEXT)
audioRef: Optional[str] = Field(default=None, description="Reference to audio file")
metadata: Optional[str] = Field(default=None, description="JSON: token count, voice info, etc.")
createdAt: Optional[str] = Field(default=None)


class CoachingTask(BaseModel):
class CoachingTask(PowerOnModel):
"""A task/checklist item assigned within a coaching context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")

@ -146,11 +143,9 @@ class CoachingTask(BaseModel):
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
dueDate: Optional[str] = Field(default=None)
completedAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)


class CoachingScore(BaseModel):
class CoachingScore(PowerOnModel):
"""A competence score for a dimension, recorded after a session."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")

@ -161,17 +156,14 @@ class CoachingScore(BaseModel):
score: float = Field(ge=0.0, le=100.0)
trend: CoachingScoreTrend = Field(default=CoachingScoreTrend.STABLE)
evidence: Optional[str] = Field(default=None, description="AI reasoning for the score")
createdAt: Optional[str] = Field(default=None)


class CoachingUserProfile(BaseModel):
class CoachingUserProfile(PowerOnModel):
"""Per-user coaching profile and preferences."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID")
mandateId: str = Field(description="Mandate ID")
instanceId: str = Field(description="Feature instance ID")
preferredLanguage: str = Field(default="de-DE")
preferredVoice: Optional[str] = Field(default=None, description="Google TTS voice name")
dailyReminderTime: Optional[str] = Field(default=None, description="HH:MM format")
dailyReminderEnabled: bool = Field(default=False)
emailSummaryEnabled: bool = Field(default=True)

@ -180,15 +172,13 @@ class CoachingUserProfile(BaseModel):
totalSessions: int = Field(default=0)
totalMinutes: int = Field(default=0)
lastSessionAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)


# ============================================================================
# Iteration 2: Personas
# ============================================================================

class CoachingPersona(BaseModel):
class CoachingPersona(PowerOnModel):
"""A roleplay persona for coaching sessions."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID ('system' for builtins)")

@ -201,35 +191,13 @@ class CoachingPersona(BaseModel):
gender: Optional[str] = Field(default=None, description="m or f")
category: str = Field(default="builtin", description="'builtin' or 'custom'")
isActive: bool = Field(default=True)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)


# ============================================================================
# Iteration 2: Documents
# ============================================================================

class CoachingDocument(BaseModel):
"""A document attached to a coaching context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")
userId: str = Field(description="Owner user ID")
mandateId: str = Field(description="Mandate ID")
instanceId: Optional[str] = Field(default=None)
fileName: str = Field(description="Original file name")
mimeType: str = Field(default="application/octet-stream")
fileSize: int = Field(default=0)
extractedText: Optional[str] = Field(default=None, description="Text content extracted from file")
summary: Optional[str] = Field(default=None, description="AI-generated summary")
fileRef: Optional[str] = Field(default=None, description="Reference to file in storage")
createdAt: Optional[str] = Field(default=None)


# ============================================================================
# Iteration 2: Badges / Gamification
# ============================================================================

class CoachingBadge(BaseModel):
class CoachingBadge(PowerOnModel):
"""An achievement badge awarded to a user."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID")

@ -237,7 +205,6 @@ class CoachingBadge(BaseModel):
instanceId: str = Field(description="Feature instance ID")
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
awardedAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)


# ============================================================================

@ -261,6 +228,10 @@ class UpdateContextRequest(BaseModel):
class SendMessageRequest(BaseModel):
content: str = Field(description="User message text")
contentType: Optional[CoachingMessageContentType] = CoachingMessageContentType.TEXT
fileIds: Optional[List[str]] = Field(default=None, description="Attached file IDs for agent context")
dataSourceIds: Optional[List[str]] = Field(default=None, description="Personal data source IDs")
featureDataSourceIds: Optional[List[str]] = Field(default=None, description="Feature data source IDs")
allowedProviders: Optional[List[str]] = Field(default=None, description="Allowed AI providers")


class CreateTaskRequest(BaseModel):

@ -282,8 +253,6 @@ class UpdateTaskStatusRequest(BaseModel):


class UpdateProfileRequest(BaseModel):
preferredLanguage: Optional[str] = None
preferredVoice: Optional[str] = None
dailyReminderTime: Optional[str] = None
dailyReminderEnabled: Optional[bool] = None
emailSummaryEnabled: Optional[bool] = None

@ -269,34 +269,6 @@ class CommcoachObjects:
from .datamodelCommcoach import CoachingPersona
return self.db.recordDelete(CoachingPersona, personaId)

# =========================================================================
# Documents
# =========================================================================

def getDocuments(self, contextId: str, userId: str) -> List[Dict[str, Any]]:
from .datamodelCommcoach import CoachingDocument
records = self.db.getRecordset(CoachingDocument, recordFilter={"contextId": contextId, "userId": userId})
records.sort(key=lambda r: r.get("createdAt") or "", reverse=True)
return records

def getDocument(self, documentId: str) -> Optional[Dict[str, Any]]:
from .datamodelCommcoach import CoachingDocument
records = self.db.getRecordset(CoachingDocument, recordFilter={"id": documentId})
return records[0] if records else None

def createDocument(self, data: Dict[str, Any]) -> Dict[str, Any]:
from .datamodelCommcoach import CoachingDocument
data["createdAt"] = getIsoTimestamp()
return self.db.recordCreate(CoachingDocument, data)

def updateDocument(self, documentId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
from .datamodelCommcoach import CoachingDocument
return self.db.recordModify(CoachingDocument, documentId, updates)

def deleteDocument(self, documentId: str) -> bool:
from .datamodelCommcoach import CoachingDocument
return self.db.recordDelete(CoachingDocument, documentId)

# =========================================================================
# Badges
# =========================================================================

@ -36,12 +36,22 @@ DATA_OBJECTS = [
{
"objectKey": "data.feature.commcoach.CoachingContext",
"label": {"en": "Coaching Context", "de": "Coaching-Kontext", "fr": "Contexte coaching"},
"meta": {"table": "CoachingContext", "fields": ["id", "title", "category", "status"]}
"meta": {
"table": "CoachingContext",
"fields": ["id", "title", "category", "status"],
"isParent": True,
"displayFields": ["title", "category", "status"],
}
},
{
"objectKey": "data.feature.commcoach.CoachingSession",
"label": {"en": "Coaching Session", "de": "Coaching-Session", "fr": "Session coaching"},
"meta": {"table": "CoachingSession", "fields": ["id", "contextId", "status", "summary"]}
"meta": {
"table": "CoachingSession",
"fields": ["id", "contextId", "status", "summary"],
"parentTable": "CoachingContext",
"parentKey": "contextId",
}
},
{
"objectKey": "data.feature.commcoach.CoachingMessage",

@ -51,7 +61,12 @@ DATA_OBJECTS = [
{
"objectKey": "data.feature.commcoach.CoachingTask",
"label": {"en": "Coaching Task", "de": "Coaching-Aufgabe", "fr": "Tache coaching"},
"meta": {"table": "CoachingTask", "fields": ["id", "contextId", "title", "status"]}
"meta": {
"table": "CoachingTask",
"fields": ["id", "contextId", "title", "status"],
"parentTable": "CoachingContext",
"parentKey": "contextId",
}
},
{
"objectKey": "data.feature.commcoach.CoachingScore",

@ -61,18 +76,13 @@ DATA_OBJECTS = [
{
"objectKey": "data.feature.commcoach.CoachingUserProfile",
"label": {"en": "User Profile", "de": "Benutzerprofil", "fr": "Profil utilisateur"},
"meta": {"table": "CoachingUserProfile", "fields": ["id", "userId", "preferredLanguage"]}
"meta": {"table": "CoachingUserProfile", "fields": ["id", "userId", "dailyReminderEnabled"]}
},
{
"objectKey": "data.feature.commcoach.CoachingPersona",
"label": {"en": "Coaching Persona", "de": "Coaching-Persona", "fr": "Persona coaching"},
"meta": {"table": "CoachingPersona", "fields": ["id", "key", "label", "gender"]}
},
{
"objectKey": "data.feature.commcoach.CoachingDocument",
"label": {"en": "Coaching Document", "de": "Coaching-Dokument", "fr": "Document coaching"},
"meta": {"table": "CoachingDocument", "fields": ["id", "contextId", "fileName"]}
},
{
"objectKey": "data.feature.commcoach.CoachingBadge",
"label": {"en": "Coaching Badge", "de": "Coaching-Auszeichnung", "fr": "Badge coaching"},

@ -114,12 +124,27 @@ RESOURCE_OBJECTS = [
]

TEMPLATE_ROLES = [
{
"roleLabel": "commcoach-viewer",
"description": {
"en": "Communication Coach Viewer - View coaching data (read-only)",
"de": "Kommunikations-Coach Betrachter - Coaching-Daten ansehen (nur lesen)",
"fr": "Visualiseur Coach Communication - Consulter les donnees coaching (lecture seule)",
},
"accessRules": [
{"context": "UI", "item": "ui.feature.commcoach.dashboard", "view": True},
{"context": "UI", "item": "ui.feature.commcoach.coaching", "view": True},
{"context": "UI", "item": "ui.feature.commcoach.dossier", "view": True},
{"context": "UI", "item": "ui.feature.commcoach.settings", "view": True},
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
],
},
{
"roleLabel": "commcoach-user",
"description": {
"en": "Communication Coach User - Can manage own coaching contexts and sessions",
"de": "Kommunikations-Coach Benutzer - Kann eigene Coaching-Kontexte und Sessions verwalten",
"fr": "Utilisateur Coach Communication - Peut gerer ses propres contextes et sessions"
"fr": "Utilisateur Coach Communication - Peut gerer ses propres contextes et sessions",
},
"accessRules": [
{"context": "UI", "item": "ui.feature.commcoach.dashboard", "view": True},

@ -137,7 +162,20 @@ TEMPLATE_ROLES = [
{"context": "RESOURCE", "item": "resource.feature.commcoach.session.start", "view": True},
{"context": "RESOURCE", "item": "resource.feature.commcoach.session.complete", "view": True},
{"context": "RESOURCE", "item": "resource.feature.commcoach.task.manage", "view": True},
]
],
},
{
"roleLabel": "commcoach-admin",
"description": {
"en": "Communication Coach Admin - All UI and API actions; data scoped to own records",
"de": "Kommunikations-Coach Admin - Alle UI- und API-Aktionen; Daten nur eigene Datensaetze",
"fr": "Administrateur Coach Communication - Toute l'UI et les API; donnees propres",
},
"accessRules": [
{"context": "UI", "item": None, "view": True},
{"context": "RESOURCE", "item": None, "view": True},
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
],
},
]


@ -147,7 +185,7 @@ def getFeatureDefinition() -> Dict[str, Any]:
"code": FEATURE_CODE,
"label": FEATURE_LABEL,
"icon": FEATURE_ICON,
"autoCreateInstance": True,
"autoCreateInstance": False,
}

@ -2,7 +2,7 @@
# All rights reserved.
"""
CommCoach routes for the backend API.
Implements coaching context management, session streaming, tasks, dashboard, and voice endpoints.
Implements coaching context management, session streaming, tasks, and dashboard.
"""

import logging

@ -26,7 +26,7 @@ from .datamodelCommcoach import (
CoachingContext, CoachingContextStatus, CoachingSession, CoachingSessionStatus,
CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
CoachingTask, CoachingTaskStatus,
CoachingPersona, CoachingDocument, CoachingBadge,
CoachingPersona, CoachingBadge,
CreateContextRequest, UpdateContextRequest,
SendMessageRequest, CreateTaskRequest, UpdateTaskRequest, UpdateTaskStatusRequest,
UpdateProfileRequest,

@ -334,10 +334,8 @@ async def startSession(
try:
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
voiceInterface = getVoiceInterface(context.user, mandateId)
profile = interface.getProfile(userId, instanceId)
language = profile.get("preferredLanguage", "de-DE") if profile else "de-DE"
voiceName = profile.get("preferredVoice") if profile else None
from .serviceCommcoach import _stripMarkdownForTts
from .serviceCommcoach import _getUserVoicePrefs, _stripMarkdownForTts, _buildTtsConfigErrorMessage
language, voiceName = _getUserVoicePrefs(userId, mandateId)
ttsResult = await voiceInterface.textToSpeech(
text=_stripMarkdownForTts(greetingText),
languageCode=language,

@ -350,8 +348,12 @@ async def startSession(
audioBytes if isinstance(audioBytes, bytes) else audioBytes.encode()
).decode()
yield f"data: {json.dumps({'type': 'ttsAudio', 'data': {'audio': audioB64, 'format': 'mp3'}})}\n\n"
else:
errorDetail = ttsResult.get("error", "Text-to-Speech failed")
yield f"data: {json.dumps({'type': 'error', 'data': {'message': _buildTtsConfigErrorMessage(language, voiceName, errorDetail), 'detail': errorDetail, 'ttsLanguage': language, 'ttsVoice': voiceName}})}\n\n"
except Exception as e:
logger.warning(f"TTS failed for resumed session: {e}")
yield f"data: {json.dumps({'type': 'error', 'data': {'message': 'Die konfigurierte Stimme für diese Sprache ist ungültig oder nicht verfügbar. Bitte passe sie unter Einstellungen > Stimme & Sprache an.', 'detail': str(e)}})}\n\n"
yield f"data: {json.dumps({'type': 'complete', 'data': {}, 'timestamp': getIsoTimestamp()})}\n\n"

return StreamingResponse(

@ -512,7 +514,13 @@ async def sendMessageStream(
_activeProcessTasks.pop(sessionId, None)

task = asyncio.create_task(
service.processMessage(sessionId, contextId, body.content, interface)
service.processMessage(
sessionId, contextId, body.content, interface,
fileIds=body.fileIds,
dataSourceIds=body.dataSourceIds,
featureDataSourceIds=body.featureDataSourceIds,
allowedProviders=body.allowedProviders,
)
)
task.add_done_callback(_onTaskDone)
_activeProcessTasks[sessionId] = task

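Example body for the streaming send-message call, using the optional fields added to SendMessageRequest in this commit; all IDs and the provider name are placeholders.

# Placeholder IDs; field names correspond to SendMessageRequest in datamodelCommcoach.
body = {
    "content": "Bitte fasse das angehängte Dokument zusammen.",
    "fileIds": ["file-placeholder"],
    "dataSourceIds": [],
    "featureDataSourceIds": ["datasource-placeholder"],
    "allowedProviders": ["provider-placeholder"],
}
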
@ -574,8 +582,8 @@ async def sendAudioStream(
if not audioBody:
raise HTTPException(status_code=400, detail="No audio data received")

profile = interface.getProfile(str(context.user.id), instanceId)
language = profile.get("preferredLanguage", "de-DE") if profile else "de-DE"
from .serviceCommcoach import _getUserVoicePrefs
language, _ = _getUserVoicePrefs(str(context.user.id), mandateId)

contextId = session.get("contextId")
service = CommcoachService(context.user, mandateId, instanceId)

@ -839,73 +847,6 @@ async def updateProfile(
return {"profile": updated}


# =========================================================================
# Voice Endpoints
# =========================================================================

@router.get("/{instanceId}/voice/languages")
@limiter.limit("30/minute")
async def getVoiceLanguages(
request: Request,
instanceId: str,
context: RequestContext = Depends(getRequestContext),
):
mandateId = _validateInstanceAccess(instanceId, context)
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
voiceInterface = getVoiceInterface(context.user, mandateId)
languagesResult = await voiceInterface.getAvailableLanguages()
languageList = languagesResult.get("languages", []) if isinstance(languagesResult, dict) else languagesResult
return {"languages": languageList}


@router.get("/{instanceId}/voice/voices")
@limiter.limit("30/minute")
async def getVoiceVoices(
request: Request,
instanceId: str,
language: str = "de-DE",
context: RequestContext = Depends(getRequestContext),
):
mandateId = _validateInstanceAccess(instanceId, context)
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
voiceInterface = getVoiceInterface(context.user, mandateId)
voicesResult = await voiceInterface.getAvailableVoices(language)
voiceList = voicesResult.get("voices", []) if isinstance(voicesResult, dict) else voicesResult
return {"voices": voiceList}


@router.post("/{instanceId}/voice/tts")
@limiter.limit("10/minute")
async def testVoice(
request: Request,
instanceId: str,
context: RequestContext = Depends(getRequestContext),
):
"""TTS preview / voice test."""
mandateId = _validateInstanceAccess(instanceId, context)
body = await request.json()
text = body.get("text", "Hallo, ich bin dein Coaching-Assistent.")
language = body.get("language", "de-DE")
voiceId = body.get("voiceId")

from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
voiceInterface = getVoiceInterface(context.user, mandateId)

try:
result = await voiceInterface.textToSpeech(text=text, languageCode=language, voiceName=voiceId)
if result and isinstance(result, dict):
audioContent = result.get("audioContent")
if audioContent:
audioB64 = base64.b64encode(
audioContent if isinstance(audioContent, bytes) else audioContent.encode()
).decode()
return {"success": True, "audio": audioB64, "format": "mp3", "text": text}
return {"success": False, "error": "TTS returned no audio"}
except Exception as e:
logger.error(f"Voice test failed: {e}")
raise HTTPException(status_code=500, detail=f"TTS test failed: {str(e)}")


# =========================================================================
# Export Endpoints (Iteration 2)
# =========================================================================

@ -1074,202 +1015,6 @@ async def deletePersonaRoute(
return {"deleted": True}


# =========================================================================
# Document Endpoints (Iteration 2)
# =========================================================================

@router.get("/{instanceId}/contexts/{contextId}/documents")
@limiter.limit("60/minute")
async def listDocuments(
request: Request,
instanceId: str,
contextId: str,
context: RequestContext = Depends(getRequestContext),
):
_validateInstanceAccess(instanceId, context)
interface = _getInterface(context, instanceId)
userId = str(context.user.id)
docs = interface.getDocuments(contextId, userId)
return {"documents": docs}


@router.post("/{instanceId}/contexts/{contextId}/documents")
@limiter.limit("10/minute")
async def uploadDocument(
request: Request,
instanceId: str,
contextId: str,
context: RequestContext = Depends(getRequestContext),
):
"""Upload a document and bind it to a context. Stores file in Management DB."""
mandateId = _validateInstanceAccess(instanceId, context)
interface = _getInterface(context, instanceId)
userId = str(context.user.id)

ctx = interface.getContext(contextId)
if not ctx:
raise HTTPException(status_code=404, detail="Context not found")
_validateOwnership(ctx, context)

form = await request.form()
file = form.get("file")
if not file or not hasattr(file, "read"):
raise HTTPException(status_code=400, detail="No file uploaded")

content = await file.read()
fileName = getattr(file, "filename", "document")
mimeType = getattr(file, "content_type", "application/octet-stream")
fileSize = len(content)

if not content:
raise HTTPException(status_code=400, detail="Leere Datei hochgeladen")

import modules.interfaces.interfaceDbManagement as interfaceDbManagement
mgmtInterface = interfaceDbManagement.getInterface(currentUser=context.user)
fileItem, _dupType = mgmtInterface.saveUploadedFile(content, fileName)
fileRef = fileItem.id

extractedText = _extractText(content, mimeType, fileName)
summary = None
if extractedText and len(extractedText.strip()) > 50:
try:
from .serviceCommcoach import CommcoachService
service = CommcoachService(context.user, mandateId, instanceId)
aiResp = await service._callAi(
"Du fasst Dokumente in 2-3 Saetzen zusammen.",
f"Fasse folgendes Dokument zusammen:\n\n{extractedText[:3000]}"
)
if aiResp and aiResp.errorCount == 0 and aiResp.content:
summary = aiResp.content.strip()
except Exception as e:
logger.warning(f"Document summary failed: {e}")

docData = CoachingDocument(
contextId=contextId,
userId=userId,
mandateId=mandateId,
instanceId=instanceId,
fileName=fileName,
mimeType=mimeType,
fileSize=fileSize,
extractedText=extractedText[:10000] if extractedText else None,
summary=summary,
fileRef=fileRef,
).model_dump()
created = interface.createDocument(docData)
return {"document": created}


@router.delete("/{instanceId}/documents/{documentId}")
@limiter.limit("10/minute")
async def deleteDocumentRoute(
request: Request,
instanceId: str,
documentId: str,
context: RequestContext = Depends(getRequestContext),
):
mandateId = _validateInstanceAccess(instanceId, context)
interface = _getInterface(context, instanceId)

doc = interface.getDocument(documentId)
if not doc:
raise HTTPException(status_code=404, detail="Document not found")
_validateOwnership(doc, context)

fileRef = doc.get("fileRef")
if fileRef:
try:
import modules.interfaces.interfaceDbManagement as interfaceDbManagement
mgmtInterface = interfaceDbManagement.getInterface(
currentUser=context.user, mandateId=mandateId, featureInstanceId=instanceId
)
mgmtInterface.deleteFile(fileRef)
except Exception as e:
logger.warning(f"Failed to delete file {fileRef}: {e}")

interface.deleteDocument(documentId)
return {"deleted": True}


def _extractText(content: bytes, mimeType: str, fileName: str) -> Optional[str]:
"""Extract text from uploaded file content (TXT, MD, HTML, PDF, DOCX, XLSX, PPTX)."""
import io

lowerName = fileName.lower()
try:
if mimeType in ("text/plain",) or lowerName.endswith(".txt"):
return content.decode("utf-8", errors="replace")

if mimeType in ("text/markdown",) or lowerName.endswith(".md"):
return content.decode("utf-8", errors="replace")

if mimeType in ("text/html",) or lowerName.endswith((".html", ".htm")):
from html.parser import HTMLParser
class _Strip(HTMLParser):
def __init__(self):
super().__init__()
self._parts: list[str] = []
def handle_data(self, d):
self._parts.append(d)
def result(self):
return " ".join(self._parts)
parser = _Strip()
parser.feed(content.decode("utf-8", errors="replace"))
return parser.result()

if "pdf" in mimeType or lowerName.endswith(".pdf"):
try:
from PyPDF2 import PdfReader
reader = PdfReader(io.BytesIO(content))
return "".join(page.extract_text() or "" for page in reader.pages)
except ImportError:
logger.warning("PyPDF2 not installed, cannot extract PDF text")
return None

if "wordprocessingml" in mimeType or lowerName.endswith(".docx"):
try:
from docx import Document
doc = Document(io.BytesIO(content))
return "\n".join(p.text for p in doc.paragraphs if p.text)
except ImportError:
logger.warning("python-docx not installed, cannot extract DOCX text")
return None

if "spreadsheetml" in mimeType or lowerName.endswith(".xlsx"):
try:
from openpyxl import load_workbook
wb = load_workbook(io.BytesIO(content), read_only=True, data_only=True)
parts: list[str] = []
for ws in wb.worksheets:
for row in ws.iter_rows(values_only=True):
cells = [str(c) for c in row if c is not None]
if cells:
parts.append("\t".join(cells))
return "\n".join(parts)
except ImportError:
logger.warning("openpyxl not installed, cannot extract XLSX text")
return None

if "presentationml" in mimeType or lowerName.endswith(".pptx"):
try:
from pptx import Presentation
prs = Presentation(io.BytesIO(content))
parts = []
for slide in prs.slides:
for shape in slide.shapes:
if shape.has_text_frame:
parts.append(shape.text_frame.text)
return "\n".join(parts)
except ImportError:
logger.warning("python-pptx not installed, cannot extract PPTX text")
return None

logger.info(f"No text extractor for {fileName} (mime={mimeType})")
except Exception as e:
logger.warning(f"Text extraction failed for {fileName}: {e}")
return None


# =========================================================================
# Badge + Score History Endpoints (Iteration 2)
# =========================================================================

File diff suppressed because it is too large

@ -168,29 +168,18 @@ Handlungsprinzip:
- Wenn der Benutzer dich bittet, etwas zu erstellen (Dokument, Präsentation, Checkliste, Plan), dann TU ES SOFORT. Frage NICHT nochmals nach Bestätigung.
- Verwende alle verfügbaren Informationen aus dem Chat-Verlauf, den Dokumenten und dem Kontext.
- Wenn der Benutzer sagt "erstelle", "mach", "schreib", dann liefere das fertige Ergebnis — keine Aufzählung von Punkten, die du "gleich umsetzen wirst".
- Dir wird automatisch relevanter Kontext aus früheren Sessions bereitgestellt (Relevant Knowledge). Nutze diesen für Kontinuität und Bezugnahme auf frühere Gespräche.

Antwortformat:
Du antwortest IMMER als reines JSON-Objekt mit exakt diesen Feldern:
{"text": "...", "speech": "...", "documents": []}
- Antworte direkt als Freitext (KEIN JSON). Markdown-Formatierung ist erlaubt.
- Halte Antworten gesprächig und kurz (2-6 Sätze im Normalfall), wie in einem echten Coaching-Gespräch.
- Bei komplexen Themen oder wenn der Benutzer Details anfragt, darf die Antwort ausführlicher sein.
- Dein Text wird sowohl angezeigt als auch vorgelesen – schreibe daher natürlich und gut sprechbar.

"text": Dein schriftlicher Chat-Text. Details, Struktur, Übungen, Beispiele. Markdown-Formatierung erlaubt.
"speech": Dein gesprochener Kommentar. Natürlich, wie ein Gespräch. Fasse zusammen, kommentiere, motiviere, stelle Fragen. Lies NICHT den Text vor, ergänze ihn mündlich. 2-4 Sätze, reiner Redetext ohne Formatierung.
"documents": Dokumente die der Benutzer aufbewahren kann. Erstelle ein Dokument wenn: der Benutzer explizit darum bittet, du strukturierte Inhalte lieferst, oder Material zum Aufbewahren sinnvoll ist. Wenn keine: leeres Array [].

Dokument-Format:
{"title": "Dateiname_mit_Extension.html", "content": "...vollstaendiger Inhalt..."}
- Der Title IST der Dateiname inkl. Extension (.html, .md, .txt etc.)
- Fuer HTML-Dokumente: Erstelle VOLLSTAENDIGES, professionell gestyltes HTML mit inline CSS. Kein Markdown, sondern fertiges HTML mit Farben, Layout, Typografie.
- Fuer andere Dokumente: Verwende Markdown.
- WICHTIG: Der Content muss VOLLSTAENDIG und AUSFUEHRLICH sein. Keine Platzhalter, keine "hier kommt..."-Abschnitte. Schreibe echte, detaillierte Inhalte basierend auf allen verfuegbaren Informationen aus dem Chat und den Dokumenten.
- Laengenbeschraenkung fuer Dokumente: KEINE. Schreibe so viel wie noetig fuer ein vollstaendiges Ergebnis.

Kanalverteilung:
- Fakten, Listen, Übungen -> text
- Empathie, Einordnung, Nachfragen -> speech
- Erstellte Dateien, Materialien zum Aufbewahren -> documents

WICHTIG: Antworte NUR mit dem JSON-Objekt. Kein Text vor oder nach dem JSON."""
Tool-Nutzung:
- Du hast Zugriff auf Tools (Dateien lesen, Web-Suche, Datenquellen abfragen) wenn der Benutzer Dateien/Quellen angehängt hat oder Recherche benötigt.
- Nutze Tools NUR wenn nötig. Für normales Coaching-Gespräch: antworte direkt ohne Tools.
- Wenn du ein Tool nutzt, erkläre kurz was du tust."""

    if contextDescription:
        prompt += f"\n\nKontext-Beschreibung: {contextDescription}"
@ -229,12 +218,18 @@ WICHTIG: Antworte NUR mit dem JSON-Objekt. Kein Text vor oder nach dem JSON."""
        prompt += f"\n{retrievedSession.get('summary', '')[:500]}"

    if retrievedByTopic:
        prompt += "\n\nRelevante Sessions zum angefragten Thema:"
        for s in retrievedByTopic[:3]:
            summary = s.get("summary", "")
        prompt += "\n\nRelevante Sessions und Mandantenwissen zum angefragten Thema:"
        for s in retrievedByTopic[:5]:
            summary = s.get("summary", s.get("content", ""))
            if not summary:
                continue
            dateStr = s.get("date", "")
            if summary:
                prompt += f"\n- [{dateStr}] {summary[:300]}"
            if s.get("source") == "rag":
                label = s.get("ragSourceLabel") or "Mandantenwissen"
                prompt += f"\n- [Wissen: {label}] {summary[:320]}"
            else:
                prefix = f"[{dateStr}] " if dateStr else ""
                prompt += f"\n- {prefix}{summary[:300]}"

    if openTasks:
        prompt += "\n\nOffene Aufgaben:"
@ -273,7 +268,7 @@ Fuer ein NEUES Dokument: {"title": "...", "content": "...Inhalt..."}"""


def buildSummaryPrompt(messages: List[Dict[str, Any]], contextTitle: str) -> str:
    """Build a prompt to generate a session summary as JSON with plain text and styled HTML email."""
    """Build a prompt to generate a session summary plus structured email content."""
    conversation = ""
    for msg in messages:
        role = "Benutzer" if msg.get("role") == "user" else "Coach"

@ -281,27 +276,33 @@ def buildSummaryPrompt(messages: List[Dict[str, Any]], contextTitle: str) -> str

    return f"""Erstelle eine Zusammenfassung dieser Coaching-Session zum Thema "{contextTitle}".

Antworte AUSSCHLIESSLICH als JSON mit zwei Feldern:
Antworte AUSSCHLIESSLICH als JSON im folgenden Format:

{{
"summary": "Kompakte Zusammenfassung als Plaintext (fuer Anzeige in der App). Struktur: 1. Kernthema, 2. Erkenntnisse, 3. Naechste Schritte, 4. Fortschritt.",
"emailHtml": "<div>...</div>"
"summary": "Kompakte Plaintext-Zusammenfassung fuer die App. Struktur: Kernthema, Erkenntnisse, Naechste Schritte, Fortschritt.",
"email": {{
  "headline": "Kurze, professionelle Titelzeile fuer die E-Mail",
  "intro": "1-2 Saetze, die den Kern der Session auf den Punkt bringen",
  "coreTopic": "Das zentrale Thema in einem praezisen Satz",
  "insights": ["Erkenntnis 1", "Erkenntnis 2"],
  "nextSteps": ["Naechster Schritt 1", "Naechster Schritt 2"],
  "progress": ["Fortschritt 1", "Fortschritt 2"]
}}
}}

Fuer "emailHtml": Erstelle ein professionell formatiertes HTML-Fragment (KEIN vollstaendiges HTML-Dokument, nur der Inhalt-Block).
Verwende inline CSS fuer schoene Darstellung in E-Mail-Clients:
- Verwende <h3> fuer Abschnitte (color: #1e40af; margin: 20px 0 8px; font-size: 16px)
- Verwende <ul>/<li> fuer Stichpunkte (margin: 4px 0; line-height: 1.6)
- Verwende <strong> fuer Hervorhebungen
- Verwende <p> fuer Fliesstext (color: #374151; line-height: 1.65; font-size: 15px)
- Verwende <hr style="border:none;border-top:1px solid #e5e7eb;margin:20px 0"> als Trenner

Fuer "summary": Kompakter Plaintext ohne HTML/Markdown. Abschnitte mit Zeilenumbruechen trennen.
Regeln:
- KEIN HTML erzeugen.
- "summary" ist reiner Plaintext ohne Markdown.
- "headline" kurz und professionell.
- "intro" in natuerlichem Business-Deutsch.
- "insights", "nextSteps" und "progress" jeweils als kurze Stichpunkte.
- Maximal 4 Eintraege pro Liste.
- Wenn eine Liste leer ist, gib [] zurueck.

Gespräch:
{conversation}

Antworte auf Deutsch, sachlich und kompakt. NUR JSON, keine Erklaerungen."""
Antworte auf Deutsch, sachlich, klar und kompakt. NUR JSON, keine Erklaerungen."""
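
A minimal parsing sketch for the JSON contract above. parseSummaryResponse is a hypothetical helper (not part of this diff); only the field names come from the prompt.

import json
from typing import Any, Dict

def parseSummaryResponse(rawContent: str) -> Dict[str, Any]:
    """Parse the model reply into the summary/email structure defined in the prompt above."""
    content = rawContent.strip()
    # Strip an optional ```json fence the model might add despite the instructions.
    if content.startswith("```"):
        content = content.split("\n", 1)[-1].rsplit("```", 1)[0].strip()
    try:
        parsed = json.loads(content)
    except json.JSONDecodeError:
        return {"summary": "", "email": {}}
    email = parsed.get("email") or {}
    return {
        "summary": parsed.get("summary", ""),
        "email": {
            "headline": email.get("headline", ""),
            "intro": email.get("intro", ""),
            "coreTopic": email.get("coreTopic", ""),
            "insights": email.get("insights") or [],
            "nextSteps": email.get("nextSteps") or [],
            "progress": email.get("progress") or [],
        },
    }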


def buildScoringPrompt(messages: List[Dict[str, Any]], contextCategory: str) -> str:
@ -172,20 +172,48 @@ def searchSessionsByTopic(


def searchSessionsByTopicRag(
    sessions: List[Dict[str, Any]],
    query: str,
    maxResults: int = TOPIC_SEARCH_MAX_RESULTS,
    embeddingProvider: Optional[Any] = None,
    userId: str,
    instanceId: str,
    mandateId: str = None,
    queryVector: List[float] = None,
) -> List[Dict[str, Any]]:
    """Search using platform RAG (semantic search across mandate-wide knowledge data).

    Requires a pre-computed queryVector (embedding). The caller is responsible
    for generating the embedding via AiService.callEmbedding before invoking this.
    """
    Phase 7 RAG: Semantic search via embeddings.
    When embeddingProvider is None, falls back to keyword search.
    Future: Pass embeddingProvider that has embed(text) -> vector and similarity search.
    """
    if embeddingProvider is None:
        return searchSessionsByTopic(sessions, query, maxResults)
    # TODO: When embedding API exists: embed query, embed session summaries, cosine similarity
    return searchSessionsByTopic(sessions, query, maxResults)
    if not queryVector:
        logger.warning("searchSessionsByTopicRag called without queryVector, skipping RAG search")
        return []
    try:
        from modules.interfaces.interfaceDbKnowledge import getInterface as _getKnowledgeInterface

        knowledgeDb = _getKnowledgeInterface()

        results = knowledgeDb.semanticSearch(
            queryVector=queryVector,
            userId=userId,
            featureInstanceId=instanceId,
            mandateId=mandateId,
            isSysAdmin=False,
            limit=TOPIC_SEARCH_MAX_RESULTS,
        )

        formatted = []
        for r in (results or []):
            rData = r if isinstance(r, dict) else r.model_dump() if hasattr(r, "model_dump") else {}
            contextRef = rData.get("contextRef") or {}
            formatted.append({
                "source": "rag",
                "content": rData.get("data") or rData.get("summary") or "",
                "fileName": contextRef.get("containerPath") or "RAG-Ergebnis",
                "score": rData.get("_score") or 0,
            })
        return formatted
    except Exception as e:
        logger.warning(f"RAG search failed for query '{query[:50]}': {e}")
        return []
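
A hedged caller sketch for the function above. The diff states that the embedding must be pre-computed via AiService.callEmbedding, but that call's exact signature, how the service instance is obtained, and the final keyword-only signature of the new function are not shown here, so all three are assumptions.

async def retrieveMandateKnowledge(aiService, query: str, userId: str, instanceId: str, mandateId: str):
    # Assumption: callEmbedding takes the raw query text and returns a float vector.
    queryVector = await aiService.callEmbedding(query)
    if not queryVector:
        return []
    return searchSessionsByTopicRag(
        query=query,
        userId=userId,
        instanceId=instanceId,
        mandateId=mandateId,
        queryVector=queryVector,
    )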


def buildSessionSummariesForPrompt(

223 modules/features/commcoach/serviceCommcoachIndexer.py (Normal file)
@ -0,0 +1,223 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
CommCoach Session Indexer.
Indexes coaching session data into the knowledge store (pgvector) for RAG-based long-term memory.
Called after session completion to ensure semantic searchability across 20+ sessions.
"""

import logging
import uuid
import json
from typing import List, Dict, Any, Optional

logger = logging.getLogger(__name__)

_COACHING_FILE_PREFIX = "coaching-session:"


async def indexSessionData(
    sessionId: str,
    contextId: str,
    userId: str,
    featureInstanceId: str,
    mandateId: str,
    messages: List[Dict[str, Any]],
    summary: Optional[str],
    keyTopics: Optional[str],
    goals: Optional[List[Any]],
    insights: Optional[List[Any]],
    tasks: Optional[List[Dict[str, Any]]],
    contextTitle: str = "",
    knowledgeService=None,
):
    """Index a completed coaching session into the knowledge store.

    Creates ContentChunks with embeddings for:
    - Each User+Assistant message pair (maximum detail depth)
    - Session summary
    - Key topics (individually, for precise retrieval)
    - Current goals
    - New insights
    - Tasks (open + done)
    """
    if not knowledgeService:
        logger.warning("No knowledge service available for coaching indexer")
        return

    syntheticFileId = f"{_COACHING_FILE_PREFIX}{sessionId}"

    chunks = []

    # 1. Message pairs (User + Assistant) as individual chunks
    messagePairs = _extractMessagePairs(messages)
    for idx, pair in enumerate(messagePairs):
        chunks.append({
            "contentObjectId": f"{sessionId}:msg-pair:{idx}",
            "data": pair["text"],
            "contextRef": {
                "containerPath": f"session:{sessionId}",
                "location": f"message-pair-{idx}",
                "type": "coaching-message-pair",
                "contextId": contextId,
                "sessionId": sessionId,
                "contextTitle": contextTitle,
            },
        })

    # 2. Session summary
    if summary:
        chunks.append({
            "contentObjectId": f"{sessionId}:summary",
            "data": f"Session-Zusammenfassung ({contextTitle}): {summary}",
            "contextRef": {
                "containerPath": f"session:{sessionId}",
                "location": "summary",
                "type": "coaching-session-summary",
                "contextId": contextId,
                "sessionId": sessionId,
                "contextTitle": contextTitle,
            },
        })

    # 3. Key topics (each as separate chunk for precise retrieval)
    parsedTopics = _parseJsonSafe(keyTopics, [])
    for tidx, topic in enumerate(parsedTopics):
        topicStr = str(topic).strip()
        if topicStr:
            chunks.append({
                "contentObjectId": f"{sessionId}:topic:{tidx}",
                "data": f"Coaching-Thema ({contextTitle}): {topicStr}",
                "contextRef": {
                    "containerPath": f"session:{sessionId}",
                    "location": f"topic-{tidx}",
                    "type": "coaching-key-topic",
                    "contextId": contextId,
                    "sessionId": sessionId,
                    "contextTitle": contextTitle,
                },
            })

    # 4. Goals
    if goals:
        goalTexts = [g.get("text", g) if isinstance(g, dict) else str(g) for g in goals if g]
        if goalTexts:
            goalsStr = "\n".join(f"- {g}" for g in goalTexts)
            chunks.append({
                "contentObjectId": f"{sessionId}:goals",
                "data": f"Coaching-Ziele ({contextTitle}):\n{goalsStr}",
                "contextRef": {
                    "containerPath": f"session:{sessionId}",
                    "location": "goals",
                    "type": "coaching-goals",
                    "contextId": contextId,
                    "sessionId": sessionId,
                    "contextTitle": contextTitle,
                },
            })

    # 5. Insights
    if insights:
        insightTexts = [i.get("text", i) if isinstance(i, dict) else str(i) for i in insights if i]
        if insightTexts:
            insightsStr = "\n".join(f"- {t}" for t in insightTexts)
            chunks.append({
                "contentObjectId": f"{sessionId}:insights",
                "data": f"Coaching-Erkenntnisse ({contextTitle}):\n{insightsStr}",
                "contextRef": {
                    "containerPath": f"session:{sessionId}",
                    "location": "insights",
                    "type": "coaching-insights",
                    "contextId": contextId,
                    "sessionId": sessionId,
                    "contextTitle": contextTitle,
                },
            })

    # 6. Tasks
    if tasks:
        taskLines = []
        for t in tasks:
            status = t.get("status", "open")
            title = t.get("title", "")
            if title:
                taskLines.append(f"- [{status}] {title}")
        if taskLines:
            tasksStr = "\n".join(taskLines)
            chunks.append({
                "contentObjectId": f"{sessionId}:tasks",
                "data": f"Coaching-Aufgaben ({contextTitle}):\n{tasksStr}",
                "contextRef": {
                    "containerPath": f"session:{sessionId}",
                    "location": "tasks",
                    "type": "coaching-tasks",
                    "contextId": contextId,
                    "sessionId": sessionId,
                    "contextTitle": contextTitle,
                },
            })

    if not chunks:
        logger.info(f"No chunks to index for session {sessionId}")
        return

    logger.info(f"Indexing {len(chunks)} chunks for coaching session {sessionId}")

    try:
        contentObjects = [
            {
                "contentObjectId": c["contentObjectId"],
                "contentType": "text",
                "data": c["data"],
                "contextRef": c["contextRef"],
            }
            for c in chunks
        ]

        await knowledgeService.indexFile(
            fileId=syntheticFileId,
            fileName=f"coaching-session-{sessionId[:8]}",
            mimeType="application/x-coaching-session",
            userId=userId,
            featureInstanceId=featureInstanceId,
            mandateId=mandateId,
            contentObjects=contentObjects,
        )
        logger.info(f"Successfully indexed coaching session {sessionId} ({len(chunks)} chunks)")
    except Exception as e:
        logger.error(f"Failed to index coaching session {sessionId}: {e}", exc_info=True)


def _extractMessagePairs(messages: List[Dict[str, Any]]) -> List[Dict[str, str]]:
    """Extract User+Assistant pairs from message list."""
    pairs = []
    i = 0
    while i < len(messages):
        msg = messages[i]
        if msg.get("role") == "user":
            userText = (msg.get("content") or "").strip()
            assistantText = ""
            if i + 1 < len(messages) and messages[i + 1].get("role") == "assistant":
                assistantText = (messages[i + 1].get("content") or "").strip()
                i += 2
            else:
                i += 1
            if userText:
                text = f"Benutzer: {userText}"
                if assistantText:
                    text += f"\nCoach: {assistantText}"
                pairs.append({"text": text})
        else:
            i += 1
    return pairs
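
Worked example of the pairing rule implemented above; the sample messages are illustrative.

# sampleMessages = [
#     {"role": "user", "content": "Wie starte ich das Feedbackgespräch?"},
#     {"role": "assistant", "content": "Beginne mit einer konkreten Beobachtung."},
#     {"role": "user", "content": "Danke!"},
# ]
# _extractMessagePairs(sampleMessages) ->
# [{"text": "Benutzer: Wie starte ich das Feedbackgespräch?\nCoach: Beginne mit einer konkreten Beobachtung."},
#  {"text": "Benutzer: Danke!"}]
# A trailing user turn without a reply still produces its own chunk.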


def _parseJsonSafe(value, fallback):
    if not value:
        return fallback
    if isinstance(value, (list, dict)):
        return value
    try:
        return json.loads(value)
    except (json.JSONDecodeError, TypeError):
        return fallback

@ -6,11 +6,44 @@ Handles daily reminders and scheduled email summaries.
"""

import logging
import html
from typing import Dict, Any, List

logger = logging.getLogger(__name__)


def _buildReminderHtmlBlock(contextTitles: List[str], streakDays: int) -> str:
    rows = "".join(
        '<tr>'
        '<td valign="top" style="padding:0 10px 8px 0;font-size:15px;line-height:1.6;color:#2563eb;">•</td>'
        f'<td style="padding:0 0 8px 0;font-size:15px;line-height:1.6;color:#374151;">{html.escape(title)}</td>'
        '</tr>'
        for title in contextTitles[:3]
    )
    topicsBlock = (
        '<table role="presentation" width="100%" cellpadding="0" cellspacing="0" '
        'style="border-collapse:separate;border-spacing:0;border:1px solid #e5e7eb;border-radius:12px;'
        'background-color:#ffffff;margin:0 0 16px 0;">'
        '<tr><td style="padding:18px 20px;">'
        '<div style="font-size:12px;font-weight:700;letter-spacing:0.06em;text-transform:uppercase;'
        'color:#1d4ed8;margin:0 0 8px 0;">Aktive Coaching-Themen</div>'
        f'<table role="presentation" cellpadding="0" cellspacing="0" style="border-collapse:collapse;">{rows}</table>'
        '</td></tr></table>'
    )
    streakBlock = (
        '<table role="presentation" width="100%" cellpadding="0" cellspacing="0" '
        'style="border-collapse:separate;border-spacing:0;border:1px solid #dbeafe;border-radius:12px;'
        'background:linear-gradient(135deg,#eff6ff,#f8fbff);">'
        '<tr><td style="padding:18px 20px;">'
        '<div style="font-size:12px;font-weight:700;letter-spacing:0.06em;text-transform:uppercase;'
        'color:#1d4ed8;margin:0 0 8px 0;">Dein Rhythmus</div>'
        f'<div style="font-size:15px;line-height:1.7;color:#374151;">Aktueller Streak: '
        f'<strong>{int(streakDays or 0)} Tage</strong></div>'
        '</td></tr></table>'
    )
    return topicsBlock + streakBlock
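
Usage sketch for the block builder above; the titles and streak value are illustrative. The returned string is intended to be passed as rawHtmlBlock to _renderHtmlEmail, as the reminder job further down does.

# rawBlock = _buildReminderHtmlBlock(["Feedbackgespräch vorbereiten", "Delegation üben"], 5)
# rawBlock is email-safe HTML: a bordered "Aktive Coaching-Themen" table with at most
# three bullet rows, followed by the "Dein Rhythmus" card showing "Aktueller Streak: 5 Tage".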


def registerScheduledJobs(eventManagement):
    """Register CommCoach scheduled jobs with the event management system."""
    try:

@ -31,6 +64,7 @@ async def _runDailyReminders():
    from modules.connectors.connectorDbPostgre import DatabaseConnector
    from .datamodelCommcoach import CoachingUserProfile, CoachingContextStatus
    from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
    from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName

    dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
    db = DatabaseConnector(

@ -71,15 +105,21 @@ async def _runDailyReminders():
            contextTitles = [c.get("title", "Unbenannt") for c in contexts[:3]]
            contextList = ", ".join(contextTitles)

            subject = "Dein taegliches Coaching wartet"
            message = f"""
<h2>Zeit fuer dein Coaching</h2>
<p>Du hast aktive Coaching-Themen: <strong>{contextList}</strong></p>
<p>Nimm dir 10 Minuten fuer eine kurze Session. Konsistenz ist der Schluessel zu Fortschritt.</p>
<p>Dein aktueller Streak: <strong>{profile.get('streakDays', 0)} Tage</strong></p>
"""
            subject = "Dein tägliches Coaching wartet"
            mandateName = _resolveMandateName(profile.get("mandateId"))
            htmlMessage = _renderHtmlEmail(
                "Zeit für dein tägliches Coaching",
                [
                    f"Du hast aktuell {len(contexts)} aktive Coaching-Themen.",
                    "Schon 10 Minuten reichen oft, um einen Gedanken zu klären, eine nächste Aktion festzulegen oder ein Gespräch vorzubereiten.",
                    f"Im Fokus: {contextList}",
                ],
                mandateName,
                footerNote="Diese Erinnerung wurde automatisch auf Basis deiner CommCoach-Einstellungen versendet.",
                rawHtmlBlock=_buildReminderHtmlBlock(contextTitles, int(profile.get("streakDays", 0) or 0)),
            )

            messaging.send("email", user.email, subject, message)
            messaging.send("email", user.email, subject, htmlMessage)
            sentCount += 1
        except Exception as e:
            logger.warning(f"Failed to send reminder to user {profile.get('userId')}: {e}")

@ -136,7 +136,6 @@ class TestCoachingUserProfile:
        profile = CoachingUserProfile(
            userId="u1", mandateId="m1", instanceId="i1",
        )
        assert profile.preferredLanguage == "de-DE"
        assert profile.dailyReminderEnabled is False
        assert profile.emailSummaryEnabled is True
        assert profile.streakDays == 0

@ -31,7 +31,7 @@ class TestFeatureDefinition:
        assert defn["code"] == "commcoach"
        assert "label" in defn
        assert "icon" in defn
        assert defn["autoCreateInstance"] is True
        assert defn["autoCreateInstance"] is False


class TestRbacObjects:

@ -3,17 +3,33 @@
"""Neutralizer models: DataNeutraliserConfig and DataNeutralizerAttributes."""

import uuid
from enum import Enum
from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels


class DataNeutraliserConfig(BaseModel):
class DataScope(str, Enum):
    PERSONAL = "personal"
    FEATURE_INSTANCE = "featureInstance"
    MANDATE = "mandate"
    GLOBAL = "global"


class DataNeutraliserConfig(PowerOnModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the configuration", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    mandateId: str = Field(description="ID of the mandate this configuration belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
    featureInstanceId: str = Field(description="ID of the feature instance this configuration belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
    userId: str = Field(description="ID of the user who created this configuration", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
    enabled: bool = Field(default=True, description="Whether data neutralization is enabled", json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False})
    scope: str = Field(default="personal", description="Data visibility scope: personal, featureInstance, mandate, global", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
        {"value": "personal", "label": {"en": "Personal", "de": "Persönlich"}},
        {"value": "featureInstance", "label": {"en": "Feature Instance", "de": "Feature-Instanz"}},
        {"value": "mandate", "label": {"en": "Mandate", "de": "Mandant"}},
        {"value": "global", "label": {"en": "Global", "de": "Global"}},
    ]})
    neutralizationStatus: str = Field(default="not_required", description="Status of neutralization: pending, completed, failed, not_required", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    namesToParse: str = Field(default="", description="Multiline list of names to parse for neutralization", json_schema_extra={"frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False})
    sharepointSourcePath: str = Field(default="", description="SharePoint path to read files for neutralization", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
    sharepointTargetPath: str = Field(default="", description="SharePoint path to store neutralized files", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False})

@ -26,6 +42,8 @@ registerModelLabels(
        "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
        "userId": {"en": "User ID", "fr": "ID utilisateur"},
        "enabled": {"en": "Enabled", "fr": "Activé"},
        "scope": {"en": "Scope", "fr": "Portée"},
        "neutralizationStatus": {"en": "Neutralization Status", "fr": "Statut de neutralisation"},
        "namesToParse": {"en": "Names to Parse", "fr": "Noms à analyser"},
        "sharepointSourcePath": {"en": "Source Path", "fr": "Chemin source"},
        "sharepointTargetPath": {"en": "Target Path", "fr": "Chemin cible"},

@ -40,6 +58,17 @@ class DataNeutralizerAttributes(BaseModel):
    originalText: str = Field(description="Original text that was neutralized", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
    fileId: Optional[str] = Field(default=None, description="ID of the file this attribute belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    patternType: str = Field(description="Type of pattern that matched (email, phone, name, etc.)", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})


class DataNeutralizationSnapshot(BaseModel):
    """Stores the full neutralized text (with embedded placeholders) per source."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    mandateId: str = Field(description="Mandate scope")
    featureInstanceId: str = Field(default="", description="Feature instance scope")
    userId: str = Field(description="User who triggered neutralization")
    sourceLabel: str = Field(description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'")
    neutralizedText: str = Field(description="Full text with [type.uuid] placeholders embedded")
    placeholderCount: int = Field(default=0, description="Number of placeholders in the text")
registerModelLabels(
    "DataNeutralizerAttributes",
    {"en": "Neutralized Data Attribute", "fr": "Attribut de données neutralisées"},

@ -53,5 +82,18 @@ registerModelLabels(
        "patternType": {"en": "Pattern Type", "fr": "Type de modèle"},
    },
)
registerModelLabels(
    "DataNeutralizationSnapshot",
    {"en": "Neutralization Snapshot", "de": "Neutralisierungs-Snapshot"},
    {
        "id": {"en": "ID"},
        "mandateId": {"en": "Mandate ID"},
        "featureInstanceId": {"en": "Feature Instance ID"},
        "userId": {"en": "User ID"},
        "sourceLabel": {"en": "Source", "de": "Quelle"},
        "neutralizedText": {"en": "Neutralized Text", "de": "Neutralisierter Text"},
        "placeholderCount": {"en": "Placeholders", "de": "Platzhalter"},
    },
)
@ -11,6 +11,7 @@ from typing import Dict, List, Any, Optional
from modules.features.neutralization.datamodelFeatureNeutralizer import (
    DataNeutraliserConfig,
    DataNeutralizerAttributes,
    DataNeutralizationSnapshot,
)
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC

@ -212,6 +213,89 @@ class InterfaceFeatureNeutralizer:
            logger.error(f"Error getting attribute by ID: {str(e)}")
            return None

    def deleteAttributeById(self, attributeId: str) -> bool:
        """Delete a single neutralization attribute by its ID"""
        try:
            attribute = self.getAttributeById(attributeId)
            if not attribute:
                logger.warning(f"Attribute {attributeId} not found for deletion")
                return False

            self.db.recordDelete(DataNeutralizerAttributes, attributeId)
            logger.info(f"Deleted neutralization attribute {attributeId}")
            return True
        except Exception as e:
            logger.error(f"Error deleting attribute by ID: {str(e)}")
            return False

    # ------------------------------------------------------------------
    # Snapshot CRUD
    # ------------------------------------------------------------------

    def getSnapshots(self) -> List[DataNeutralizationSnapshot]:
        """Return all neutralization snapshots for the current mandate + feature instance."""
        try:
            _filter: Dict[str, Any] = {"mandateId": self.mandateId}
            if self.featureInstanceId:
                _filter["featureInstanceId"] = self.featureInstanceId
            rows = getRecordsetWithRBAC(
                self.db,
                DataNeutralizationSnapshot,
                self.currentUser,
                recordFilter=_filter,
                mandateId=self.mandateId,
            )
            return [
                DataNeutralizationSnapshot(**{k: v for k, v in r.items() if not k.startswith("_")})
                for r in rows
            ]
        except Exception as e:
            logger.error(f"Error getting snapshots: {e}")
            return []

    def clearSnapshots(self) -> int:
        """Delete all snapshots for the current feature-instance scope. Returns count deleted."""
        try:
            _filter: Dict[str, Any] = {"mandateId": self.mandateId}
            if self.featureInstanceId:
                _filter["featureInstanceId"] = self.featureInstanceId
            existing = self.db.getRecordset(DataNeutralizationSnapshot, recordFilter=_filter)
            for row in existing:
                self.db.recordDelete(DataNeutralizationSnapshot, row["id"])
            return len(existing)
        except Exception as e:
            logger.error(f"Error clearing snapshots: {e}")
            return 0

    def createSnapshot(
        self,
        sourceLabel: str,
        neutralizedText: str,
        placeholderCount: int = 0,
    ) -> Optional[DataNeutralizationSnapshot]:
        """Persist one neutralization snapshot."""
        try:
            if not self.userId:
                logger.warning("Cannot create snapshot: missing userId")
                return None
            snap = DataNeutralizationSnapshot(
                mandateId=self.mandateId or "",
                featureInstanceId=self.featureInstanceId or "",
                userId=self.userId,
                sourceLabel=sourceLabel,
                neutralizedText=neutralizedText,
                placeholderCount=placeholderCount,
            )
            created = self.db.recordCreate(DataNeutralizationSnapshot, snap.model_dump())
            return DataNeutralizationSnapshot(**{k: v for k, v in created.items() if not k.startswith("_")})
        except Exception as e:
            logger.error(f"Error creating snapshot: {e}")
            return None
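
A short usage sketch of the snapshot CRUD above. The factory call mirrors how the playground obtains the interface elsewhere in this commit; the user, mandate and instance values are placeholders.

# interface = getInterface(currentUser, mandateId="m1", featureInstanceId="fi1")
# interface.createSnapshot("Prompt", "Kontakt: [email.1b2c...] bitte anrufen", placeholderCount=1)
# latest = interface.getSnapshots()      # RBAC-filtered snapshots for this mandate/instance
# removed = interface.clearSnapshots()   # returns the number of deleted snapshots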

    # ------------------------------------------------------------------
    # Attribute CRUD
    # ------------------------------------------------------------------

    def createAttribute(
        self,
        attributeId: str,
@ -45,34 +45,55 @@ RESOURCE_OBJECTS = [

# Template roles for this feature
TEMPLATE_ROLES = [
    {
        "roleLabel": "neutralization-viewer",
        "description": {
            "en": "Neutralization Viewer - View neutralization data (read-only)",
            "de": "Neutralisierungs-Betrachter - Neutralisierungsdaten einsehen (nur lesen)",
            "fr": "Visualiseur neutralisation - Consulter les données de neutralisation (lecture seule)",
        },
        "accessRules": [
            {"context": "UI", "item": "ui.feature.neutralization.playground", "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
        ],
    },
    {
        "roleLabel": "neutralization-user",
        "description": {
            "en": "Neutralization User - Use neutralization tools and manage own data",
            "de": "Neutralisierungs-Benutzer - Neutralisierungstools nutzen und eigene Daten verwalten",
            "fr": "Utilisateur neutralisation - Utiliser les outils et gérer ses propres données",
        },
        "accessRules": [
            {"context": "UI", "item": "ui.feature.neutralization.playground", "view": True},
            {"context": "UI", "item": "ui.feature.neutralization.attributes", "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
        ],
    },
    {
        "roleLabel": "neutralization-admin",
        "description": {
            "en": "Neutralization Administrator - Full access to neutralization settings and data",
            "de": "Neutralisierungs-Administrator - Vollzugriff auf Neutralisierungs-Einstellungen und Daten",
            "fr": "Administrateur neutralisation - Accès complet aux paramètres et données"
            "fr": "Administrateur neutralisation - Accès complet aux paramètres et données",
        },
        "accessRules": [
            # Full UI access (all views including admin views)
            {"context": "UI", "item": None, "view": True},
            # Full DATA access
            {"context": "DATA", "item": None, "view": True, "read": "a", "create": "a", "update": "a", "delete": "a"},
        ]
        ],
    },
    {
        "roleLabel": "neutralization-analyst",
        "description": {
            "en": "Neutralization Analyst - Analyze and process neutralization data",
            "de": "Neutralisierungs-Analyst - Neutralisierungsdaten analysieren und verarbeiten",
            "fr": "Analyste neutralisation - Analyser et traiter les données de neutralisation"
            "fr": "Analyste neutralisation - Analyser et traiter les données de neutralisation",
        },
        "accessRules": [
            # UI access to specific views - vollqualifizierte ObjectKeys
            {"context": "UI", "item": "ui.feature.neutralization.playground", "view": True},
            {"context": "UI", "item": "ui.feature.neutralization.attributes", "view": True},
            # Group-level DATA access (read-only for sensitive config)
            {"context": "DATA", "item": None, "view": True, "read": "g", "create": "n", "update": "n", "delete": "n"},
        ]
        ],
    },
]
@ -6,7 +6,8 @@ from typing import Any, Dict, List, Optional
from urllib.parse import urlparse, unquote

from modules.datamodels.datamodelUam import User
from .datamodelFeatureNeutralizer import DataNeutralizerAttributes, DataNeutraliserConfig
from .datamodelFeatureNeutralizer import DataNeutralizerAttributes, DataNeutraliserConfig, DataNeutralizationSnapshot
from .interfaceFeatureNeutralizer import getInterface as _getNeutralizerInterface
from modules.serviceHub import getInterface as getServices

logger = logging.getLogger(__name__)

@ -85,7 +86,7 @@ class NeutralizationPlayground:
                'neutralized_file_id': None,
                'processed_info': {'type': 'error', 'error': 'File could not be decoded as text. Supported: UTF-8, Latin-1. For PDF/Word/Excel, use supported binary formats.'}
            }
        result = self.services.neutralization.processText(text_content)
        result = await self.services.neutralization.processTextAsync(text_content)
        result['neutralized_file_name'] = f'neutralized_{filename}'
        # Save neutralized text as file to user files
        if self.services.interfaceDbComponent and result.get('neutralized_text') is not None:

@ -129,6 +130,11 @@ class NeutralizationPlayground:
        }


    # Delete a single attribute by ID
    def deleteAttribute(self, attributeId: str) -> bool:
        interface = _getNeutralizerInterface(self.currentUser, self.mandateId, self.featureInstanceId)
        return interface.deleteAttributeById(attributeId)

    # Cleanup attributes
    def cleanAttributes(self, fileId: str) -> bool:
        return self.services.neutralization.deleteNeutralizationAttributes(fileId)

@ -192,12 +198,28 @@ class NeutralizationPlayground:
        """Resolve UIDs in neutralized text back to original text"""
        return self.services.neutralization.resolveText(text)

    def getSnapshots(self) -> List[DataNeutralizationSnapshot]:
        """Return stored neutralization text snapshots."""
        try:
            return self.services.neutralization.getSnapshots()
        except Exception as e:
            logger.error(f"Error getting snapshots: {e}")
            return []

    def getAttributes(self, fileId: str = None) -> List[DataNeutralizerAttributes]:
        """Get neutralization attributes, optionally filtered by file ID"""
        try:
            allAttributes = self.services.neutralization.getAttributes()
            if fileId:
                return [attr for attr in allAttributes if attr.fileId == fileId]
                want = str(fileId).strip()

                def _matches(a: DataNeutralizerAttributes) -> bool:
                    af = a.fileId
                    if af is None or (isinstance(af, str) and not str(af).strip()):
                        return False
                    return str(af).strip() == want

                return [attr for attr in allAttributes if _matches(attr)]
            return allAttributes
        except Exception as e:
            logger.error(f"Error getting attributes: {str(e)}")

@ -390,7 +412,7 @@ class SharepointProcessor:
                textContent = fileContent.decode('utf-8')
            except UnicodeDecodeError:
                textContent = fileContent.decode('latin-1')
            result = self.services.neutralization.processText(textContent)
            result = await self.services.neutralization.processTextAsync(textContent)
            content_to_upload = (result.get('neutralized_text') or '').encode('utf-8')

            neutralizedFilename = f"neutralized_{fileInfo['name']}"
@ -8,12 +8,33 @@ import logging
from modules.auth import limiter, getRequestContext, RequestContext

# Import interfaces
from .datamodelFeatureNeutralizer import DataNeutraliserConfig, DataNeutralizerAttributes
from .datamodelFeatureNeutralizer import DataNeutraliserConfig, DataNeutralizerAttributes, DataNeutralizationSnapshot
from .neutralizePlayground import NeutralizationPlayground

# Configure logger
logger = logging.getLogger(__name__)


def _assertFeatureInstancePathMatchesContext(featureInstanceIdFromPath: str, context: RequestContext) -> None:
    """Reject path/instance mismatch when request context already carries an instance id."""
    ctxId = str(context.featureInstanceId).strip() if getattr(context, "featureInstanceId", None) else ""
    pathId = (featureInstanceIdFromPath or "").strip()
    if ctxId and pathId and pathId != ctxId:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Feature instance id in URL does not match request context (X-Instance-Id)",
        )
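
Client-side sketch of the scoped-route contract enforced by the guard above; host, ids and token are placeholders. The guard only fires when both the path segment and the X-Instance-Id context disagree.

# import requests
# resp = requests.get(
#     "https://example-host/api/neutralization/fi-123/attributes",
#     headers={"X-Instance-Id": "fi-123", "Authorization": "Bearer <token>"},
#     params={"fileId": "file-42"},
# )
# # Calling /api/neutralization/fi-999/attributes with X-Instance-Id: fi-123 returns 403.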


def _fetchNeutralizationAttributes(context: RequestContext, fileId: Optional[str]) -> List[DataNeutralizerAttributes]:
    service = NeutralizationPlayground(
        context.user,
        str(context.mandateId) if context.mandateId else "",
        featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
    )
    return service.getAttributes(fileId)


# Create router for neutralization endpoints
router = APIRouter(
    prefix="/api/neutralization",

@ -208,15 +229,9 @@ def get_neutralization_attributes(
) -> List[DataNeutralizerAttributes]:
    """Get neutralization attributes, optionally filtered by file ID"""
    try:
        service = NeutralizationPlayground(
            context.user,
            str(context.mandateId) if context.mandateId else "",
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
        )
        attributes = service.getAttributes(fileId)

        return attributes

        return _fetchNeutralizationAttributes(context, fileId)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting neutralization attributes: {str(e)}")
        raise HTTPException(

@ -224,6 +239,72 @@ def get_neutralization_attributes(
            detail=f"Error getting neutralization attributes: {str(e)}"
        )


@router.get("/{feature_instance_id}/attributes", response_model=List[DataNeutralizerAttributes])
@limiter.limit("30/minute")
def get_neutralization_attributes_scoped(
    request: Request,
    feature_instance_id: str = Path(..., description="Workspace / feature instance id (must match X-Instance-Id when set)"),
    fileId: Optional[str] = Query(None, description="Filter by file ID"),
    context: RequestContext = Depends(getRequestContext),
) -> List[DataNeutralizerAttributes]:
    """Same as GET /attributes; path includes instance id for workspace UI compatibility."""
    _assertFeatureInstancePathMatchesContext(feature_instance_id, context)
    try:
        return _fetchNeutralizationAttributes(context, fileId)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting neutralization attributes: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error getting neutralization attributes: {str(e)}"
        )

@router.get("/snapshots", response_model=List[DataNeutralizationSnapshot])
@limiter.limit("30/minute")
def get_neutralization_snapshots(
    request: Request,
    context: RequestContext = Depends(getRequestContext),
) -> List[DataNeutralizationSnapshot]:
    """Return neutralized-text snapshots (full text with placeholders) for the current feature instance."""
    try:
        service = NeutralizationPlayground(
            context.user,
            str(context.mandateId) if context.mandateId else "",
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        return service.getSnapshots()
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting neutralization snapshots: {e}")
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))


@router.get("/{feature_instance_id}/snapshots", response_model=List[DataNeutralizationSnapshot])
@limiter.limit("30/minute")
def get_neutralization_snapshots_scoped(
    request: Request,
    feature_instance_id: str = Path(..., description="Workspace instance id (must match X-Instance-Id when set)"),
    context: RequestContext = Depends(getRequestContext),
) -> List[DataNeutralizationSnapshot]:
    """Same as GET /snapshots; path includes instance id for workspace UI (explicit scope)."""
    _assertFeatureInstancePathMatchesContext(feature_instance_id, context)
    try:
        service = NeutralizationPlayground(
            context.user,
            str(context.mandateId) if context.mandateId else "",
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        return service.getSnapshots()
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting neutralization snapshots (scoped): {e}")
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))


@router.post("/process-sharepoint", response_model=Dict[str, Any])
@limiter.limit("5/minute")
async def process_sharepoint_files(

@ -317,6 +398,108 @@ def get_neutralization_stats(
        detail=f"Error getting neutralization stats: {str(e)}"
    )

def _deleteSingleNeutralizationAttribute(context: RequestContext, attributeId: str) -> Dict[str, str]:
    service = NeutralizationPlayground(
        context.user,
        str(context.mandateId) if context.mandateId else "",
        featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
    )
    success = service.deleteAttribute(attributeId)
    if success:
        return {"message": f"Attribute {attributeId} deleted"}
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"Attribute {attributeId} not found",
    )


@router.delete("/attributes/single/{attributeId}", response_model=Dict[str, str])
@limiter.limit("30/minute")
def deleteAttribute(
    request: Request,
    attributeId: str = Path(..., description="Attribute ID to delete"),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, str]:
    """Delete a single neutralization attribute by ID."""
    try:
        return _deleteSingleNeutralizationAttribute(context, attributeId)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting attribute: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))


@router.delete("/{feature_instance_id}/attributes/single/{attributeId}", response_model=Dict[str, str])
@limiter.limit("30/minute")
def deleteAttributeScoped(
    request: Request,
    feature_instance_id: str = Path(..., description="Workspace / feature instance id"),
    attributeId: str = Path(..., description="Attribute ID to delete"),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, str]:
    """Same as DELETE /attributes/single/{attributeId}; path includes instance id for workspace UI."""
    _assertFeatureInstancePathMatchesContext(feature_instance_id, context)
    try:
        return _deleteSingleNeutralizationAttribute(context, attributeId)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting attribute: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))


def _retriggerNeutralizationBody(context: RequestContext, fileId: str) -> Dict[str, str]:
    if not fileId:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="fileId is required",
        )
    service = NeutralizationPlayground(
        context.user,
        str(context.mandateId) if context.mandateId else "",
        featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
    )
    service.cleanupFileAttributes(fileId)
    return {"message": f"Neutralization re-triggered for file {fileId}", "fileId": fileId}


@router.post("/retrigger", response_model=Dict[str, str])
@limiter.limit("10/minute")
def retriggerNeutralization(
    request: Request,
    retriggerData: Dict[str, str] = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, str]:
    """Re-trigger neutralization for a specific file."""
    try:
        return _retriggerNeutralizationBody(context, retriggerData.get("fileId", ""))
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error re-triggering neutralization: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/{feature_instance_id}/retrigger", response_model=Dict[str, str])
@limiter.limit("10/minute")
def retriggerNeutralizationScoped(
    request: Request,
    feature_instance_id: str = Path(..., description="Workspace / feature instance id"),
    retriggerData: Dict[str, str] = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, str]:
    """Same as POST /retrigger; path includes instance id for workspace UI compatibility."""
    _assertFeatureInstancePathMatchesContext(feature_instance_id, context)
    try:
        return _retriggerNeutralizationBody(context, retriggerData.get("fileId", ""))
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error re-triggering neutralization: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))


@router.delete("/attributes/{fileId}", response_model=Dict[str, str])
@limiter.limit("10/minute")
def cleanup_file_attributes(
@ -60,6 +60,12 @@ class NeutralizationService:
                mandateId=serviceCenter.mandateId or dbApp.mandateId,
                featureInstanceId=getattr(serviceCenter, 'featureInstanceId', None) or getattr(dbApp, 'featureInstanceId', None)
            )
        elif serviceCenter and getattr(serviceCenter, "user", None):
            self.interfaceNeutralizer = getNeutralizerInterface(
                currentUser=serviceCenter.user,
                mandateId=getattr(serviceCenter, 'mandateId', None) or getattr(serviceCenter, 'mandate_id', None),
                featureInstanceId=getattr(serviceCenter, 'featureInstanceId', None) or getattr(serviceCenter, 'feature_instance_id', None),
            )

        namesList = NamesToParse if isinstance(NamesToParse, list) else []
        self.NamesToParse = namesList

@ -82,11 +88,213 @@ class NeutralizationService:

    # Public API: process text or file

    def processText(self, text: str) -> Dict[str, Any]:
        """Neutralize a raw text string and return a standard result dict."""
        result = self._neutralizeText(text, 'text')
        self._persistAttributes(result.get('mapping', {}), None)
        return result
    _NEUT_INSTRUCTION = (
        "Analyze the following text and identify ALL sensitive content that must be neutralized:\n"
        "1. Personal data (PII): names of persons, email addresses, phone numbers, "
        "physical addresses, ID numbers, dates of birth, financial data (IBAN, account numbers), "
        "social security numbers\n"
        "2. Protected business logic: proprietary algorithms, trade secrets, confidential "
        "processes, internal procedures, code snippets that reveal implementation details\n"
        "3. Named entities: company names, product names, project names, brand names\n\n"
        "Return ONLY a JSON array (no markdown, no explanation):\n"
        '[{"text":"exact substring","type":"name|email|phone|address|id|financial|logic|company|product|location|other"}]\n\n'
        "Rules:\n"
        "- Every entry's 'text' must be an exact, verbatim substring of the input.\n"
        "- Do NOT include generic words, common language constructs or non-sensitive terms.\n"
        "- If nothing is sensitive, return [].\n\n"
    )
    _BYTES_PER_TOKEN = 3
    _SELECTOR_MAX_RATIO = 0.8
    _CHUNK_SAFETY_MARGIN = 0.9

    def _resolveNeutModel(self):
        """Query the model registry for the best NEUTRALIZATION_TEXT model.
        Returns the model object (with contextLength etc.) or None."""
        try:
            from modules.aicore.aicoreModelRegistry import modelRegistry
            from modules.aicore.aicoreModelSelector import modelSelector as _modSel
            from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum

            _models = modelRegistry.getAvailableModels()
            _opts = AiCallOptions(operationType=OperationTypeEnum.NEUTRALIZATION_TEXT)
            _failover = _modSel.getFailoverModelList("x", "", _opts, _models)
            return _failover[0] if _failover else None
        except Exception as _e:
            logger.warning(f"_resolveNeutModel failed: {_e}")
            return None

    def _calcMaxChunkChars(self, model) -> int:
        """Derive the maximum text-chunk size (in characters) from the selected
        model's contextLength, mirroring the rules in aicoreModelSelector:
        promptTokens = promptBytes / 3 must be <= contextLength * 0.8
        Subtract the instruction overhead and apply a safety margin."""
        if not model or getattr(model, 'contextLength', 0) <= 0:
            return 5000
        _instructionBytes = len(self._NEUT_INSTRUCTION.encode('utf-8')) + 30
        _maxPromptBytes = int(model.contextLength * self._SELECTOR_MAX_RATIO * self._BYTES_PER_TOKEN)
        _maxChunkChars = int((_maxPromptBytes - _instructionBytes) * self._CHUNK_SAFETY_MARGIN)
        return max(_maxChunkChars, 500)
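
Worked example of the chunk-size arithmetic described in the docstring above; the context length and instruction size are illustrative figures, not measured values.

# contextLength     = 8000 tokens
# _maxPromptBytes   = int(8000 * 0.8 * 3)      = 19200
# _instructionBytes ~ 800 + 30                 = 830
# _maxChunkChars    = int((19200 - 830) * 0.9) = 16533
# so roughly 16.5k characters per chunk stay inside the selector's 80% token budget.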

    @staticmethod
    def _splitTextIntoChunks(text: str, maxChars: int) -> List[str]:
        """Split *text* into chunks of at most *maxChars*, preferring paragraph
        then sentence boundaries so that the LLM sees coherent blocks."""
        if len(text) <= maxChars:
            return [text]

        chunks: List[str] = []
        remaining = text
        while remaining:
            if len(remaining) <= maxChars:
                chunks.append(remaining)
                break
            _cut = maxChars
            _para = remaining.rfind("\n\n", 0, _cut)
            if _para > maxChars // 3:
                _cut = _para + 2
            else:
                _nl = remaining.rfind("\n", 0, _cut)
                if _nl > maxChars // 3:
                    _cut = _nl + 1
                else:
                    _dot = remaining.rfind(". ", 0, _cut)
                    if _dot > maxChars // 3:
                        _cut = _dot + 2
                    else:
                        _sp = remaining.rfind(" ", 0, _cut)
                        if _sp > maxChars // 3:
                            _cut = _sp + 1
            chunks.append(remaining[:_cut])
            remaining = remaining[_cut:]
        return chunks
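
A small illustration of the boundary preference implemented above (values illustrative):

# text = ("A" * 4000) + "\n\n" + ("B" * 4000)
# NeutralizationService._splitTextIntoChunks(text, 5000)
# -> first chunk ends right after the blank line (the "\n\n" at offset 4000 is past
#    maxChars // 3, so it beats a mid-word cut); the second chunk is the run of "B"s.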

    async def _analyseChunk(self, aiService, chunkText: str) -> List[dict]:
        """Send one chunk to the NEUTRALIZATION_TEXT model, return raw findings list."""
        from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum

        _prompt = self._NEUT_INSTRUCTION + "Text to analyze:\n---\n" + chunkText + "\n---"
        _request = AiCallRequest(
            prompt=_prompt,
            options=AiCallOptions(operationType=OperationTypeEnum.NEUTRALIZATION_TEXT),
        )
        _response = await aiService.callAi(_request)
        if not _response or not getattr(_response, 'content', None):
            raise RuntimeError(
                "Neutralization AI call returned no response "
                "(no model available for NEUTRALIZATION_TEXT?)"
            )
        if getattr(_response, 'errorCount', 0) > 0 or getattr(_response, 'modelName', '') == 'error':
            raise RuntimeError(
                f"Neutralization AI call failed: {_response.content}"
            )
        _content = _response.content.strip()
        if _content.startswith("```"):
            _content = _content.split("\n", 1)[-1].rsplit("```", 1)[0].strip()
        try:
            return json.loads(_content)
        except json.JSONDecodeError:
            _bracket = _content.find("[")
            if _bracket >= 0:
                try:
                    return json.loads(_content[_bracket:])
                except json.JSONDecodeError:
                    pass
            return []

    async def processTextAsync(self, text: str, fileId: Optional[str] = None) -> Dict[str, Any]:
        """AI-powered text neutralization with automatic chunking.

        If *text* exceeds the safe token budget for the neutralization model
        it is split into smaller chunks, each analysed separately. Findings
        are merged and de-duplicated before placeholder replacement.

        Regex patterns run as a supplementary pass to catch anything the
        model missed.
        """
        import uuid as _uuid

        aiService = None
        if self._getService:
            try:
                aiService = self._getService("ai")
            except Exception:
                pass

        aiMapping: Dict[str, str] = {}

        if not aiService or not hasattr(aiService, 'callAi'):
            raise RuntimeError("Neutralization requires an AI service but none is available")

        if text.strip():
            _neutModel = self._resolveNeutModel()
            _maxChunkChars = self._calcMaxChunkChars(_neutModel)
            logger.info(
                f"processTextAsync: model={getattr(_neutModel, 'name', '?')}, "
                f"contextLength={getattr(_neutModel, 'contextLength', '?')} tokens, "
                f"maxChunkChars={_maxChunkChars}"
            )

            _chunks = self._splitTextIntoChunks(text, _maxChunkChars)
            if len(_chunks) > 1:
                logger.info(
                    f"processTextAsync: text ({len(text)} chars) "
                    f"split into {len(_chunks)} chunk(s) of max {_maxChunkChars} chars"
                )

            for _chunkIdx, _chunkText in enumerate(_chunks):
                _findings = await self._analyseChunk(aiService, _chunkText)
                if not isinstance(_findings, list):
                    continue
                for _f in _findings:
                    if not isinstance(_f, dict):
                        continue
                    _origText = _f.get("text", "")
                    _patType = _f.get("type", "other").lower()
                    if not _origText or _origText not in text:
                        continue
                    if _origText in aiMapping:
                        continue
                    _uid = str(_uuid.uuid4())
                    _placeholder = f"[{_patType}.{_uid}]"
                    aiMapping[_origText] = _placeholder

            logger.info(f"AI neutralization found {len(aiMapping)} item(s)"
                        + (f" across {len(_chunks)} chunk(s)" if len(_chunks) > 1 else ""))

        neutralizedText = text
        for _orig, _ph in sorted(aiMapping.items(), key=lambda x: -len(x[0])):
            neutralizedText = neutralizedText.replace(_orig, _ph)

        regexMapping: Dict[str, str] = {}
        finalText = neutralizedText

        allMapping = {**aiMapping, **regexMapping}
        if allMapping:
            _loop = asyncio.get_event_loop()
            await _loop.run_in_executor(
                None, self._persistAttributes, allMapping, fileId
            )
            logger.debug(f"processTextAsync: {len(allMapping)} attribute(s) persisted")

        return {
            'neutralized_text': finalText,
            'mapping': allMapping,
            'attributes': [
                NeutralizationAttribute(original=k, placeholder=v)
                for k, v in allMapping.items()
            ],
            'processed_info': {'type': 'text', 'ai_findings': len(aiMapping), 'regex_findings': len(regexMapping)},
        }
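
Sketch of how a caller might consume the result dict above; the sample text, the exact substrings the NEUTRALIZATION_TEXT model flags, and the resulting counts are illustrative only.

# result = await neutralizationService.processTextAsync(
#     "Bitte Max Mustermann unter max@example.com kontaktieren.")
# result['neutralized_text']  -> "Bitte [name.<uuid>] unter [email.<uuid>] kontaktieren."
# result['mapping']           -> {"Max Mustermann": "[name.<uuid>]", "max@example.com": "[email.<uuid>]"}
# result['processed_info']    -> {"type": "text", "ai_findings": 2, "regex_findings": 0}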
|
||||
|
||||
    def processText(self, text: str, fileId: Optional[str] = None) -> Dict[str, Any]:
        """Sync wrapper around processTextAsync. Propagates errors."""
        try:
            return asyncio.run(self.processTextAsync(text, fileId))
        except RuntimeError as _re:
            if "cannot be called from a running event loop" in str(_re):
                loop = asyncio.get_event_loop()
                return loop.run_until_complete(self.processTextAsync(text, fileId))
            raise

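    # Usage sketch (hypothetical, for illustration): from synchronous code the
    # wrapper above is called directly; inside a running event loop the coroutine
    # should be awaited instead. `service`, the sample text, and the placeholder
    # values are assumed names, not taken from this commit.
    #
    #     result = service.processText("Contact Anna Muster, anna@example.com", fileId=None)
    #     result['neutralized_text']   # text with placeholders such as '[email.<uuid>]'
    #     result['mapping']            # {original string -> placeholder}
    #
    #     result = await service.processTextAsync(longText, fileId=someFileId)  # async callers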
    def processFile(self, fileId: str) -> Dict[str, Any]:
        """Neutralize a file referenced by its fileId using component interface.

@ -153,8 +361,7 @@ class NeutralizationService:
            raise ValueError("Unable to decode file content as text.")
        textContent = decoded

        result = self._neutralizeText(textContent, textType)
        self._persistAttributes(result.get('mapping', {}), fileId)
        result = self.processText(textContent, fileId)
        if fileName:
            result['neutralized_file_name'] = f"neutralized_{fileName}"
        result['file_id'] = fileId

@ -203,6 +410,89 @@ class NeutralizationService:
                'processed_info': {'type': 'binary', 'status': 'error', 'error': str(e)}
            }

    async def processImageAsync(self, imageBytes: bytes, fileName: str, mimeType: str = "image/png") -> Dict[str, Any]:
        """Analyze image via internal vision model to check for sensitive content.

        Returns dict with:
        - 'status': 'ok' | 'blocked' | 'error'
        - 'hasSensitiveContent': bool
        - 'analysis': str (model's analysis text, if available)
        - 'processed_info': dict with details

        Uses NEUTRALIZATION_IMAGE operation type → only internal Private-LLM models.
        If no internal model available → returns 'blocked'.
        """
        import base64
        try:
            aiService = None
            if self._getService:
                try:
                    aiService = self._getService("ai")
                except Exception:
                    pass
            if not aiService or not hasattr(aiService, 'callAi'):
                logger.warning(f"processImage: AI service not available — blocking image '{fileName}'")
                return {
                    'status': 'blocked',
                    'hasSensitiveContent': True,
                    'analysis': '',
                    'processed_info': {'type': 'image', 'status': 'blocked', 'reason': 'AI service unavailable'}
                }

            from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum

            _b64Data = base64.b64encode(imageBytes).decode('utf-8')
            _dataUrl = f"data:{mimeType};base64,{_b64Data}"

            _prompt = (
                "Analyze this image for personally identifiable information (PII). "
                "Check for: names, addresses, phone numbers, email addresses, ID numbers, "
                "faces, signatures, handwritten text, license plates, financial data. "
                "Respond with JSON: {\"hasPII\": true/false, \"findings\": [\"...\"]}"
            )

            _request = AiCallRequest(
                prompt=_prompt,
                options=AiCallOptions(operationType=OperationTypeEnum.NEUTRALIZATION_IMAGE),
                messages=[{"role": "user", "content": [
                    {"type": "text", "text": _prompt},
                    {"type": "image_url", "image_url": {"url": _dataUrl}},
                ]}],
            )

            _response = await aiService.callAi(_request)

            _hasPII = False
            _analysis = _response.content if _response and hasattr(_response, 'content') else ''
            if _analysis:
                _lowerAnalysis = _analysis.lower()
                if '"haspii": true' in _lowerAnalysis or '"haspii":true' in _lowerAnalysis:
                    _hasPII = True

            return {
                'status': 'blocked' if _hasPII else 'ok',
                'hasSensitiveContent': _hasPII,
                'analysis': _analysis,
                'processed_info': {'type': 'image', 'status': 'blocked' if _hasPII else 'ok', 'fileName': fileName}
            }
        except Exception as e:
            logger.error(f"processImage failed for '{fileName}': {e}")
            return {
                'status': 'blocked',
                'hasSensitiveContent': True,
                'analysis': '',
                'processed_info': {'type': 'image', 'status': 'error', 'error': str(e)}
            }

    def processImage(self, imageBytes: bytes, fileName: str, mimeType: str = "image/png") -> Dict[str, Any]:
        """Sync wrapper for processImageAsync. Uses asyncio.run when no event loop is running."""
        import asyncio
        try:
            return asyncio.run(self.processImageAsync(imageBytes, fileName, mimeType))
        except RuntimeError:
            loop = asyncio.get_event_loop()
            return loop.run_until_complete(self.processImageAsync(imageBytes, fileName, mimeType))

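    # Usage sketch (hypothetical, for illustration): callers branch on the returned
    # status; the file name and bytes below are assumed, not from this commit.
    #
    #     check = await service.processImageAsync(imageBytes, "scan.png")
    #     if check['status'] == 'ok':
    #         keep_image()    # no PII detected
    #     else:
    #         drop_image()    # 'blocked': PII found, no internal vision model, or analysis error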
    def resolveText(self, text: str) -> str:
        if not self.interfaceNeutralizer:
            return text

@ -236,6 +526,22 @@ class NeutralizationService:
            return False
        return self.interfaceNeutralizer.deleteNeutralizationAttributes(fileId)

    def getSnapshots(self):
        if not self.interfaceNeutralizer:
            return []
        return self.interfaceNeutralizer.getSnapshots()

    def clearSnapshots(self) -> int:
        if not self.interfaceNeutralizer:
            return 0
        return self.interfaceNeutralizer.clearSnapshots()

    def saveSnapshot(self, sourceLabel: str, neutralizedText: str, placeholderCount: int = 0):
        if not self.interfaceNeutralizer:
            logger.warning("saveSnapshot: interfaceNeutralizer is None — snapshot not stored")
            return None
        return self.interfaceNeutralizer.createSnapshot(sourceLabel, neutralizedText, placeholderCount)

    def _persistAttributes(self, mapping: Dict[str, str], fileId: Optional[str]) -> None:
        """Persist mapping to DB for resolve to work. mapping: originalText -> placeholder e.g. '[email.uuid]'"""
        if not self.interfaceNeutralizer or not mapping:

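        # Shape of `mapping` (hypothetical example, for illustration): original text
        # maps to the placeholder generated in processTextAsync, e.g.
        #     {"anna@example.com": "[email.6f1c...]", "+41 79 000 00 00": "[phone.9c2b...]"}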
@ -295,10 +601,22 @@ class NeutralizationService:
            p = part if isinstance(part, dict) else part.model_dump() if hasattr(part, 'model_dump') else part
            type_group = p.get('typeGroup', '')
            data = p.get('data', '')
            if type_group in ('binary', 'image') or not (data and str(data).strip()):
            if type_group == 'binary' or not (data and str(data).strip()):
                neutralized_parts.append(part)
                continue
            nr = self._neutralizeText(str(data), 'text' if type_group != 'table' else 'csv')
            if type_group == 'image':
                import base64 as _b64img
                try:
                    _imgBytes = _b64img.b64decode(str(data))
                    _imgResult = await self.processImageAsync(_imgBytes, fileName)
                    if _imgResult.get("status") == "ok":
                        neutralized_parts.append(part)
                    else:
                        logger.warning(f"Image part blocked in binary file '{fileName}' (PII detected), removing")
                except Exception as _imgErr:
                    logger.warning(f"Image check failed in binary file '{fileName}': {_imgErr}, removing (fail-safe)")
                continue
            nr = await self.processTextAsync(str(data), fileId)
            proc = nr.get('processed_info', {}) or {}
            if isinstance(proc, dict) and proc.get('type') == 'error':
                neutralization_error = proc.get('error', 'Neutralization failed')

@ -307,7 +625,6 @@ class NeutralizationService:
            all_mapping.update(mapping)
            new_part = {**p, 'data': neu_text}
            neutralized_parts.append(new_part)
        self._persistAttributes(all_mapping, fileId)

        # 3. PDF: Use in-place only; no fallback to render
        if mimeType == "application/pdf":

@ -451,10 +768,31 @@ class NeutralizationService:

    # Helper functions

    def _neutralizeTextLight(self, text: str) -> Dict[str, Any]:
        """Regex-only supplementary pass using already-initialised processors.

        Unlike ``_neutralizeText`` this does **no** DB I/O
        (``_reloadNamesFromConfig`` is skipped) so it is safe to call from
        an async context without blocking the event-loop or risking a
        DB-connection-pool deadlock during parallel document processing.
        """
        try:
            data, mapping, replaced_fields, processed_info = self.textProcessor.processTextContent(text)
            neutralized_text = str(data)
            attributes = [NeutralizationAttribute(original=k, placeholder=v) for k, v in mapping.items()]
            return NeutralizationResult(
                neutralized_text=neutralized_text,
                mapping=mapping,
                attributes=attributes,
                processed_info=processed_info,
            ).model_dump()
        except Exception as e:
            logger.warning(f"_neutralizeTextLight error: {e}")
            return {'neutralized_text': text, 'mapping': {}, 'attributes': [], 'processed_info': {'type': 'error', 'error': str(e)}}

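    # Note (hypothetical sketch, for illustration): in async code paths the light
    # variant above can run directly on the event loop, while the DB-touching
    # _neutralizeText below is better pushed to a worker thread, mirroring the
    # run_in_executor pattern already used in processTextAsync:
    #
    #     light = self._neutralizeTextLight(text)                     # no DB I/O
    #     full = await asyncio.get_event_loop().run_in_executor(
    #         None, self._neutralizeText, text, 'text')               # DB-bound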
    def _neutralizeText(self, text: str, textType: str = None) -> Dict[str, Any]:
        """Process text and return unified dict for API consumption."""
        try:
            # Reload names from config before processing to ensure we have the latest names
            self._reloadNamesFromConfig()

            # Auto-detect content type if not provided

@ -7,6 +7,7 @@ Implements a general Swiss architecture planning data model.
|
|||
from typing import List, Dict, Any, Optional, ForwardRef
|
||||
from enum import Enum
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
import uuid
|
||||
|
|
@ -178,7 +179,7 @@ class Dokument(BaseModel):
|
|||
)
|
||||
|
||||
|
||||
class Kontext(BaseModel):
|
||||
class Kontext(PowerOnModel):
|
||||
"""Supporting data object for flexible additional information."""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
@ -248,7 +249,7 @@ class Land(BaseModel):
|
|||
)
|
||||
|
||||
|
||||
class Kanton(BaseModel):
|
||||
class Kanton(PowerOnModel):
|
||||
"""Cantonal level administrative entity."""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
@ -368,7 +369,7 @@ class Gemeinde(BaseModel):
|
|||
ParzelleRef = ForwardRef('Parzelle')
|
||||
|
||||
|
||||
class Parzelle(BaseModel):
|
||||
class Parzelle(PowerOnModel):
|
||||
"""Represents a plot with all building law properties."""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
@ -594,7 +595,7 @@ class Parzelle(BaseModel):
|
|||
)
|
||||
|
||||
|
||||
class Projekt(BaseModel):
|
||||
class Projekt(PowerOnModel):
|
||||
"""Core object representing a construction project."""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
|
|||
|
|
@ -39,52 +39,57 @@ RESOURCE_OBJECTS = [
|
|||
# Template roles for this feature with AccessRules
|
||||
# IMPORTANT: item uses fully qualified ObjectKeys (per the Navigation API concept)
|
||||
TEMPLATE_ROLES = [
|
||||
{
|
||||
"roleLabel": "realestate-viewer",
|
||||
"description": {
|
||||
"en": "Real Estate Viewer - View property information (read-only)",
|
||||
"de": "Immobilien-Betrachter - Immobilien-Informationen einsehen (nur lesen)",
|
||||
"fr": "Visualiseur immobilier - Consulter les informations immobilières (lecture seule)",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.realestate.dashboard", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "realestate-user",
|
||||
"description": {
|
||||
"en": "Real Estate User - Create and manage own property records",
|
||||
"de": "Immobilien-Benutzer - Eigene Immobilien-Daten erstellen und verwalten",
|
||||
"fr": "Utilisateur immobilier - Créer et gérer ses propres données immobilières",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.realestate.dashboard", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
|
||||
{"context": "RESOURCE", "item": "resource.feature.realestate.project.create", "view": True},
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "realestate-admin",
|
||||
"description": {
|
||||
"en": "Real Estate Administrator - Full access to all property data and settings",
|
||||
"de": "Immobilien-Administrator - Vollzugriff auf alle Immobiliendaten und Einstellungen",
|
||||
"fr": "Administrateur immobilier - Accès complet aux données et paramètres"
|
||||
"fr": "Administrateur immobilier - Accès complet aux données et paramètres",
|
||||
},
|
||||
"accessRules": [
|
||||
# Full UI access (all views including admin views)
|
||||
{"context": "UI", "item": None, "view": True},
|
||||
# Full DATA access
|
||||
{"context": "DATA", "item": None, "view": True, "read": "a", "create": "a", "update": "a", "delete": "a"},
|
||||
# Admin resources
|
||||
{"context": "RESOURCE", "item": "resource.feature.realestate.project.create", "view": True},
|
||||
{"context": "RESOURCE", "item": "resource.feature.realestate.project.delete", "view": True},
|
||||
]
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "realestate-manager",
|
||||
"description": {
|
||||
"en": "Real Estate Manager - Manage properties and tenants",
|
||||
"de": "Immobilien-Verwalter - Immobilien und Mieter verwalten",
|
||||
"fr": "Gestionnaire immobilier - Gérer les propriétés et locataires"
|
||||
"fr": "Gestionnaire immobilier - Gérer les propriétés et locataires",
|
||||
},
|
||||
"accessRules": [
|
||||
# UI access to map view
|
||||
{"context": "UI", "item": "ui.feature.realestate.dashboard", "view": True},
|
||||
# Group-level DATA access
|
||||
{"context": "DATA", "item": None, "view": True, "read": "g", "create": "g", "update": "g", "delete": "g"},
|
||||
# Resource: create projects
|
||||
{"context": "RESOURCE", "item": "resource.feature.realestate.project.create", "view": True},
|
||||
]
|
||||
},
|
||||
{
|
||||
"roleLabel": "realestate-viewer",
|
||||
"description": {
|
||||
"en": "Real Estate Viewer - View property information",
|
||||
"de": "Immobilien-Betrachter - Immobilien-Informationen einsehen",
|
||||
"fr": "Visualiseur immobilier - Consulter les informations immobilières"
|
||||
},
|
||||
"accessRules": [
|
||||
# UI access to map view (read-only)
|
||||
{"context": "UI", "item": "ui.feature.realestate.dashboard", "view": True},
|
||||
# Read-only DATA access (my records)
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
]
|
||||
],
|
||||
},
|
||||
]
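# Illustration only: one plausible reading of the DATA read/create/update/delete
# levels used above, inferred from the comments in this and the other definition
# files ("m" = own records / MY level, "g" = group, "a" = all, "n" = none). The
# helper below is a hypothetical sketch, not an existing API in this codebase.
def _matchesAccessLevel(level: str, recordCreatedBy: str, userId: str, groupUserIds: set) -> bool:
    if level == "a":          # all records in the mandate
        return True
    if level == "g":          # records created by members of the user's group
        return recordCreatedBy in groupUserIds
    if level == "m":          # only the user's own records
        return recordCreatedBy == userId
    return False              # "n" or unknown level: no access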
|
||||
|
||||
|
|
|
|||
|
|
@ -9,6 +9,8 @@ from pydantic import BaseModel, Field
|
|||
from enum import Enum
|
||||
import uuid
|
||||
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Enums
|
||||
|
|
@ -72,7 +74,7 @@ class TeamsbotTransferMode(str, Enum):
|
|||
# Database Models (stored in PostgreSQL)
|
||||
# ============================================================================
|
||||
|
||||
class TeamsbotSession(BaseModel):
|
||||
class TeamsbotSession(PowerOnModel):
|
||||
"""A Teams Bot meeting session."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Session ID")
|
||||
instanceId: str = Field(description="Feature instance ID (FK)")
|
||||
|
|
@ -90,11 +92,9 @@ class TeamsbotSession(BaseModel):
|
|||
errorMessage: Optional[str] = Field(default=None, description="Error message if status is ERROR")
|
||||
transcriptSegmentCount: int = Field(default=0, description="Number of transcript segments in this session")
|
||||
botResponseCount: int = Field(default=0, description="Number of bot responses in this session")
|
||||
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of record creation")
|
||||
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
|
||||
|
||||
|
||||
class TeamsbotTranscript(BaseModel):
|
||||
class TeamsbotTranscript(PowerOnModel):
|
||||
"""A single transcript segment from the meeting."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Transcript segment ID")
|
||||
sessionId: str = Field(description="Session ID (FK)")
|
||||
|
|
@ -105,10 +105,9 @@ class TeamsbotTranscript(BaseModel):
|
|||
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
|
||||
isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
|
||||
source: Optional[str] = Field(default=None, description="Source: caption, audioCapture, chat, chatHistory, speakerHint")
|
||||
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of record creation")
|
||||
|
||||
|
||||
class TeamsbotBotResponse(BaseModel):
|
||||
class TeamsbotBotResponse(PowerOnModel):
|
||||
"""A bot response generated during a meeting session."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Response ID")
|
||||
sessionId: str = Field(description="Session ID (FK)")
|
||||
|
|
@ -121,14 +120,13 @@ class TeamsbotBotResponse(BaseModel):
|
|||
processingTime: float = Field(default=0.0, description="Processing time in seconds")
|
||||
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
|
||||
timestamp: Optional[str] = Field(default=None, description="ISO timestamp of the response")
|
||||
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of record creation")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# System Bot Accounts (stored in PostgreSQL, credentials encrypted)
|
||||
# ============================================================================
|
||||
|
||||
class TeamsbotSystemBot(BaseModel):
|
||||
class TeamsbotSystemBot(PowerOnModel):
|
||||
"""A system bot account for authenticated meeting joins.
|
||||
Credentials are stored encrypted in the database, NOT in the UI-visible config.
|
||||
Only mandate admins can manage system bots."""
|
||||
|
|
@ -138,15 +136,13 @@ class TeamsbotSystemBot(BaseModel):
|
|||
email: str = Field(description="Microsoft account email")
|
||||
encryptedPassword: str = Field(description="Encrypted Microsoft account password")
|
||||
isActive: bool = Field(default=True, description="Whether this bot account is active")
|
||||
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of creation")
|
||||
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# User Account Credentials (stored in PostgreSQL, credentials encrypted)
|
||||
# ============================================================================
|
||||
|
||||
class TeamsbotUserAccount(BaseModel):
|
||||
class TeamsbotUserAccount(PowerOnModel):
|
||||
"""Saved Microsoft credentials for 'Mein Account' joins.
|
||||
Each user can store their own MS credentials per mandate.
|
||||
Password is encrypted; on login only MFA confirmation is needed."""
|
||||
|
|
@ -156,15 +152,13 @@ class TeamsbotUserAccount(BaseModel):
|
|||
email: str = Field(description="Microsoft account email")
|
||||
encryptedPassword: str = Field(description="Encrypted Microsoft account password")
|
||||
displayName: Optional[str] = Field(default=None, description="Display name derived from MS account")
|
||||
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of creation")
|
||||
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Per-User Settings (stored in PostgreSQL, per user per instance)
|
||||
# ============================================================================
|
||||
|
||||
class TeamsbotUserSettings(BaseModel):
|
||||
class TeamsbotUserSettings(PowerOnModel):
|
||||
"""Per-user settings for the Teams Bot feature.
|
||||
Each user has their own settings per feature instance.
|
||||
These override the instance-level defaults (TeamsbotConfig)."""
|
||||
|
|
@ -182,8 +176,6 @@ class TeamsbotUserSettings(BaseModel):
|
|||
triggerCooldownSeconds: Optional[int] = Field(default=None, description="Trigger cooldown override")
|
||||
contextWindowSegments: Optional[int] = Field(default=None, description="Context window override")
|
||||
debugMode: Optional[bool] = Field(default=None, description="Debug mode override")
|
||||
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of creation")
|
||||
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ from typing import Dict, Any, List, Optional
|
|||
|
||||
from modules.datamodels.datamodelUam import User
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
|
||||
from .datamodelTeamsbot import (
|
||||
|
|
@ -104,13 +103,10 @@ class TeamsbotObjects:
|
|||
|
||||
def createSession(self, sessionData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new session."""
|
||||
sessionData["creationDate"] = getIsoTimestamp()
|
||||
sessionData["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(TeamsbotSession, sessionData)
|
||||
|
||||
def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update session fields."""
|
||||
updates["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordModify(TeamsbotSession, sessionId, updates)
|
||||
|
||||
def deleteSession(self, sessionId: str) -> bool:
|
||||
|
|
@ -149,7 +145,6 @@ class TeamsbotObjects:
|
|||
|
||||
def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new transcript segment."""
|
||||
transcriptData["creationDate"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(TeamsbotTranscript, transcriptData)
|
||||
|
||||
def updateTranscript(self, transcriptId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
|
|
@ -180,7 +175,6 @@ class TeamsbotObjects:
|
|||
|
||||
def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new bot response record."""
|
||||
responseData["creationDate"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(TeamsbotBotResponse, responseData)
|
||||
|
||||
def _deleteResponsesBySession(self, sessionId: str) -> int:
|
||||
|
|
@ -216,13 +210,10 @@ class TeamsbotObjects:
|
|||
|
||||
def createSystemBot(self, botData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new system bot account."""
|
||||
botData["creationDate"] = getIsoTimestamp()
|
||||
botData["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(TeamsbotSystemBot, botData)
|
||||
|
||||
def updateSystemBot(self, botId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update a system bot account."""
|
||||
updates["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordModify(TeamsbotSystemBot, botId, updates)
|
||||
|
||||
def deleteSystemBot(self, botId: str) -> bool:
|
||||
|
|
@ -243,13 +234,10 @@ class TeamsbotObjects:
|
|||
|
||||
def createUserSettings(self, settingsData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create user settings."""
|
||||
settingsData["creationDate"] = getIsoTimestamp()
|
||||
settingsData["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(TeamsbotUserSettings, settingsData)
|
||||
|
||||
def updateUserSettings(self, settingsId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update user settings."""
|
||||
updates["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordModify(TeamsbotUserSettings, settingsId, updates)
|
||||
|
||||
def deleteUserSettings(self, settingsId: str) -> bool:
|
||||
|
|
@ -270,13 +258,10 @@ class TeamsbotObjects:
|
|||
|
||||
def createUserAccount(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create saved MS credentials."""
|
||||
data["creationDate"] = getIsoTimestamp()
|
||||
data["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(TeamsbotUserAccount, data)
|
||||
|
||||
def updateUserAccount(self, accountId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update saved MS credentials."""
|
||||
updates["lastModified"] = getIsoTimestamp()
|
||||
return self.db.recordModify(TeamsbotUserAccount, accountId, updates)
|
||||
|
||||
def deleteUserAccount(self, accountId: str) -> bool:
|
||||
|
|
|
|||
|
|
@ -39,17 +39,32 @@ DATA_OBJECTS = [
|
|||
{
|
||||
"objectKey": "data.feature.teamsbot.TeamsbotSession",
|
||||
"label": {"en": "Session", "de": "Sitzung", "fr": "Session"},
|
||||
"meta": {"table": "TeamsbotSession", "fields": ["id", "meetingLink", "botName", "status", "startedAt", "endedAt"]}
|
||||
"meta": {
|
||||
"table": "TeamsbotSession",
|
||||
"fields": ["id", "meetingLink", "botName", "status", "startedAt", "endedAt"],
|
||||
"isParent": True,
|
||||
"displayFields": ["botName", "status", "startedAt"],
|
||||
}
|
||||
},
|
||||
{
|
||||
"objectKey": "data.feature.teamsbot.TeamsbotTranscript",
|
||||
"label": {"en": "Transcript", "de": "Transkript", "fr": "Transcription"},
|
||||
"meta": {"table": "TeamsbotTranscript", "fields": ["id", "sessionId", "speaker", "text", "timestamp"]}
|
||||
"meta": {
|
||||
"table": "TeamsbotTranscript",
|
||||
"fields": ["id", "sessionId", "speaker", "text", "timestamp"],
|
||||
"parentTable": "TeamsbotSession",
|
||||
"parentKey": "sessionId",
|
||||
}
|
||||
},
|
||||
{
|
||||
"objectKey": "data.feature.teamsbot.TeamsbotBotResponse",
|
||||
"label": {"en": "Bot Response", "de": "Bot-Antwort", "fr": "Réponse du bot"},
|
||||
"meta": {"table": "TeamsbotBotResponse", "fields": ["id", "sessionId", "responseText", "detectedIntent"]}
|
||||
"meta": {
|
||||
"table": "TeamsbotBotResponse",
|
||||
"fields": ["id", "sessionId", "responseText", "detectedIntent"],
|
||||
"parentTable": "TeamsbotSession",
|
||||
"parentKey": "sessionId",
|
||||
}
|
||||
},
|
||||
{
|
||||
"objectKey": "data.feature.teamsbot.*",
|
||||
|
|
@ -103,25 +118,35 @@ TEMPLATE_ROLES = [
|
|||
{"context": "RESOURCE", "item": "resource.feature.teamsbot.config.edit", "view": True},
|
||||
]
|
||||
},
|
||||
{
|
||||
"roleLabel": "teamsbot-viewer",
|
||||
"description": {
|
||||
"en": "Teams Bot Viewer - View sessions and transcripts (read-only)",
|
||||
"de": "Teams Bot Betrachter - Sitzungen und Transkripte ansehen (nur lesen)",
|
||||
"fr": "Visualiseur Teams Bot - Consulter les sessions et transcriptions (lecture seule)",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.teamsbot.dashboard", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.teamsbot.sessions", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "teamsbot-user",
|
||||
"description": {
|
||||
"en": "Teams Bot User - Can start/stop sessions and view transcripts",
|
||||
"de": "Teams Bot Benutzer - Kann Sitzungen starten/stoppen und Transkripte einsehen",
|
||||
"fr": "Utilisateur Teams Bot - Peut démarrer/arrêter des sessions et voir les transcriptions"
|
||||
"fr": "Utilisateur Teams Bot - Peut démarrer/arrêter des sessions et voir les transcriptions",
|
||||
},
|
||||
"accessRules": [
|
||||
# UI access to dashboard and sessions (not settings)
|
||||
{"context": "UI", "item": "ui.feature.teamsbot.dashboard", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.teamsbot.sessions", "view": True},
|
||||
# Own records only
|
||||
{"context": "DATA", "item": "data.feature.teamsbot.TeamsbotSession", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
|
||||
{"context": "DATA", "item": "data.feature.teamsbot.TeamsbotTranscript", "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
{"context": "DATA", "item": "data.feature.teamsbot.TeamsbotBotResponse", "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
# Start and stop sessions
|
||||
{"context": "RESOURCE", "item": "resource.feature.teamsbot.session.start", "view": True},
|
||||
{"context": "RESOURCE", "item": "resource.feature.teamsbot.session.stop", "view": True},
|
||||
]
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
|
|
@ -132,7 +157,7 @@ def getFeatureDefinition() -> Dict[str, Any]:
|
|||
"code": FEATURE_CODE,
|
||||
"label": FEATURE_LABEL,
|
||||
"icon": FEATURE_ICON,
|
||||
"autoCreateInstance": True,
|
||||
"autoCreateInstance": False,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -17,6 +17,8 @@ from fastapi import WebSocket
|
|||
from modules.datamodels.datamodelUam import User
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
from modules.shared.timeUtils import getUtcTimestamp, getIsoTimestamp
|
||||
from modules.serviceCenter import getService as _getServiceCenterService
|
||||
from modules.serviceCenter.context import ServiceCenterContext
|
||||
|
||||
from .datamodelTeamsbot import (
|
||||
TeamsbotSessionStatus,
|
||||
|
|
@ -35,18 +37,18 @@ logger = logging.getLogger(__name__)

# =========================================================================
# Minimal Service Context (for AI billing in bridge callbacks)
# AI Service Factory (for billing-aware AI calls)
# =========================================================================

class _ServiceContext:
    """Minimal context providing user/mandate info for AiService billing.
    Used by bridge callbacks where a full Services instance is not available."""

    def __init__(self, user, mandateId, featureInstanceId=None):
        self.user = user
        self.mandateId = mandateId
        self.featureInstanceId = featureInstanceId
        self.featureCode = "teamsbot"
def _createAiService(user, mandateId, featureInstanceId=None):
    """Create a properly wired AiService via the service center."""
    ctx = ServiceCenterContext(
        user=user,
        mandate_id=mandateId,
        feature_instance_id=featureInstanceId,
        feature_code="teamsbot",
    )
    return _getServiceCenterService("ai", ctx)

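# Illustration only: a hypothetical sketch of how the factory above is used by the
# TeamsbotService methods further down in this file. The helper name _exampleAiCall
# is assumed; ensureAiObjectsInitialized and callAi are the calls shown at the real
# call sites.
async def _exampleAiCall(user, mandateId, instanceId, request):
    # `request` is an AiCallRequest built as at the call sites below
    aiService = _createAiService(user, mandateId, instanceId)
    await aiService.ensureAiObjectsInitialized()
    response = await aiService.callAi(request)
    return response.content if response and hasattr(response, "content") else ""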
# =========================================================================
|
||||
|
|
@ -1062,11 +1064,7 @@ class TeamsbotService:
|
|||
|
||||
# Call SPEECH_TEAMS
|
||||
try:
|
||||
from modules.serviceCenter.services.serviceAi.mainServiceAi import AiService
|
||||
|
||||
# Create minimal service context for AI billing
|
||||
serviceContext = _ServiceContext(self.currentUser, self.mandateId, self.instanceId)
|
||||
aiService = AiService(serviceCenter=serviceContext)
|
||||
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
|
||||
await aiService.ensureAiObjectsInitialized()
|
||||
|
||||
request = AiCallRequest(
|
||||
|
|
@ -1684,11 +1682,7 @@ class TeamsbotService:
|
|||
"""Summarize a long user-provided session context to its essential points.
|
||||
This reduces token usage in every subsequent AI call."""
|
||||
try:
|
||||
from modules.serviceCenter.services.serviceAi.mainServiceAi import AiService
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
|
||||
serviceContext = _ServiceContext(self.currentUser, self.mandateId, self.instanceId)
|
||||
aiService = AiService(serviceCenter=serviceContext)
|
||||
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
|
||||
await aiService.ensureAiObjectsInitialized()
|
||||
|
||||
request = AiCallRequest(
|
||||
|
|
@ -1738,11 +1732,7 @@ class TeamsbotService:
|
|||
lines.append(f"[{speaker}]: {text}")
|
||||
textToSummarize = "\n".join(lines)
|
||||
|
||||
from modules.serviceCenter.services.serviceAi.mainServiceAi import AiService
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
|
||||
serviceContext = _ServiceContext(self.currentUser, self.mandateId, self.instanceId)
|
||||
aiService = AiService(serviceCenter=serviceContext)
|
||||
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
|
||||
await aiService.ensureAiObjectsInitialized()
|
||||
|
||||
request = AiCallRequest(
|
||||
|
|
@ -1783,10 +1773,7 @@ class TeamsbotService:
|
|||
for t in transcripts
|
||||
)
|
||||
|
||||
from modules.serviceCenter.services.serviceAi.mainServiceAi import AiService
|
||||
|
||||
serviceContext = _ServiceContext(self.currentUser, self.mandateId, self.instanceId)
|
||||
aiService = AiService(serviceCenter=serviceContext)
|
||||
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
|
||||
await aiService.ensureAiObjectsInitialized()
|
||||
|
||||
request = AiCallRequest(
|
||||
|
|
|
|||
|
|
@ -5,11 +5,13 @@
|
|||
from enum import Enum
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
import uuid
|
||||
|
||||
|
||||
class TrusteeOrganisation(BaseModel):
|
||||
class TrusteeOrganisation(PowerOnModel):
|
||||
"""Represents trustee organisations (companies) within the Trustee feature."""
|
||||
id: str = Field( # Unique string label (PK), not UUID
|
||||
description="Unique organisation identifier (label)",
|
||||
|
|
@ -55,7 +57,7 @@ class TrusteeOrganisation(BaseModel):
|
|||
}
|
||||
)
|
||||
# System attributes are automatically set by DatabaseConnector:
|
||||
# _createdAt, _modifiedAt, _createdBy, _modifiedBy
|
||||
# sysCreatedAt, sysModifiedAt, sysCreatedBy, sysModifiedBy (PowerOnModel)
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
|
|
@ -71,7 +73,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeRole(BaseModel):
|
||||
class TrusteeRole(PowerOnModel):
|
||||
"""Defines roles within the Trustee feature."""
|
||||
id: str = Field( # Unique string label (PK), not UUID
|
||||
description="Unique role identifier (label)",
|
||||
|
|
@ -122,7 +124,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeAccess(BaseModel):
|
||||
class TrusteeAccess(PowerOnModel):
|
||||
"""Defines user access to organisations with specific roles."""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
@ -207,7 +209,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeContract(BaseModel):
|
||||
class TrusteeContract(PowerOnModel):
|
||||
"""Defines customer contracts within organisations."""
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
|
|
@ -289,7 +291,7 @@ class TrusteeDocumentTypeEnum(str, Enum):
|
|||
AUTO = "auto"
|
||||
|
||||
|
||||
class TrusteeDocument(BaseModel):
|
||||
class TrusteeDocument(PowerOnModel):
|
||||
"""Contains document references for bookings.
|
||||
|
||||
Documents reference files in the central Files table via fileId.
|
||||
|
|
@ -413,7 +415,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteePosition(BaseModel):
|
||||
class TrusteePosition(PowerOnModel):
|
||||
"""Contains booking positions (expense entries).
|
||||
|
||||
A position can have up to two document references: documentId (Beleg) and bankDocumentId (Bank-Referenz).
|
||||
|
|
@ -696,10 +698,6 @@ class TrusteePosition(BaseModel):
|
|||
}
|
||||
)
|
||||
|
||||
# Allow extra fields like _createdAt from database
|
||||
model_config = {"extra": "allow"}
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
"TrusteePosition",
|
||||
{"en": "Position", "fr": "Position", "de": "Position"},
|
||||
|
|
@ -739,7 +737,7 @@ registerModelLabels(
|
|||
# ── TrusteeData* tables (synced from external accounting apps for analysis) ──
|
||||
|
||||
|
||||
class TrusteeDataAccount(BaseModel):
|
||||
class TrusteeDataAccount(PowerOnModel):
|
||||
"""Chart of accounts synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
accountNumber: str = Field(description="Account number (e.g. '1020')")
|
||||
|
|
@ -769,7 +767,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeDataJournalEntry(BaseModel):
|
||||
class TrusteeDataJournalEntry(PowerOnModel):
|
||||
"""Journal entry header synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
externalId: Optional[str] = Field(default=None, description="ID in the source system")
|
||||
|
|
@ -799,7 +797,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeDataJournalLine(BaseModel):
|
||||
class TrusteeDataJournalLine(PowerOnModel):
|
||||
"""Journal entry line (debit/credit) synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id")
|
||||
|
|
@ -833,7 +831,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeDataContact(BaseModel):
|
||||
class TrusteeDataContact(PowerOnModel):
|
||||
"""Customer or vendor synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
externalId: Optional[str] = Field(default=None, description="ID in the source system")
|
||||
|
|
@ -873,7 +871,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeDataAccountBalance(BaseModel):
|
||||
class TrusteeDataAccountBalance(PowerOnModel):
|
||||
"""Account balance per period, derived from journal lines or directly from accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
accountNumber: str = Field(description="Account number")
|
||||
|
|
@ -907,7 +905,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeAccountingConfig(BaseModel):
|
||||
class TrusteeAccountingConfig(PowerOnModel):
|
||||
"""Per-instance accounting system configuration with encrypted credentials.
|
||||
|
||||
Each feature instance can connect to exactly one accounting system.
|
||||
|
|
@ -946,7 +944,7 @@ registerModelLabels(
|
|||
)
|
||||
|
||||
|
||||
class TrusteeAccountingSync(BaseModel):
|
||||
class TrusteeAccountingSync(PowerOnModel):
|
||||
"""Tracks which position was synced to which external system and when.
|
||||
|
||||
Used for duplicate prevention, audit trail, and retry logic.
|
||||
|
|
|
|||
|
|
@ -1152,7 +1152,7 @@ class TrusteeObjects:
|
|||
logger.warning(f"Document {documentId} not found")
|
||||
return None
|
||||
|
||||
createdBy = existing.get("_createdBy")
|
||||
createdBy = existing.get("sysCreatedBy")
|
||||
|
||||
# Check system RBAC permission (userreport can only edit their own records)
|
||||
if not self.checkCombinedPermission(TrusteeDocument, "update", recordCreatedBy=createdBy):
|
||||
|
|
@ -1178,7 +1178,7 @@ class TrusteeObjects:
|
|||
logger.warning(f"Document {documentId} not found")
|
||||
return False
|
||||
|
||||
createdBy = existing.get("_createdBy")
|
||||
createdBy = existing.get("sysCreatedBy")
|
||||
|
||||
if not self.checkCombinedPermission(TrusteeDocument, "delete", recordCreatedBy=createdBy):
|
||||
logger.warning(f"User {self.userId} lacks permission to delete document")
|
||||
|
|
@ -1198,7 +1198,7 @@ class TrusteeObjects:
|
|||
|
||||
def _toTrusteePositionOrDelete(self, rawRecord: Dict[str, Any], deleteCorrupt: bool = True) -> Optional[TrusteePosition]:
|
||||
"""Build TrusteePosition safely; optionally delete irreparably corrupt records."""
|
||||
cleanRecord = {k: v for k, v in (rawRecord or {}).items() if not k.startswith("_") or k == "_createdAt"}
|
||||
cleanRecord = {k: v for k, v in (rawRecord or {}).items() if not k.startswith("_") or k == "sysCreatedAt"}
|
||||
if not cleanRecord:
|
||||
return None
|
||||
|
||||
|
|
@ -1271,7 +1271,7 @@ class TrusteeObjects:
|
|||
"""Get all positions with RBAC filtering and optional DB-level pagination.
|
||||
|
||||
Filtering, sorting, and pagination are handled at the SQL level.
|
||||
Post-processing cleans internal fields (keeps _createdAt) and validates
|
||||
Post-processing cleans internal fields (keeps sysCreatedAt) and validates
|
||||
each record via _toTrusteePositionOrDelete (corrupt rows are deleted).
|
||||
|
||||
NOTE(post-process): totalItems may slightly overcount when corrupt legacy
|
||||
|
|
@ -1288,7 +1288,7 @@ class TrusteeObjects:
|
|||
featureCode=self.FEATURE_CODE
|
||||
)
|
||||
|
||||
keepFields = {'_createdAt'}
|
||||
keepFields = {'sysCreatedAt'}
|
||||
|
||||
def _cleanAndValidate(records):
|
||||
items = []
|
||||
|
|
@ -1369,7 +1369,7 @@ class TrusteeObjects:
|
|||
logger.warning(f"Position {positionId} not found")
|
||||
return None
|
||||
|
||||
createdBy = existing.get("_createdBy")
|
||||
createdBy = existing.get("sysCreatedBy")
|
||||
|
||||
# Check system RBAC permission (userreport can only edit their own records)
|
||||
if not self.checkCombinedPermission(TrusteePosition, "update", recordCreatedBy=createdBy):
|
||||
|
|
@ -1391,7 +1391,7 @@ class TrusteeObjects:
|
|||
logger.warning(f"Position {positionId} not found")
|
||||
return False
|
||||
|
||||
createdBy = existing.get("_createdBy")
|
||||
createdBy = existing.get("sysCreatedBy")
|
||||
|
||||
if not self.checkCombinedPermission(TrusteePosition, "delete", recordCreatedBy=createdBy):
|
||||
logger.warning(f"User {self.userId} lacks permission to delete position")
|
||||
|
|
|
|||
|
|
@ -58,10 +58,25 @@ UI_OBJECTS = [
|
|||
# DATA Objects for RBAC catalog (tables/entities)
|
||||
# Used for AccessRules on data-level permissions
|
||||
DATA_OBJECTS = [
|
||||
{
|
||||
"objectKey": "data.feature.trustee.TrusteeOrganisation",
|
||||
"label": {"en": "Organisation", "de": "Organisation", "fr": "Organisation"},
|
||||
"meta": {
|
||||
"table": "TrusteeOrganisation",
|
||||
"fields": ["id", "label", "enabled"],
|
||||
"isParent": True,
|
||||
"displayFields": ["label"],
|
||||
}
|
||||
},
|
||||
{
|
||||
"objectKey": "data.feature.trustee.TrusteePosition",
|
||||
"label": {"en": "Position", "de": "Position", "fr": "Position"},
|
||||
"meta": {"table": "TrusteePosition", "fields": ["id", "label", "description", "organisationId"]}
|
||||
"meta": {
|
||||
"table": "TrusteePosition",
|
||||
"fields": ["id", "label", "description", "organisationId"],
|
||||
"parentTable": "TrusteeOrganisation",
|
||||
"parentKey": "organisationId",
|
||||
}
|
||||
},
|
||||
{
|
||||
"objectKey": "data.feature.trustee.TrusteeDocument",
|
||||
|
|
@ -71,7 +86,12 @@ DATA_OBJECTS = [
|
|||
{
|
||||
"objectKey": "data.feature.trustee.TrusteeAccountingConfig",
|
||||
"label": {"en": "Accounting Config", "de": "Buchhaltungs-Konfiguration", "fr": "Config. comptable"},
|
||||
"meta": {"table": "TrusteeAccountingConfig", "fields": ["id", "connectorType", "displayLabel", "encryptedConfig", "isActive"]}
|
||||
"meta": {
|
||||
"table": "TrusteeAccountingConfig",
|
||||
"fields": ["id", "connectorType", "displayLabel", "encryptedConfig", "isActive"],
|
||||
"parentTable": "TrusteeOrganisation",
|
||||
"parentKey": "organisationId",
|
||||
}
|
||||
},
|
||||
{
|
||||
"objectKey": "data.feature.trustee.TrusteeAccountingSync",
|
||||
|
|
@ -170,60 +190,81 @@ RESOURCE_OBJECTS = [
|
|||
# Note: UI item=None means ALL views, specific items restrict to named views
|
||||
# IMPORTANT: item uses fully qualified ObjectKeys (per the Navigation API concept)
|
||||
TEMPLATE_ROLES = [
|
||||
{
|
||||
"roleLabel": "trustee-viewer",
|
||||
"description": {
|
||||
"en": "Trustee Viewer - View trustee data (read-only)",
|
||||
"de": "Treuhand-Betrachter - Treuhand-Daten einsehen (nur lesen)",
|
||||
"fr": "Visualiseur fiduciaire - Consulter les données fiduciaires (lecture seule)",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "trustee-user",
|
||||
"description": {
|
||||
"en": "Trustee User - Create and manage own trustee records",
|
||||
"de": "Treuhand-Benutzer - Eigene Treuhand-Daten erstellen und verwalten",
|
||||
"fr": "Utilisateur fiduciaire - Créer et gérer ses propres données fiduciaires",
|
||||
},
|
||||
"accessRules": [
|
||||
{"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.expense-import", "view": True},
|
||||
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "trustee-admin",
|
||||
"description": {
|
||||
"en": "Trustee Administrator - Full access to all trustee data and settings",
|
||||
"de": "Treuhand-Administrator - Vollzugriff auf alle Treuhand-Daten und Einstellungen",
|
||||
"fr": "Administrateur fiduciaire - Accès complet aux données et paramètres fiduciaires"
|
||||
"fr": "Administrateur fiduciaire - Accès complet aux données et paramètres fiduciaires",
|
||||
},
|
||||
"accessRules": [
|
||||
# Full UI access (all views including admin views)
|
||||
{"context": "UI", "item": None, "view": True},
|
||||
# Full DATA access
|
||||
{"context": "DATA", "item": None, "view": True, "read": "a", "create": "a", "update": "a", "delete": "a"},
|
||||
# Admin resource: manage instance roles
|
||||
{"context": "RESOURCE", "item": "resource.feature.trustee.instance-roles.manage", "view": True},
|
||||
]
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "trustee-accountant",
|
||||
"description": {
|
||||
"en": "Trustee Accountant - Manage accounting and financial data",
|
||||
"de": "Treuhand-Buchhalter - Buchhaltungs- und Finanzdaten verwalten",
|
||||
"fr": "Comptable fiduciaire - Gérer les données comptables et financières"
|
||||
"fr": "Comptable fiduciaire - Gérer les données comptables et financières",
|
||||
},
|
||||
"accessRules": [
|
||||
# UI access to main views (not admin views, not expense-import) - fully qualified ObjectKeys
|
||||
{"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.settings", "view": True},
|
||||
# Group-level DATA access
|
||||
{"context": "DATA", "item": None, "view": True, "read": "g", "create": "g", "update": "g", "delete": "g"},
|
||||
# Accounting sync permission
|
||||
{"context": "RESOURCE", "item": "resource.feature.trustee.accounting.sync", "view": True},
|
||||
{"context": "RESOURCE", "item": "resource.feature.trustee.accounting.view", "view": True},
|
||||
]
|
||||
],
|
||||
},
|
||||
{
|
||||
"roleLabel": "trustee-client",
|
||||
"description": {
|
||||
"en": "Trustee Client - View own accounting data and documents",
|
||||
"de": "Treuhand-Kunde - Eigene Buchhaltungsdaten und Dokumente einsehen",
|
||||
"fr": "Client fiduciaire - Consulter ses propres données comptables et documents"
|
||||
"fr": "Client fiduciaire - Consulter ses propres données comptables et documents",
|
||||
},
|
||||
"accessRules": [
|
||||
# UI access to main views + expense-import - fully qualified ObjectKeys
|
||||
{"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.expense-import", "view": True},
|
||||
{"context": "UI", "item": "ui.feature.trustee.scan-upload", "view": True},
|
||||
# Own records only (MY level)
|
||||
{"context": "DATA", "item": "data.feature.trustee.TrusteePosition", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
|
||||
{"context": "DATA", "item": "data.feature.trustee.TrusteeDocument", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
|
||||
]
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
|
|
|
|||
|
|
@ -1,30 +1,15 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""Workspace feature data models — VoiceSettings and WorkspaceUserSettings."""
|
||||
"""Workspace feature data models — WorkspaceUserSettings."""
|
||||
|
||||
from typing import Dict, Any, Optional
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel, Field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.attributeUtils import registerModelLabels
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
import uuid
|
||||
|
||||
|
||||
class VoiceSettings(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
userId: str = Field(description="ID of the user these settings belong to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
|
||||
mandateId: str = Field(description="ID of the mandate these settings belong to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
|
||||
featureInstanceId: str = Field(description="ID of the feature instance these settings belong to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
|
||||
sttLanguage: str = Field(default="de-DE", description="Speech-to-Text language", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": True})
|
||||
ttsLanguage: str = Field(default="de-DE", description="Text-to-Speech language", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": True})
|
||||
ttsVoice: str = Field(default="de-DE-KatjaNeural", description="Text-to-Speech voice", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": True})
|
||||
ttsVoiceMap: Dict[str, Any] = Field(default_factory=dict, description="Per-language voice mapping, e.g. {'de-DE': {'voiceName': 'de-DE-Wavenet-A'}, 'en-US': {'voiceName': 'en-US-Wavenet-C'}}", json_schema_extra={"frontend_type": "json", "frontend_readonly": False, "frontend_required": False})
|
||||
translationEnabled: bool = Field(default=True, description="Whether translation is enabled", json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False})
|
||||
targetLanguage: str = Field(default="en-US", description="Target language for translation", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False})
|
||||
creationDate: float = Field(default_factory=getUtcTimestamp, description="Date when the settings were created (UTC timestamp in seconds)", json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False})
|
||||
lastModified: float = Field(default_factory=getUtcTimestamp, description="Date when the settings were last modified (UTC timestamp in seconds)", json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False})
|
||||
|
||||
|
||||
class WorkspaceUserSettings(BaseModel):
|
||||
class WorkspaceUserSettings(PowerOnModel):
|
||||
"""Per-user workspace settings. None values mean 'use instance default'."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
userId: str = Field(description="User ID", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
|
||||
|
|
@ -33,25 +18,6 @@ class WorkspaceUserSettings(BaseModel):
|
|||
maxAgentRounds: Optional[int] = Field(default=None, description="Max agent rounds override (None = instance default)", json_schema_extra={"frontend_type": "number", "frontend_readonly": False, "frontend_required": False})
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
"VoiceSettings",
|
||||
{"en": "Voice Settings", "fr": "Paramètres vocaux"},
|
||||
{
|
||||
"id": {"en": "ID", "fr": "ID"},
|
||||
"userId": {"en": "User ID", "fr": "ID utilisateur"},
|
||||
"mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
|
||||
"featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
|
||||
"sttLanguage": {"en": "STT Language", "fr": "Langue STT"},
|
||||
"ttsLanguage": {"en": "TTS Language", "fr": "Langue TTS"},
|
||||
"ttsVoice": {"en": "TTS Voice", "fr": "Voix TTS"},
|
||||
"ttsVoiceMap": {"en": "TTS Voice Map", "fr": "Carte des voix TTS"},
|
||||
"translationEnabled": {"en": "Translation Enabled", "fr": "Traduction activée"},
|
||||
"targetLanguage": {"en": "Target Language", "fr": "Langue cible"},
|
||||
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
|
||||
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
|
||||
},
|
||||
)
|
||||
|
||||
registerModelLabels(
|
||||
"WorkspaceUserSettings",
|
||||
{"en": "Workspace User Settings", "de": "Workspace Benutzereinstellungen"},
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Interface for Workspace feature — manages VoiceSettings and WorkspaceUserSettings.
|
||||
Interface for Workspace feature — manages WorkspaceUserSettings.
|
||||
Uses a dedicated poweron_workspace database.
|
||||
"""
|
||||
|
||||
|
|
@ -10,11 +10,10 @@ from typing import Dict, Any, Optional
|
|||
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.datamodels.datamodelUam import User
|
||||
from modules.features.workspace.datamodelFeatureWorkspace import VoiceSettings, WorkspaceUserSettings
|
||||
from modules.features.workspace.datamodelFeatureWorkspace import WorkspaceUserSettings
|
||||
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
|
||||
from modules.security.rbac import RbacClass
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -62,122 +61,6 @@ class WorkspaceObjects:
|
|||
self.featureInstanceId = featureInstanceId
|
||||
self.db.updateContext(self.userId)
|
||||
|
||||
# =========================================================================
|
||||
# VoiceSettings CRUD
|
||||
# =========================================================================
|
||||
|
||||
def getVoiceSettings(self, userId: Optional[str] = None) -> Optional[VoiceSettings]:
|
||||
try:
|
||||
targetUserId = userId or self.userId
|
||||
if not targetUserId:
|
||||
logger.error("No user ID provided for voice settings")
|
||||
return None
|
||||
|
||||
recordFilter: Dict[str, Any] = {"userId": targetUserId}
|
||||
if self.featureInstanceId:
|
||||
recordFilter["featureInstanceId"] = self.featureInstanceId
|
||||
|
||||
filteredSettings = getRecordsetWithRBAC(
|
||||
self.db, VoiceSettings, self.currentUser,
|
||||
recordFilter=recordFilter, mandateId=self.mandateId,
|
||||
)
|
||||
|
||||
if not filteredSettings:
|
||||
return None
|
||||
|
||||
settingsData = filteredSettings[0]
|
||||
if not settingsData.get("creationDate"):
|
||||
settingsData["creationDate"] = getUtcTimestamp()
|
||||
if not settingsData.get("lastModified"):
|
||||
settingsData["lastModified"] = getUtcTimestamp()
|
||||
|
||||
return VoiceSettings(**settingsData)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting voice settings: {e}")
|
||||
return None
|
||||
|
||||
def createVoiceSettings(self, settingsData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
try:
|
||||
if "userId" not in settingsData:
|
||||
settingsData["userId"] = self.userId
|
||||
if "mandateId" not in settingsData:
|
||||
settingsData["mandateId"] = self.mandateId
|
||||
if "featureInstanceId" not in settingsData:
|
||||
settingsData["featureInstanceId"] = self.featureInstanceId
|
||||
|
||||
existing = self.getVoiceSettings(settingsData["userId"])
|
||||
if existing:
|
||||
raise ValueError(f"Voice settings already exist for user {settingsData['userId']}")
|
||||
|
||||
createdRecord = self.db.recordCreate(VoiceSettings, settingsData)
|
||||
if not createdRecord or not createdRecord.get("id"):
|
||||
raise ValueError("Failed to create voice settings record")
|
||||
|
||||
logger.info(f"Created voice settings for user {settingsData['userId']}")
|
||||
return createdRecord
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating voice settings: {e}")
|
||||
raise
|
||||
|
||||
def updateVoiceSettings(self, userId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
try:
|
||||
existing = self.getVoiceSettings(userId)
|
||||
if not existing:
|
||||
raise ValueError(f"Voice settings not found for user {userId}")
|
||||
|
||||
updateData["lastModified"] = getUtcTimestamp()
|
||||
success = self.db.recordModify(VoiceSettings, existing.id, updateData)
|
||||
if not success:
|
||||
raise ValueError("Failed to update voice settings record")
|
||||
|
||||
updated = self.getVoiceSettings(userId)
|
||||
if not updated:
|
||||
raise ValueError("Failed to retrieve updated voice settings")
|
||||
|
||||
logger.info(f"Updated voice settings for user {userId}")
|
||||
return updated.model_dump()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating voice settings: {e}")
|
||||
raise
|
||||
|
||||
def deleteVoiceSettings(self, userId: str) -> bool:
|
||||
try:
|
||||
existing = self.getVoiceSettings(userId)
|
||||
if not existing:
|
||||
return False
|
||||
success = self.db.recordDelete(VoiceSettings, existing.id)
|
||||
if success:
|
||||
logger.info(f"Deleted voice settings for user {userId}")
|
||||
return success
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting voice settings: {e}")
|
||||
return False
|
||||
|
||||
def getOrCreateVoiceSettings(self, userId: Optional[str] = None) -> VoiceSettings:
|
||||
targetUserId = userId or self.userId
|
||||
if not targetUserId:
|
||||
raise ValueError("No user ID provided for voice settings")
|
||||
|
||||
existing = self.getVoiceSettings(targetUserId)
|
||||
if existing:
|
||||
return existing
|
||||
|
||||
defaultSettings = {
|
||||
"userId": targetUserId,
|
||||
"mandateId": self.mandateId,
|
||||
"featureInstanceId": self.featureInstanceId,
|
||||
"sttLanguage": "de-DE",
|
||||
"ttsLanguage": "de-DE",
|
||||
"ttsVoice": "de-DE-KatjaNeural",
|
||||
"translationEnabled": True,
|
||||
"targetLanguage": "en-US",
|
||||
}
|
||||
createdRecord = self.createVoiceSettings(defaultSettings)
|
||||
return VoiceSettings(**createdRecord)
|
||||
|
||||
# =========================================================================
|
||||
# WorkspaceUserSettings CRUD
|
||||
# =========================================================================
|
||||
|

@ -128,7 +128,7 @@ TEMPLATE_ROLES = [
"accessRules": [
{"context": "UI", "item": None, "view": True},
{"context": "RESOURCE", "item": None, "view": True},
# DATA: never ALL in shared instances — every role (including admin) sees only _createdBy = self
# DATA: never ALL in shared instances — every role (including admin) sees only sysCreatedBy = self
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
]
},

@ -76,6 +76,27 @@ class _PendingEditsStore:
_pendingEditsStore = _PendingEditsStore()


def _workspaceBillingFeatureCode(user, mandateId: Optional[str], instanceId: str) -> Optional[str]:
    """Resolve FeatureInstance.featureCode for billing/UI when workflow is not on ServiceCenterContext."""
    if not instanceId or not str(instanceId).strip():
        return None
    try:
        from modules.interfaces.interfaceDbApp import getInterface as getAppInterface

        appIf = getAppInterface(user, mandateId=mandateId or None)
        inst = appIf.getFeatureInstance(str(instanceId).strip())
        if not inst:
            return None
        if isinstance(inst, dict):
            code = inst.get("featureCode")
        else:
            code = getattr(inst, "featureCode", None)
        return str(code).strip() if code else None
    except Exception as e:
        logger.debug("Workspace: feature code lookup failed for instance %s: %s", instanceId, e)
        return None


class WorkspaceInputRequest(BaseModel):
    """Prompt input for the unified workspace."""
    prompt: str = Field(description="User prompt text")

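# A minimal, self-contained sketch (not part of the diff) of the dict-vs-attribute fallback
# used by _workspaceBillingFeatureCode above: FeatureInstance rows may come back as plain
# dicts or as model objects, so the lookup has to handle both shapes before stripping the code.
from typing import Any, Optional


def extractFeatureCode(inst: Any) -> Optional[str]:
    """Return a stripped featureCode from a dict or an object, else None."""
    if inst is None:
        return None
    if isinstance(inst, dict):
        code = inst.get("featureCode")
    else:
        code = getattr(inst, "featureCode", None)
    return str(code).strip() if code else None


if __name__ == "__main__":
    class _Inst:
        featureCode = " workspace "

    assert extractFeatureCode({"featureCode": "chat"}) == "chat"
    assert extractFeatureCode(_Inst()) == "workspace"
    assert extractFeatureCode({}) is None
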
|
@ -87,6 +108,7 @@ class WorkspaceInputRequest(BaseModel):
|
|||
workflowId: Optional[str] = Field(default=None, description="Continue existing workflow")
|
||||
userLanguage: str = Field(default="en", description="User language code")
|
||||
allowedProviders: List[str] = Field(default_factory=list, description="Restrict AI to these providers")
|
||||
requireNeutralization: Optional[bool] = Field(default=None, description="Per-request neutralization override")
|
||||
|
||||
|
||||
async def _getAiObjects() -> AiObjects:
|
||||
|
|
@ -162,6 +184,7 @@ _SOURCE_TYPE_TO_SERVICE = {
|
|||
"googleDriveFolder": "drive",
|
||||
"gmailFolder": "gmail",
|
||||
"ftpFolder": "files",
|
||||
"clickupList": "clickup",
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -247,12 +270,19 @@ def _buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
|
|||
tableFields = obj.get("meta", {}).get("fields", [])
|
||||
break
|
||||
|
||||
recordFilter = fds.get("recordFilter")
|
||||
filterLine = ""
|
||||
if recordFilter and isinstance(recordFilter, dict):
|
||||
filterParts = [f"{k} = {v}" for k, v in recordFilter.items()]
|
||||
filterLine = f"\n recordFilter: {', '.join(filterParts)} (data is scoped to this record)"
|
||||
|
||||
parts.append(
|
||||
f"- featureInstanceId: {fiId}\n"
|
||||
f" feature: {featureCode}\n"
|
||||
f" instance: \"{instanceLabel}\"\n"
|
||||
f" table: {tableName} ({label})\n"
|
||||
f" fields: {', '.join(tableFields) if tableFields else 'all'}"
|
||||
f"{filterLine}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Error loading FeatureDataSource {fdsId}: {e}")
|
||||
|
|
@ -545,11 +575,13 @@ async def streamWorkspaceStart(
|
|||
from modules.serviceCenter import getService
|
||||
from modules.serviceCenter.context import ServiceCenterContext
|
||||
|
||||
wsBillingFeatureCode = _workspaceBillingFeatureCode(context.user, mandateId or "", instanceId)
|
||||
svcCtx = ServiceCenterContext(
|
||||
user=context.user,
|
||||
mandate_id=mandateId or "",
|
||||
feature_instance_id=instanceId,
|
||||
workflow_id=workflowId,
|
||||
feature_code=wsBillingFeatureCode,
|
||||
)
|
||||
chatSvc = getService("chat", svcCtx)
|
||||
attachmentLabel = _buildWorkspaceAttachmentLabel(
|
||||
|
|
@ -588,6 +620,8 @@ async def streamWorkspaceStart(
|
|||
userLanguage=userInput.userLanguage,
|
||||
instanceConfig=instanceConfig,
|
||||
allowedProviders=userInput.allowedProviders,
|
||||
requireNeutralization=userInput.requireNeutralization,
|
||||
billingFeatureCode=wsBillingFeatureCode,
|
||||
)
|
||||
)
|
||||
eventManager.register_agent_task(queueId, agentTask)
|
||||
|
|
@ -643,6 +677,8 @@ async def _runWorkspaceAgent(
|
|||
userLanguage: str = "en",
|
||||
instanceConfig: Dict[str, Any] = None,
|
||||
allowedProviders: List[str] = None,
|
||||
requireNeutralization: Optional[bool] = None,
|
||||
billingFeatureCode: Optional[str] = None,
|
||||
):
|
||||
"""Run the serviceAgent loop and forward events to the SSE queue."""
|
||||
try:
|
||||
|
|
@ -653,6 +689,7 @@ async def _runWorkspaceAgent(
|
|||
mandate_id=mandateId,
|
||||
feature_instance_id=instanceId,
|
||||
workflow_id=workflowId,
|
||||
feature_code=billingFeatureCode,
|
||||
)
|
||||
agentService = getService("agent", ctx)
|
||||
chatService = getService("chat", ctx)
|
||||
|
|
@ -660,6 +697,11 @@ async def _runWorkspaceAgent(
|
|||
|
||||
if allowedProviders:
|
||||
aiService.services.allowedProviders = allowedProviders
|
||||
logger.info(f"Workspace agent: allowedProviders={allowedProviders}")
|
||||
else:
|
||||
logger.debug("Workspace agent: no allowedProviders in request")
|
||||
if requireNeutralization is not None:
|
||||
ctx.requireNeutralization = requireNeutralization
|
||||
|
||||
wfRecord = chatInterface.getWorkflow(workflowId) if workflowId else None
|
||||
wfName = ""
|
||||
|
|
@ -887,12 +929,30 @@ async def listWorkspaceWorkflows(
|
|||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
includeArchived: bool = Query(default=False, description="Include archived workflows"),
|
||||
search: str = Query(default="", description="Fulltext search in workflow titles and message content"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List workspace workflows/conversations for this instance."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
chatInterface = _getChatInterface(context, featureInstanceId=instanceId)
|
||||
workflows = chatInterface.getWorkflows() or []
|
||||
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
rootIf = getRootInterface()
|
||||
_fiCache: Dict[str, Dict[str, str]] = {}
|
||||
|
||||
def _resolveFeatureLabels(fiId: str) -> Dict[str, str]:
|
||||
if fiId not in _fiCache:
|
||||
fi = rootIf.getFeatureInstance(fiId)
|
||||
if fi:
|
||||
_fiCache[fiId] = {
|
||||
"featureLabel": getattr(fi, "label", "") or getattr(fi, "featureCode", fiId),
|
||||
"featureCode": getattr(fi, "featureCode", ""),
|
||||
}
|
||||
else:
|
||||
_fiCache[fiId] = {"featureLabel": fiId[:8], "featureCode": ""}
|
||||
return _fiCache[fiId]
|
||||
|
||||
items = []
|
||||
for wf in workflows:
|
||||
if isinstance(wf, dict):
|
||||
|
|
@ -904,13 +964,63 @@ async def listWorkspaceWorkflows(
|
|||
"status": getattr(wf, "status", ""),
|
||||
"startedAt": getattr(wf, "startedAt", None),
|
||||
"lastActivity": getattr(wf, "lastActivity", None),
|
||||
"featureInstanceId": getattr(wf, "featureInstanceId", instanceId),
|
||||
}
|
||||
if not includeArchived and item.get("status") == "archived":
|
||||
continue
|
||||
fiId = item.get("featureInstanceId") or instanceId
|
||||
labels = _resolveFeatureLabels(fiId)
|
||||
item.setdefault("featureLabel", labels["featureLabel"])
|
||||
item.setdefault("featureCode", labels["featureCode"])
|
||||
item.setdefault("featureInstanceId", fiId)
|
||||
|
||||
lastMsg = chatInterface.getLastMessageTimestamp(item.get("id"))
|
||||
if lastMsg:
|
||||
item["lastMessageAt"] = lastMsg
|
||||
|
||||
items.append(item)
|
||||
|
||||
if search and search.strip():
|
||||
searchLower = search.strip().lower()
|
||||
matchedIds = set()
|
||||
for item in items:
|
||||
if searchLower in (item.get("name") or "").lower() or searchLower in (item.get("label") or "").lower():
|
||||
matchedIds.add(item["id"])
|
||||
contentHits = chatInterface.searchWorkflowsByContent(searchLower, limit=50)
|
||||
matchedIds.update(contentHits)
|
||||
items = [i for i in items if i["id"] in matchedIds]
|
||||
|
||||
return JSONResponse({"workflows": items})
|
||||
|
||||
|
||||
class ResolveRagRequest(BaseModel):
|
||||
"""Request body for resolving a chat via RAG."""
|
||||
chatId: str = Field(..., description="Workflow/chat ID to resolve")
|
||||
|
||||
|
||||
@router.post("/{instanceId}/resolve-rag")
|
||||
@limiter.limit("60/minute")
|
||||
async def resolveRag(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
body: ResolveRagRequest = Body(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Build a RAG summary for a chat (workflow) to inject into the input area."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
chatInterface = _getChatInterface(context, featureInstanceId=instanceId)
|
||||
messages = chatInterface.getMessages(body.chatId) or []
|
||||
|
||||
texts = []
|
||||
for msg in messages[:30]:
|
||||
content = msg.get("message") if isinstance(msg, dict) else getattr(msg, "message", "")
|
||||
if content:
|
||||
texts.append(content[:500])
|
||||
|
||||
summary = "\n---\n".join(texts[:10]) if texts else ""
|
||||
return JSONResponse({"summary": summary, "chatId": body.chatId, "messageCount": len(texts)})
|
||||
|
||||
|
||||
class UpdateWorkflowRequest(BaseModel):
|
||||
"""Request body for updating a workflow (PATCH)."""
|
||||
name: Optional[str] = Field(default=None, description="New workflow name")
|
||||
|
|
@ -1233,8 +1343,8 @@ async def listFeatureConnections(
|
|||
instanceId: str = Path(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List feature instances the user has access to across ALL mandates."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
"""List feature instances the user has access to, scoped to the workspace mandate."""
|
||||
wsMandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
from modules.security.rbacCatalog import getCatalogService
|
||||
from modules.datamodels.datamodelUam import Mandate
|
||||
|
|
@ -1249,8 +1359,14 @@ async def listFeatureConnections(
|
|||
if not userMandates:
|
||||
return JSONResponse({"featureConnectionsByMandate": []})
|
||||
|
||||
allowedMandateIds = {um.mandateId for um in userMandates}
|
||||
if wsMandateId and wsMandateId in allowedMandateIds:
|
||||
allowedMandateIds = {wsMandateId}
|
||||
|
||||
mandateLabels: dict = {}
|
||||
for um in userMandates:
|
||||
if um.mandateId not in allowedMandateIds:
|
||||
continue
|
||||
try:
|
||||
rows = rootIf.db.getRecordset(Mandate, recordFilter={"id": um.mandateId})
|
||||
if rows:
|
||||
|
|
@ -1262,6 +1378,8 @@ async def listFeatureConnections(
|
|||
byMandate: dict = {}
|
||||
seenIds: set = set()
|
||||
for um in userMandates:
|
||||
if um.mandateId not in allowedMandateIds:
|
||||
continue
|
||||
allInstances = rootIf.getFeatureInstancesByMandate(um.mandateId)
|
||||
for inst in allInstances:
|
||||
if inst.id in seenIds:
|
||||
|
|
@ -1315,7 +1433,7 @@ async def listFeatureConnectionTables(
|
|||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List data tables (DATA_OBJECTS) for a feature instance, filtered by RBAC."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
wsMandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
from modules.security.rbacCatalog import getCatalogService
|
||||
|
||||
|
|
@ -1325,6 +1443,8 @@ async def listFeatureConnectionTables(
|
|||
raise HTTPException(status_code=404, detail="Feature instance not found")
|
||||
|
||||
mandateId = str(inst.mandateId) if inst.mandateId else None
|
||||
if wsMandateId and mandateId and mandateId != wsMandateId:
|
||||
raise HTTPException(status_code=403, detail="Feature instance does not belong to workspace mandate")
|
||||
catalog = getCatalogService()
|
||||
|
||||
try:
|
||||
|
|
@ -1345,16 +1465,132 @@ async def listFeatureConnectionTables(
|
|||
tables = []
|
||||
for obj in accessible:
|
||||
meta = obj.get("meta", {})
|
||||
tables.append({
|
||||
node = {
|
||||
"objectKey": obj.get("objectKey", ""),
|
||||
"tableName": meta.get("table", ""),
|
||||
"label": obj.get("label", {}),
|
||||
"fields": meta.get("fields", []),
|
||||
})
|
||||
}
|
||||
if meta.get("isParent"):
|
||||
node["isParent"] = True
|
||||
node["displayFields"] = meta.get("displayFields", [])
|
||||
if meta.get("parentTable"):
|
||||
node["parentTable"] = meta["parentTable"]
|
||||
node["parentKey"] = meta.get("parentKey", "")
|
||||
tables.append(node)
|
||||
|
||||
return JSONResponse({"tables": tables})
|
||||
|
||||
|
||||
@router.get("/{instanceId}/feature-connections/{fiId}/parent-objects/{tableName}")
|
||||
@limiter.limit("120/minute")
|
||||
async def listParentObjects(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
fiId: str = Path(..., description="Feature instance ID"),
|
||||
tableName: str = Path(..., description="Parent table name from DATA_OBJECTS"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List records from a parent table so the user can pick a specific record to scope data."""
|
||||
wsMandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
from modules.security.rbacCatalog import getCatalogService
|
||||
|
||||
rootIf = getRootInterface()
|
||||
inst = rootIf.getFeatureInstance(fiId)
|
||||
if not inst:
|
||||
raise HTTPException(status_code=404, detail="Feature instance not found")
|
||||
|
||||
featureCode = inst.featureCode
|
||||
mandateId = str(inst.mandateId) if inst.mandateId else ""
|
||||
if wsMandateId and mandateId and mandateId != wsMandateId:
|
||||
raise HTTPException(status_code=403, detail="Feature instance does not belong to workspace mandate")
|
||||
catalog = getCatalogService()
|
||||
|
||||
parentObj = None
|
||||
for obj in catalog.getDataObjects(featureCode):
|
||||
meta = obj.get("meta", {})
|
||||
if meta.get("table") == tableName and meta.get("isParent"):
|
||||
parentObj = obj
|
||||
break
|
||||
if not parentObj:
|
||||
raise HTTPException(status_code=400, detail=f"Table '{tableName}' is not a registered parent table")
|
||||
|
||||
displayFields = parentObj["meta"].get("displayFields", [])
|
||||
selectCols = ', '.join(f'"{f}"' for f in (["id"] + displayFields)) if displayFields else "*"
|
||||
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
featureDbName = f"poweron_{featureCode.lower()}"
|
||||
featureDbConn = None
|
||||
try:
|
||||
featureDbConn = DatabaseConnector(
|
||||
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
|
||||
dbDatabase=featureDbName,
|
||||
dbUser=APP_CONFIG.get("DB_USER"),
|
||||
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
|
||||
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
|
||||
userId=str(context.user.id),
|
||||
)
|
||||
conn = featureDbConn.connection
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"SELECT column_name FROM information_schema.columns "
|
||||
"WHERE table_schema = 'public' AND LOWER(table_name) = LOWER(%s) "
|
||||
"AND column_name IN ('featureInstanceId', 'instanceId')",
|
||||
[tableName],
|
||||
)
|
||||
instanceCols = [row["column_name"] for row in cur.fetchall()]
|
||||
instanceCol = "featureInstanceId" if "featureInstanceId" in instanceCols else "instanceId"
|
||||
|
||||
cur.execute(
|
||||
"SELECT column_name FROM information_schema.columns "
|
||||
"WHERE table_schema = 'public' AND LOWER(table_name) = LOWER(%s) "
|
||||
"AND column_name = 'userId'",
|
||||
[tableName],
|
||||
)
|
||||
hasUserId = cur.rowcount > 0
|
||||
|
||||
sql = (
|
||||
f'SELECT {selectCols} FROM "{tableName}" '
|
||||
f'WHERE "{instanceCol}" = %s'
|
||||
)
|
||||
params = [fiId]
|
||||
if mandateId:
|
||||
sql += ' AND "mandateId" = %s'
|
||||
params.append(mandateId)
|
||||
if hasUserId:
|
||||
sql += ' AND "userId" = %s'
|
||||
params.append(str(context.user.id))
|
||||
sql += ' ORDER BY "id" DESC LIMIT 100'
|
||||
cur.execute(sql, params)
|
||||
rows = []
|
||||
for row in cur.fetchall():
|
||||
r = dict(row)
|
||||
for k, v in r.items():
|
||||
if hasattr(v, "isoformat"):
|
||||
r[k] = v.isoformat()
|
||||
elif isinstance(v, (bytes, bytearray)):
|
||||
r[k] = f"<binary {len(v)} bytes>"
|
||||
displayParts = [str(r.get(f, "")) for f in displayFields if r.get(f) is not None]
|
||||
rows.append({
|
||||
"id": r.get("id", ""),
|
||||
"displayLabel": " | ".join(displayParts) if displayParts else r.get("id", ""),
|
||||
"fields": {f: r.get(f) for f in displayFields},
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"listParentObjects({tableName}) failed: {e}", exc_info=True)
|
||||
raise HTTPException(status_code=500, detail=f"Failed to list parent objects: {e}")
|
||||
finally:
|
||||
if featureDbConn:
|
||||
try:
|
||||
featureDbConn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return JSONResponse({"parentObjects": rows})
|
||||
|
||||
|
||||
class CreateFeatureDataSourceRequest(BaseModel):
|
||||
"""Request body for adding a feature table as data source."""
|
||||
featureInstanceId: str = Field(description="Feature instance ID")
|
||||
|
|
@ -1362,6 +1598,7 @@ class CreateFeatureDataSourceRequest(BaseModel):
|
|||
tableName: str = Field(description="Table name from DATA_OBJECTS")
|
||||
objectKey: str = Field(description="RBAC object key")
|
||||
label: str = Field(description="User-visible label")
|
||||
recordFilter: Optional[dict] = Field(default=None, description="Record-level filter for scoping")
|
||||
|
||||
|
||||
@router.post("/{instanceId}/feature-datasources")
|
||||
|
|
@ -1373,13 +1610,15 @@ async def createFeatureDataSource(
|
|||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Create a FeatureDataSource for this workspace instance."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
wsMandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
from modules.datamodels.datamodelFeatureDataSource import FeatureDataSource
|
||||
|
||||
rootIf = getRootInterface()
|
||||
inst = rootIf.getFeatureInstance(body.featureInstanceId)
|
||||
mandateId = str(inst.mandateId) if inst else (str(context.mandateId) if context.mandateId else "")
|
||||
if wsMandateId and mandateId and mandateId != wsMandateId:
|
||||
raise HTTPException(status_code=403, detail="Feature instance does not belong to workspace mandate")
|
||||
|
||||
fds = FeatureDataSource(
|
||||
featureInstanceId=body.featureInstanceId,
|
||||
|
|
@ -1390,6 +1629,7 @@ async def createFeatureDataSource(
|
|||
mandateId=mandateId,
|
||||
userId=str(context.user.id),
|
||||
workspaceInstanceId=instanceId,
|
||||
recordFilter=body.recordFilter,
|
||||
)
|
||||
created = rootIf.db.recordCreate(FeatureDataSource, fds.model_dump())
|
||||
return JSONResponse(created if isinstance(created, dict) else fds.model_dump())
|
||||
|
|
@ -1582,137 +1822,6 @@ async def synthesizeVoice(
|
|||
return JSONResponse({"audio": None, "note": "TTS via browser Speech Synthesis API recommended"})
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Voice Settings Endpoints
|
||||
# =========================================================================
|
||||
|
||||
@router.get("/{instanceId}/settings/voice")
|
||||
@limiter.limit("120/minute")
|
||||
async def getVoiceSettings(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Load voice settings for the current user and instance."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
wsInterface = _getWorkspaceInterface(context, instanceId)
|
||||
userId = str(context.user.id)
|
||||
try:
|
||||
vs = wsInterface.getVoiceSettings(userId)
|
||||
if not vs:
|
||||
logger.info(f"GET voice settings: not found for user={userId}, creating defaults")
|
||||
vs = wsInterface.getOrCreateVoiceSettings(userId)
|
||||
result = vs.model_dump() if vs else {}
|
||||
mapKeys = list(result.get("ttsVoiceMap", {}).keys()) if result else []
|
||||
logger.info(f"GET voice settings for user={userId}: ttsVoiceMap languages={mapKeys}")
|
||||
return JSONResponse(result)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load voice settings for user={userId}: {e}", exc_info=True)
|
||||
return JSONResponse({"ttsVoiceMap": {}}, status_code=200)
|
||||
|
||||
|
||||
@router.put("/{instanceId}/settings/voice")
|
||||
@limiter.limit("120/minute")
|
||||
async def updateVoiceSettings(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
body: dict = Body(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Update voice settings for the current user and instance."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
wsInterface = _getWorkspaceInterface(context, instanceId)
|
||||
userId = str(context.user.id)
|
||||
|
||||
try:
|
||||
logger.info(f"PUT voice settings for user={userId}, instance={instanceId}, body keys={list(body.keys())}")
|
||||
vs = wsInterface.getVoiceSettings(userId)
|
||||
if not vs:
|
||||
logger.info(f"No existing voice settings, creating new for user={userId}")
|
||||
createData = {
|
||||
"userId": userId,
|
||||
"mandateId": str(context.mandateId) if context.mandateId else "",
|
||||
"featureInstanceId": instanceId,
|
||||
}
|
||||
createData.update(body)
|
||||
created = wsInterface.createVoiceSettings(createData)
|
||||
logger.info(f"Created voice settings for user={userId}, ttsVoiceMap keys={list((created or {}).get('ttsVoiceMap', {}).keys())}")
|
||||
return JSONResponse(created)
|
||||
|
||||
updateData = {k: v for k, v in body.items() if k not in ("id", "userId", "mandateId", "featureInstanceId", "creationDate")}
|
||||
logger.info(f"Updating voice settings for user={userId}, update keys={list(updateData.keys())}")
|
||||
updated = wsInterface.updateVoiceSettings(userId, updateData)
|
||||
logger.info(f"Updated voice settings for user={userId}, ttsVoiceMap keys={list((updated or {}).get('ttsVoiceMap', {}).keys())}")
|
||||
return JSONResponse(updated)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to update voice settings for user={userId}: {e}", exc_info=True)
|
||||
return JSONResponse({"error": str(e)}, status_code=500)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/voice/languages")
|
||||
@limiter.limit("120/minute")
|
||||
async def getVoiceLanguages(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Return available TTS languages."""
|
||||
mandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||
voiceInterface = getVoiceInterface(context.user, mandateId)
|
||||
languagesResult = await voiceInterface.getAvailableLanguages()
|
||||
languageList = languagesResult.get("languages", []) if isinstance(languagesResult, dict) else languagesResult
|
||||
return JSONResponse({"languages": languageList})
|
||||
|
||||
|
||||
@router.get("/{instanceId}/voice/voices")
|
||||
@limiter.limit("120/minute")
|
||||
async def getVoiceVoices(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
language: str = Query("de-DE"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Return available TTS voices for a given language."""
|
||||
mandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||
voiceInterface = getVoiceInterface(context.user, mandateId)
|
||||
voicesResult = await voiceInterface.getAvailableVoices(language)
|
||||
voiceList = voicesResult.get("voices", []) if isinstance(voicesResult, dict) else voicesResult
|
||||
return JSONResponse({"voices": voiceList})
|
||||
|
||||
|
||||
@router.post("/{instanceId}/voice/test")
|
||||
@limiter.limit("30/minute")
|
||||
async def testVoice(
|
||||
request: Request,
|
||||
instanceId: str = Path(...),
|
||||
body: dict = Body(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Test a specific voice with a sample text."""
|
||||
import base64
|
||||
mandateId, _ = _validateInstanceAccess(instanceId, context)
|
||||
text = body.get("text", "Hallo, das ist ein Stimmtest.")
|
||||
language = body.get("language", "de-DE")
|
||||
voiceId = body.get("voiceId")
|
||||
|
||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||
voiceInterface = getVoiceInterface(context.user, mandateId)
|
||||
|
||||
try:
|
||||
result = await voiceInterface.textToSpeech(text=text, languageCode=language, voiceName=voiceId)
|
||||
if result and isinstance(result, dict):
|
||||
audioContent = result.get("audioContent")
|
||||
if audioContent:
|
||||
audioB64 = base64.b64encode(
|
||||
audioContent if isinstance(audioContent, bytes) else audioContent.encode()
|
||||
).decode()
|
||||
return JSONResponse({"success": True, "audio": audioB64, "format": "mp3", "text": text})
|
||||
return JSONResponse({"success": False, "error": "TTS returned no audio"})
|
||||
except Exception as e:
|
||||
logger.error(f"Voice test failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=f"TTS test failed: {str(e)}")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
|
|
|||
|
|
@ -134,7 +134,7 @@ class AiObjects:
|
|||
logger.info(f"Attempting AI call with model: {model.name} (attempt {attempt + 1}/{len(failoverModelList)})")
|
||||
|
||||
if request.messages:
|
||||
response = await self._callWithMessages(model, request.messages, options, request.tools)
|
||||
response = await self._callWithMessages(model, request.messages, options, request.tools, toolChoice=request.toolChoice)
|
||||
else:
|
||||
response = await self._callWithModel(model, prompt, context, options)
|
||||
|
||||
|
|
@ -149,7 +149,7 @@ class AiObjects:
|
|||
await asyncio.sleep(retryAfter + 0.5)
|
||||
try:
|
||||
if request.messages:
|
||||
response = await self._callWithMessages(model, request.messages, options, request.tools)
|
||||
response = await self._callWithMessages(model, request.messages, options, request.tools, toolChoice=request.toolChoice)
|
||||
else:
|
||||
response = await self._callWithModel(model, prompt, context, options)
|
||||
logger.info(f"AI call successful with {model.name} after rate-limit retry")
|
||||
|
|
@ -288,7 +288,8 @@ class AiObjects:

async def _callWithMessages(self, model: AiModel, messages: List[Dict[str, Any]],
options: AiCallOptions = None,
tools: List[Dict[str, Any]] = None) -> AiCallResponse:
tools: List[Dict[str, Any]] = None,
toolChoice: Any = None) -> AiCallResponse:
"""Call a model with pre-built messages (agent mode). Supports tools for native function calling."""
import json as _json

@ -302,7 +303,8 @@ class AiObjects:
messages=messages,
model=model,
options=options or {},
tools=tools
tools=tools,
toolChoice=toolChoice,
)

modelResponse = await model.functionCall(modelCall)

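# Illustrative sketch (local names only; the real flow uses AiModelCall and model.functionCall)
# of the toolChoice threading added above: the request-level toolChoice is carried on the call
# object and, by assumption, only forwarded to a provider payload when it is actually set.
from dataclasses import dataclass
from typing import Any, Dict, List, Optional


@dataclass
class ModelCall:
    messages: List[Dict[str, Any]]
    tools: Optional[List[Dict[str, Any]]] = None
    toolChoice: Any = None


def buildProviderPayload(call: ModelCall) -> Dict[str, Any]:
    payload: Dict[str, Any] = {"messages": call.messages}
    if call.tools:
        payload["tools"] = call.tools
        if call.toolChoice is not None:
            payload["tool_choice"] = call.toolChoice
    return payload


if __name__ == "__main__":
    call = ModelCall(messages=[{"role": "user", "content": "hi"}],
                     tools=[{"name": "search"}], toolChoice="auto")
    assert buildProviderPayload(call)["tool_choice"] == "auto"
    assert "tool_choice" not in buildProviderPayload(ModelCall(messages=[]))
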
|
@ -379,7 +381,7 @@ class AiObjects:
|
|||
for attempt, model in enumerate(failoverModelList):
|
||||
try:
|
||||
logger.info(f"Streaming AI call with model: {model.name} (attempt {attempt + 1})")
|
||||
async for chunk in self._callWithMessagesStream(model, request.messages, options, request.tools):
|
||||
async for chunk in self._callWithMessagesStream(model, request.messages, options, request.tools, toolChoice=request.toolChoice):
|
||||
yield chunk
|
||||
return
|
||||
|
||||
|
|
@ -390,7 +392,7 @@ class AiObjects:
|
|||
logger.info(f"Rate limit on {model.name}, waiting {retryAfter:.1f}s before retry")
|
||||
await asyncio.sleep(retryAfter + 0.5)
|
||||
try:
|
||||
async for chunk in self._callWithMessagesStream(model, request.messages, options, request.tools):
|
||||
async for chunk in self._callWithMessagesStream(model, request.messages, options, request.tools, toolChoice=request.toolChoice):
|
||||
yield chunk
|
||||
return
|
||||
except Exception as retryErr:
|
||||
|
|
@ -421,6 +423,7 @@ class AiObjects:
|
|||
async def _callWithMessagesStream(
|
||||
self, model: AiModel, messages: List[Dict[str, Any]],
|
||||
options: AiCallOptions = None, tools: List[Dict[str, Any]] = None,
|
||||
toolChoice: Any = None,
|
||||
) -> AsyncGenerator[Union[str, AiCallResponse], None]:
|
||||
"""Stream a model call. Yields str deltas, then final AiCallResponse with billing."""
|
||||
from modules.datamodels.datamodelAi import AiModelCall, AiModelResponse
|
||||
|
|
@ -429,7 +432,7 @@ class AiObjects:
|
|||
startTime = time.time()
|
||||
|
||||
if not model.functionCallStream:
|
||||
response = await self._callWithMessages(model, messages, options, tools)
|
||||
response = await self._callWithMessages(model, messages, options, tools, toolChoice=toolChoice)
|
||||
if response.content:
|
||||
yield response.content
|
||||
yield response
|
||||
|
|
@ -438,6 +441,7 @@ class AiObjects:
|
|||
modelCall = AiModelCall(
|
||||
messages=messages, model=model,
|
||||
options=options or {}, tools=tools,
|
||||
toolChoice=toolChoice,
|
||||
)
|
||||
|
||||
finalModelResponse = None
|
||||
|
|
|
|||
|
|
@ -11,9 +11,9 @@ Multi-Tenant Design:
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional, Dict
|
||||
from typing import Optional, Dict, Tuple
|
||||
from passlib.context import CryptContext
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.datamodels.datamodelUam import (
|
||||
Mandate,
|
||||
|
|
@ -38,6 +38,89 @@ pwdContext = CryptContext(schemes=["argon2"], deprecated="auto")
|
|||
# Cache for role IDs (roleLabel -> roleId)
|
||||
_roleIdCache: Dict[str, str] = {}
|
||||
|
||||
# PowerOn logical databases to scan (same set as gateway/scripts/script_db_export_migration.py ALL_DATABASES).
|
||||
_POWERON_DATABASE_NAMES: Tuple[str, ...] = (
|
||||
"poweron_app",
|
||||
"poweron_automation",
|
||||
"poweron_automation2",
|
||||
"poweron_billing",
|
||||
"poweron_chat",
|
||||
"poweron_chatbot",
|
||||
"poweron_commcoach",
|
||||
"poweron_knowledge",
|
||||
"poweron_management",
|
||||
"poweron_neutralization",
|
||||
"poweron_realestate",
|
||||
"poweron_teamsbot",
|
||||
"poweron_test",
|
||||
"poweron_trustee",
|
||||
"poweron_workspace",
|
||||
)
|
||||
|
||||

def _configPrefixForPoweronDatabase(dbName: str) -> str:
    return {
        "poweron_app": "DB_APP",
        "poweron_chat": "DB_CHAT",
        "poweron_chatbot": "DB_CHATBOT",
        "poweron_management": "DB_MANAGEMENT",
        "poweron_realestate": "DB_REALESTATE",
        "poweron_trustee": "DB_TRUSTEE",
        # Same as initAutomationTemplates: default DB_* (not a separate DB_AUTOMATION_* prefix).
        "poweron_automation": "DB",
        "poweron_billing": "DB",
    }.get(dbName, "DB")

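# Self-contained sketch of the prefix fallback used by _openConnectorForPoweronDatabase below:
# per-database keys such as DB_APP_HOST win when present, otherwise the generic DB_* keys apply.
from typing import Dict, Optional


def configValue(config: Dict[str, str], prefix: str, suffix: str,
                default: Optional[str] = None) -> Optional[str]:
    """Return {prefix}_{suffix} if set, else DB_{suffix}, else the default."""
    return config.get(f"{prefix}_{suffix}") or config.get(f"DB_{suffix}") or default


if __name__ == "__main__":
    cfg = {"DB_HOST": "db.internal", "DB_APP_HOST": "app-db.internal"}
    assert configValue(cfg, "DB_APP", "HOST") == "app-db.internal"
    assert configValue(cfg, "DB_CHAT", "HOST") == "db.internal"
    assert configValue(cfg, "DB_CHAT", "PORT", "5432") == "5432"
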
def _openConnectorForPoweronDatabase(dbName: str) -> Optional[DatabaseConnector]:
|
||||
"""Connect to a named PowerOn database using DB_* / DB_APP_* style config (shared with export script)."""
|
||||
prefix = _configPrefixForPoweronDatabase(dbName)
|
||||
host = APP_CONFIG.get(f"{prefix}_HOST") or APP_CONFIG.get("DB_HOST", "localhost")
|
||||
user = APP_CONFIG.get(f"{prefix}_USER") or APP_CONFIG.get("DB_USER")
|
||||
password = APP_CONFIG.get(f"{prefix}_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD_SECRET")
|
||||
portRaw = APP_CONFIG.get(f"{prefix}_PORT") or APP_CONFIG.get("DB_PORT", 5432)
|
||||
try:
|
||||
port = int(portRaw)
|
||||
except (TypeError, ValueError):
|
||||
port = 5432
|
||||
if not user or not password:
|
||||
logger.debug(
|
||||
f"bootstrap: skip legacy _* -> sys* migration for {dbName} (missing credentials for {prefix})"
|
||||
)
|
||||
return None
|
||||
try:
|
||||
return _get_cached_connector(
|
||||
dbHost=host,
|
||||
dbDatabase=dbName,
|
||||
dbUser=user,
|
||||
dbPassword=password,
|
||||
dbPort=port,
|
||||
userId=None,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"bootstrap: cannot open {dbName} for legacy _* -> sys* migration: {e}")
|
||||
return None
|
||||
|
||||

def migrateLegacyUnderscoreSysColumnsAllPoweronDatabases() -> None:
    """
    Run DatabaseConnector.migrateLegacyUnderscoreSysColumns on every configured PowerOn database.
    Actual table scan and SQL live in the connector module.
    """
    grandTotal = 0
    for dbName in _POWERON_DATABASE_NAMES:
        conn = _openConnectorForPoweronDatabase(dbName)
        if not conn:
            continue
        try:
            grandTotal += conn.migrateLegacyUnderscoreSysColumns()
        except Exception as e:
            logger.warning(f"bootstrap: migrateLegacyUnderscoreSysColumns failed for {dbName}: {e}")
    if grandTotal:
        logger.info(
            f"bootstrap: legacy _* -> sys* migration total {grandTotal} cell(s) across PowerOn databases"
        )

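# Hedged sketch: the actual per-table migration lives in DatabaseConnector.migrateLegacyUnderscoreSysColumns.
# Assuming it copies legacy _createdAt/_createdBy/_modifiedAt/_modifiedBy values into their sys*
# counterparts only where the sys* cell is still NULL, the statements could look roughly like this
# (column names for the modified-at/by pair are an assumption following the sysCreatedAt naming).
LEGACY_TO_SYS = {
    "_createdAt": "sysCreatedAt",
    "_createdBy": "sysCreatedBy",
    "_modifiedAt": "sysModifiedAt",
    "_modifiedBy": "sysModifiedBy",
}


def legacyCopyStatements(tableName: str) -> list:
    """Build one UPDATE per legacy/sys column pair (illustrative, not the real connector SQL)."""
    return [
        f'UPDATE "{tableName}" SET "{sysCol}" = "{legacyCol}" '
        f'WHERE "{sysCol}" IS NULL AND "{legacyCol}" IS NOT NULL'
        for legacyCol, sysCol in LEGACY_TO_SYS.items()
    ]


if __name__ == "__main__":
    for stmt in legacyCopyStatements("BillingTransaction"):
        print(stmt)
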
def initBootstrap(db: DatabaseConnector) -> None:
|
||||
"""
|
||||
|
|
@ -51,6 +134,9 @@ def initBootstrap(db: DatabaseConnector) -> None:
|
|||
# Initialize root mandate
|
||||
mandateId = initRootMandate(db)
|
||||
|
||||
# Copy legacy _createdAt/_createdBy/_modifiedAt/_modifiedBy into sys* on all PowerOn DBs (connector routine)
|
||||
migrateLegacyUnderscoreSysColumnsAllPoweronDatabases()
|
||||
|
||||
# Migrate existing mandate records: description -> label
|
||||
_migrateMandateDescriptionToLabel(db)
|
||||
|
||||
|
|
@ -92,9 +178,39 @@ def initBootstrap(db: DatabaseConnector) -> None:
|
|||
# Seed automation templates (after admin user exists)
|
||||
initAutomationTemplates(db, adminUserId)
|
||||
|
||||
# Initialize feature instances for root mandate
|
||||
# Run root-user migration (one-time, sets completion flag)
|
||||
migrationDone = False
|
||||
try:
|
||||
from modules.migration.migrateRootUsers import migrateRootUsers, _isMigrationCompleted
|
||||
migrationDone = _isMigrationCompleted(db)
|
||||
if not migrationDone:
|
||||
# Create root instances first (needed for migration), then migrate
|
||||
if mandateId:
|
||||
initRootMandateFeatures(db, mandateId)
|
||||
result = migrateRootUsers(db)
|
||||
migrationDone = result.get("status") != "error"
|
||||
else:
|
||||
migrationDone = True
|
||||
except Exception as e:
|
||||
logger.error(f"Root user migration failed: {e}")
|
||||
|
||||
# Run voice & documents migration (one-time, sets completion flag)
|
||||
try:
|
||||
from modules.migration.migrateVoiceAndDocuments import migrateVoiceAndDocuments
|
||||
migrateVoiceAndDocuments(db)
|
||||
except Exception as e:
|
||||
logger.error(f"Voice & documents migration failed: {e}")
|
||||
|
||||
# Backfill FileContentIndex scope fields from FileItem (one-time)
|
||||
try:
|
||||
from modules.migration.migrateRagScopeFields import runMigration as migrateRagScope
|
||||
migrateRagScope(appDb=db)
|
||||
except Exception as e:
|
||||
logger.error(f"RAG scope fields migration failed: {e}")
|
||||
|
||||
# After migration: root mandate is purely technical — no feature instances
|
||||
if not migrationDone and mandateId:
|
||||
initRootMandateFeatures(db, mandateId)
|
||||
|
||||
# Remove feature instances for features that no longer exist in the codebase
|
||||
_cleanupRemovedFeatureInstances(db)
|
||||
|
|
@ -110,18 +226,26 @@ def initBootstrap(db: DatabaseConnector) -> None:
|
|||
# Auto-provision Stripe Products/Prices for paid plans (idempotent)
|
||||
_bootstrapStripePrices()
|
||||
|
||||
# Purge soft-deleted mandates past 30-day retention
|
||||
try:
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
rootIf = getRootInterface()
|
||||
rootIf.purgeExpiredMandates(retentionDays=30)
|
||||
except Exception as e:
|
||||
logger.warning(f"Mandate retention purge failed: {e}")
|
||||
|
||||
|
||||
def initAutomationTemplates(dbApp: DatabaseConnector, adminUserId: Optional[str] = None) -> None:
|
||||
"""
|
||||
Seed initial automation templates from subAutomationTemplates.py.
|
||||
Only runs if no templates exist yet (bootstrap).
|
||||
Creates templates with _createdBy = admin user (SysAdmin privilege).
|
||||
Creates templates with sysCreatedBy = admin user (SysAdmin privilege).
|
||||
|
||||
NOTE: AutomationTemplate lives in poweron_automation database, not poweron_app!
|
||||
|
||||
Args:
|
||||
dbApp: Database connector for poweron_app (used to get admin user if needed)
|
||||
adminUserId: Admin user ID for _createdBy field
|
||||
adminUserId: Admin user ID for sysCreatedBy field
|
||||
"""
|
||||
import json
|
||||
from modules.features.automation.subAutomationTemplates import AUTOMATION_TEMPLATES
|
||||
|
|
@ -2004,71 +2128,43 @@ def _createStoreResourceRules(db: DatabaseConnector) -> None:
|
|||
|
||||
def initRootMandateBilling(mandateId: str) -> None:
|
||||
"""
|
||||
Initialize billing settings for root mandate.
|
||||
Root mandate uses PREPAY_USER model with default initial credit per user in settings (DEFAULT_USER_CREDIT_CHF at bootstrap only).
|
||||
Creates billing accounts for ALL users regardless of billing model (for audit trail).
|
||||
|
||||
Args:
|
||||
mandateId: Root mandate ID
|
||||
Initialize billing settings for root mandate (PREPAY_MANDATE).
|
||||
Creates mandate pool account and user audit accounts.
|
||||
"""
|
||||
try:
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface as getAppRootInterface
|
||||
from modules.datamodels.datamodelBilling import (
|
||||
BillingSettings,
|
||||
BillingModelEnum,
|
||||
DEFAULT_USER_CREDIT_CHF,
|
||||
parseBillingModelFromStoredValue,
|
||||
)
|
||||
from modules.datamodels.datamodelBilling import BillingSettings
|
||||
|
||||
billingInterface = _getRootInterface()
|
||||
appInterface = getAppRootInterface()
|
||||
|
||||
# Check if settings already exist
|
||||
existingSettings = billingInterface.getSettings(mandateId)
|
||||
if existingSettings:
|
||||
logger.info("Billing settings for root mandate already exist")
|
||||
else:
|
||||
settings = BillingSettings(
|
||||
mandateId=mandateId,
|
||||
billingModel=BillingModelEnum.PREPAY_USER,
|
||||
defaultUserCredit=DEFAULT_USER_CREDIT_CHF,
|
||||
warningThresholdPercent=10.0,
|
||||
notifyOnWarning=True
|
||||
)
|
||||
|
||||
billingInterface.createSettings(settings)
|
||||
logger.info(
|
||||
f"Created billing settings for root mandate: PREPAY_USER with {DEFAULT_USER_CREDIT_CHF} CHF default credit"
|
||||
)
|
||||
logger.info("Created billing settings for root mandate: PREPAY_MANDATE")
|
||||
existingSettings = billingInterface.getSettings(mandateId)
|
||||
|
||||
# Always create user accounts for all users (audit trail)
|
||||
if existingSettings:
|
||||
billingModel = parseBillingModelFromStoredValue(
|
||||
existingSettings.get("billingModel")
|
||||
).value
|
||||
|
||||
# Initial balance depends on billing model
|
||||
if billingModel == BillingModelEnum.PREPAY_USER.value:
|
||||
initialBalance = float(existingSettings.get("defaultUserCredit", 0.0))
|
||||
else:
|
||||
initialBalance = 0.0 # PREPAY_MANDATE: budget on pool account
|
||||
|
||||
billingInterface.getOrCreateMandateAccount(mandateId, initialBalance=0.0)
|
||||
userMandates = appInterface.getUserMandatesByMandate(mandateId)
|
||||
accountsCreated = 0
|
||||
|
||||
for um in userMandates:
|
||||
userId = um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None)
|
||||
if userId:
|
||||
existingAccount = billingInterface.getUserAccount(mandateId, userId)
|
||||
if not existingAccount:
|
||||
billingInterface.getOrCreateUserAccount(mandateId, userId, initialBalance=initialBalance)
|
||||
billingInterface.getOrCreateUserAccount(mandateId, userId, initialBalance=0.0)
|
||||
accountsCreated += 1
|
||||
logger.debug(f"Created billing account for user {userId}")
|
||||
|
||||
if accountsCreated > 0:
|
||||
logger.info(f"Created {accountsCreated} billing accounts for root mandate users with {initialBalance} CHF each")
|
||||
logger.info(f"Created {accountsCreated} billing audit accounts for root mandate users")
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to initialize root mandate billing (non-critical): {e}")
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -9,7 +9,7 @@ All billing data is stored in the poweron_billing database.

import logging
from typing import Dict, Any, List, Optional, Union
from datetime import date, datetime, timedelta
from datetime import date, datetime, timedelta, timezone
import uuid

from modules.connectors.connectorDbPostgre import DatabaseConnector

@ -24,19 +24,49 @@ from modules.datamodels.datamodelBilling import (
|
|||
BillingSettings,
|
||||
StripeWebhookEvent,
|
||||
UsageStatistics,
|
||||
BillingModelEnum,
|
||||
AccountTypeEnum,
|
||||
TransactionTypeEnum,
|
||||
ReferenceTypeEnum,
|
||||
PeriodTypeEnum,
|
||||
BillingBalanceResponse,
|
||||
BillingCheckResult,
|
||||
parseBillingModelFromStoredValue,
|
||||
STORAGE_PRICE_PER_GB_CHF,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)

def _logBillingTransactionsMissingSysCreatedAt(rows: List[Dict[str, Any]], context: str) -> None:
    """Log ERROR when sysCreatedAt is missing; does not raise."""
    missingIds = [r.get("id") for r in rows if r.get("sysCreatedAt") is None]
    if not missingIds:
        return
    cap = 40
    sample = missingIds[:cap]
    suffix = f"; ... (+{len(missingIds) - cap} more)" if len(missingIds) > cap else ""
    logger.error(
        "BillingTransaction missing sysCreatedAt (%s): count=%s; transactionIds=%s%s",
        context,
        len(missingIds),
        sample,
        suffix,
    )


def _numericSysCreatedAtForSort(row: Dict[str, Any]) -> float:
    v = row["sysCreatedAt"]
    if isinstance(v, datetime):
        return v.timestamp()
    return float(v)


def _sortBillingTransactionsBySysCreatedAtDesc(rows: List[Dict[str, Any]], context: str) -> None:
    _logBillingTransactionsMissingSysCreatedAt(rows, context)
    valid = [r for r in rows if r.get("sysCreatedAt") is not None]
    invalid = [r for r in rows if r.get("sysCreatedAt") is None]
    valid.sort(key=_numericSysCreatedAtForSort, reverse=True)
    rows[:] = valid + invalid


def _getAppDatabaseConnector() -> DatabaseConnector:
    """App DB connector (same config as UserMandate reads in this module)."""
    return DatabaseConnector(

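# Self-contained usage sketch for the sorting helpers above, with the sort replicated inline
# since the module is not importable here: datetime and epoch-style values are both accepted,
# and rows without sysCreatedAt are kept but pushed to the end instead of raising.
from datetime import datetime, timezone

if __name__ == "__main__":
    rows = [
        {"id": "t1", "sysCreatedAt": datetime(2025, 1, 1, tzinfo=timezone.utc)},
        {"id": "t2", "sysCreatedAt": 1767225600.0},  # 2026-01-01 as epoch seconds
        {"id": "t3", "sysCreatedAt": None},          # missing -> sorted last
    ]
    valid = [r for r in rows if r.get("sysCreatedAt") is not None]
    valid.sort(key=lambda r: r["sysCreatedAt"].timestamp()
               if isinstance(r["sysCreatedAt"], datetime) else float(r["sysCreatedAt"]),
               reverse=True)
    rows[:] = valid + [r for r in rows if r.get("sysCreatedAt") is None]
    assert [r["id"] for r in rows] == ["t2", "t1", "t3"]
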
@ -160,8 +190,6 @@ class BillingObjects:
|
|||
"""
|
||||
Get billing settings for a mandate.
|
||||
|
||||
Normalizes billingModel for API (legacy UNLIMITED → PREPAY_MANDATE) and persists once.
|
||||
|
||||
Args:
|
||||
mandateId: Mandate ID
|
||||
|
||||
|
|
@ -175,27 +203,7 @@ class BillingObjects:
|
|||
)
|
||||
if not results:
|
||||
return None
|
||||
row = dict(results[0])
|
||||
raw_bm = row.get("billingModel")
|
||||
parsed = parseBillingModelFromStoredValue(raw_bm)
|
||||
if str(raw_bm or "").strip().upper() == "UNLIMITED":
|
||||
try:
|
||||
self.updateSettings(
|
||||
row["id"],
|
||||
{"billingModel": BillingModelEnum.PREPAY_MANDATE.value},
|
||||
)
|
||||
logger.info(
|
||||
"Migrated billing settings for mandate %s: UNLIMITED → PREPAY_MANDATE",
|
||||
mandateId,
|
||||
)
|
||||
except Exception as mig_err:
|
||||
logger.warning(
|
||||
"Could not persist billing model migration for mandate %s: %s",
|
||||
mandateId,
|
||||
mig_err,
|
||||
)
|
||||
row["billingModel"] = parsed.value
|
||||
return row
|
||||
return dict(results[0])
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting billing settings: {e}")
|
||||
return None
|
||||
|
|
@ -226,13 +234,12 @@ class BillingObjects:
|
|||
"""
|
||||
return self.db.recordModify(BillingSettings, settingsId, updates)
|
||||
|
||||
def getOrCreateSettings(self, mandateId: str, defaultModel: BillingModelEnum = BillingModelEnum.PREPAY_MANDATE) -> Dict[str, Any]:
|
||||
def getOrCreateSettings(self, mandateId: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Get or create billing settings for a mandate.
|
||||
|
||||
Args:
|
||||
mandateId: Mandate ID
|
||||
defaultModel: Default billing model if creating
|
||||
|
||||
Returns:
|
||||
BillingSettings dict
|
||||
|
|
@ -243,8 +250,6 @@ class BillingObjects:
|
|||
|
||||
settings = BillingSettings(
|
||||
mandateId=mandateId,
|
||||
billingModel=defaultModel,
|
||||
defaultUserCredit=0.0,
|
||||
warningThresholdPercent=10.0,
|
||||
notifyOnWarning=True,
|
||||
)
|
||||
|
|
@ -281,7 +286,7 @@ class BillingObjects:
|
|||
BillingAccount,
|
||||
recordFilter={
|
||||
"mandateId": mandateId,
|
||||
"accountType": AccountTypeEnum.MANDATE.value
|
||||
"userId": None
|
||||
}
|
||||
)
|
||||
return results[0] if results else None
|
||||
|
|
@ -305,8 +310,7 @@ class BillingObjects:
|
|||
BillingAccount,
|
||||
recordFilter={
|
||||
"mandateId": mandateId,
|
||||
"userId": userId,
|
||||
"accountType": AccountTypeEnum.USER.value
|
||||
"userId": userId
|
||||
}
|
||||
)
|
||||
return results[0] if results else None
|
||||
|
|
@ -376,7 +380,6 @@ class BillingObjects:
|
|||
|
||||
account = BillingAccount(
|
||||
mandateId=mandateId,
|
||||
accountType=AccountTypeEnum.MANDATE,
|
||||
balance=initialBalance,
|
||||
enabled=True
|
||||
)
|
||||
|
|
@ -401,7 +404,6 @@ class BillingObjects:
|
|||
account = BillingAccount(
|
||||
mandateId=mandateId,
|
||||
userId=userId,
|
||||
accountType=AccountTypeEnum.USER,
|
||||
balance=initialBalance,
|
||||
enabled=True
|
||||
)
|
||||
|
|
@ -422,7 +424,7 @@ class BillingObjects:
|
|||
def ensureAllMandateSettingsExist(self) -> int:
|
||||
"""
|
||||
Efficiently ensure all mandates have billing settings.
|
||||
Creates default settings (PREPAY_MANDATE, 0 CHF) for mandates without settings.
|
||||
Creates default settings (0 CHF) for mandates without settings.
|
||||
Uses bulk queries to minimize database connections.
|
||||
|
||||
Returns:
|
||||
|
|
@ -451,16 +453,13 @@ class BillingObjects:
|
|||
if not mandateId or mandateId in existingMandateIds:
|
||||
continue
|
||||
|
||||
# Create default billing settings
|
||||
settings = BillingSettings(
|
||||
mandateId=mandateId,
|
||||
billingModel=BillingModelEnum.PREPAY_MANDATE,
|
||||
defaultUserCredit=0.0,
|
||||
warningThresholdPercent=10.0,
|
||||
notifyOnWarning=True,
|
||||
)
|
||||
self.createSettings(settings)
|
||||
existingMandateIds.add(mandateId) # Track newly created
|
||||
existingMandateIds.add(mandateId)
|
||||
settingsCreated += 1
|
||||
|
||||
if settingsCreated > 0:
|
||||
|
|
@ -475,11 +474,7 @@ class BillingObjects:
|
|||
def ensureAllUserAccountsExist(self) -> int:
|
||||
"""
|
||||
Ensure all users across all mandates have billing accounts.
|
||||
User accounts are always created regardless of billing model (for audit trail).
|
||||
Initial balance depends on billing model:
|
||||
- PREPAY_USER: defaultUserCredit from settings only for the root mandate; other mandates get 0.0
|
||||
- PREPAY_MANDATE: 0.0 (budget is on pool)
|
||||
|
||||
User accounts are always created for audit trail with initial balance 0.0.
|
||||
Uses bulk queries to minimize database connections.
|
||||
|
||||
Returns:
|
||||
|
|
@ -488,44 +483,29 @@ class BillingObjects:
|
|||
try:
|
||||
accountsCreated = 0
|
||||
appDb = _getAppDatabaseConnector()
|
||||
rootMandateId = _getCachedRootMandateId()
|
||||
|
||||
# Step 1: Get all billing settings (all mandates with settings get user accounts)
|
||||
allSettings = self.db.getRecordset(BillingSettings)
|
||||
billingMandates = {} # mandateId -> (billingModel, defaultCredit)
|
||||
for s in allSettings:
|
||||
billingModel = parseBillingModelFromStoredValue(s.get("billingModel")).value
|
||||
mid = s.get("mandateId")
|
||||
isRoot = rootMandateId is not None and str(mid) == str(rootMandateId)
|
||||
if billingModel == BillingModelEnum.PREPAY_USER.value:
|
||||
defaultCredit = (
|
||||
float(s.get("defaultUserCredit", 0.0) or 0.0) if isRoot else 0.0
|
||||
billingMandateIds = set(
|
||||
s.get("mandateId") for s in allSettings if s.get("mandateId")
|
||||
)
|
||||
else:
|
||||
defaultCredit = 0.0
|
||||
billingMandates[mid] = (billingModel, defaultCredit)
|
||||
|
||||
if not billingMandates:
|
||||
if not billingMandateIds:
|
||||
logger.debug("No billable mandates found, skipping account check")
|
||||
return 0
|
||||
|
||||
# Step 2: Get all existing USER accounts in one query
|
||||
allAccounts = self.db.getRecordset(
|
||||
BillingAccount,
|
||||
recordFilter={"accountType": AccountTypeEnum.USER.value}
|
||||
)
|
||||
allAccounts = self.db.getRecordset(BillingAccount)
|
||||
existingAccountKeys = set()
|
||||
for acc in allAccounts:
|
||||
if not acc.get("userId"):
|
||||
continue
|
||||
key = (acc.get("mandateId"), acc.get("userId"))
|
||||
existingAccountKeys.add(key)
|
||||
|
||||
# Step 3: Get all user-mandate combinations from APP database
|
||||
allUserMandates = appDb.getRecordset(
|
||||
UserMandate,
|
||||
recordFilter={"enabled": True}
|
||||
)
|
||||
|
||||
# Step 4: Create missing accounts
|
||||
for um in allUserMandates:
|
||||
mandateId = um.get("mandateId")
|
||||
userId = um.get("userId")
|
||||
|
|
@ -533,32 +513,20 @@ class BillingObjects:
|
|||
if not mandateId or not userId:
|
||||
continue
|
||||
|
||||
if mandateId not in billingMandates:
|
||||
if mandateId not in billingMandateIds:
|
||||
continue
|
||||
|
||||
key = (mandateId, userId)
|
||||
if key in existingAccountKeys:
|
||||
continue
|
||||
|
||||
billingModel, defaultCredit = billingMandates[mandateId]
|
||||
|
||||
account = BillingAccount(
|
||||
mandateId=mandateId,
|
||||
userId=userId,
|
||||
accountType=AccountTypeEnum.USER,
|
||||
balance=defaultCredit,
|
||||
balance=0.0,
|
||||
enabled=True
|
||||
)
|
||||
created = self.createAccount(account)
|
||||
|
||||
if defaultCredit > 0:
|
||||
self.createTransaction(BillingTransaction(
|
||||
accountId=created["id"],
|
||||
transactionType=TransactionTypeEnum.CREDIT,
|
||||
amount=defaultCredit,
|
||||
description="Initial credit for new user",
|
||||
referenceType=ReferenceTypeEnum.SYSTEM
|
||||
))
|
||||
self.createAccount(account)
|
||||
|
||||
existingAccountKeys.add(key)
|
||||
accountsCreated += 1
|
||||
|
|
@ -662,6 +630,10 @@ class BillingObjects:
|
|||
pagination=pagination,
|
||||
recordFilter=recordFilter
|
||||
)
|
||||
_logBillingTransactionsMissingSysCreatedAt(
|
||||
result["items"],
|
||||
"getTransactions(accountId) paginated",
|
||||
)
|
||||
return PaginatedResult(
|
||||
items=result["items"],
|
||||
totalItems=result["totalItems"],
|
||||
|
|
@ -674,7 +646,7 @@ class BillingObjects:
|
|||
if startDate or endDate:
|
||||
filtered = []
|
||||
for t in results:
|
||||
createdAt = t.get("_createdAt")
|
||||
createdAt = t.get("sysCreatedAt")
|
||||
if createdAt:
|
||||
tDate = createdAt.date() if isinstance(createdAt, datetime) else createdAt
|
||||
if startDate and tDate < startDate:
|
||||
|
|
@ -684,7 +656,7 @@ class BillingObjects:
|
|||
filtered.append(t)
|
||||
results = filtered
|
||||
|
||||
results.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
|
||||
_sortBillingTransactionsBySysCreatedAtDesc(results, "getTransactions(accountId)")
|
||||
|
||||
return results[offset:offset + limit]
|
||||
except Exception as e:
|
||||
|
|
@ -739,7 +711,10 @@ class BillingObjects:
|
|||
transactions = self.getTransactions(account["id"], limit=limit)
|
||||
allTransactions.extend(transactions)
|
||||
|
||||
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
|
||||
_sortBillingTransactionsBySysCreatedAtDesc(
|
||||
allTransactions,
|
||||
"getTransactionsByMandate",
|
||||
)
|
||||
return allTransactions[:limit]
|
||||
|
||||
# =========================================================================
|
||||
|
|
@ -810,33 +785,12 @@ class BillingObjects:
        """
        Check if there's sufficient balance for an operation.

        - PREPAY_USER: user.balance >= estimatedCost
        - PREPAY_MANDATE: mandate pool balance >= estimatedCost

        User accounts are always ensured to exist (for audit trail).
        Root mandate + PREPAY_USER: initial credit from settings.defaultUserCredit on first create.
        Missing settings: treated as PREPAY_MANDATE with empty pool (strict).
        Checks mandate pool balance against estimatedCost.
        User accounts are ensured to exist for audit trail.
        Missing settings: treated as PREPAY_MANDATE with empty pool.
        """
        settings = self.getSettings(mandateId)
        if not settings:
            billingModel = BillingModelEnum.PREPAY_MANDATE
            defaultCredit = 0.0
        else:
            billingModel = parseBillingModelFromStoredValue(settings.get("billingModel"))
            defaultCredit = float(settings.get("defaultUserCredit", 0.0) or 0.0)
        self.getOrCreateUserAccount(mandateId, userId, initialBalance=0.0)

        rootMandateId = _getCachedRootMandateId()
        isRootMandate = rootMandateId is not None and str(mandateId) == str(rootMandateId)
        if billingModel == BillingModelEnum.PREPAY_USER:
            initialBalance = defaultCredit if isRootMandate else 0.0
        else:
            initialBalance = 0.0
        self.getOrCreateUserAccount(mandateId, userId, initialBalance=initialBalance)

        if billingModel == BillingModelEnum.PREPAY_USER:
            account = self.getUserAccount(mandateId, userId)
            currentBalance = account.get("balance", 0.0) if account else 0.0
        else:
            poolAccount = self.getOrCreateMandateAccount(mandateId)
            currentBalance = poolAccount.get("balance", 0.0)

@ -846,10 +800,9 @@ class BillingObjects:
                reason="INSUFFICIENT_BALANCE",
                currentBalance=currentBalance,
                requiredAmount=estimatedCost,
                billingModel=billingModel,
            )

        return BillingCheckResult(allowed=True, currentBalance=currentBalance, billingModel=billingModel)
        return BillingCheckResult(allowed=True, currentBalance=currentBalance)
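Taken together, a pre-flight guard on a priced operation reads roughly like the sketch below. Illustrative only: the guard method's name and positional parameters are assumptions; only the BillingCheckResult fields and the INSUFFICIENT_BALANCE reason are visible in this hunk.

# Illustrative sketch; checkSufficientBalance and its parameter order are assumed.
from modules.interfaces.interfaceDbBilling import _getRootInterface

def guardPricedOperation(mandateId: str, userId: str, estimatedCost: float) -> None:
    billing = _getRootInterface()
    check = billing.checkSufficientBalance(mandateId, userId, estimatedCost)
    if not check.allowed:
        # Callers turn this into an HTTP 402 carrying the current balance and required amount.
        raise RuntimeError(
            f"INSUFFICIENT_BALANCE: have {check.currentBalance:.4f} CHF, need {estimatedCost:.4f} CHF"
        )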
def recordUsage(
|
||||
self,
|
||||
|
|
@ -870,10 +823,8 @@ class BillingObjects:
|
|||
"""
|
||||
Record usage cost as a billing transaction.
|
||||
|
||||
Transaction is ALWAYS recorded on the user's account (clean audit trail).
|
||||
Balance is deducted from the appropriate account based on billing model:
|
||||
- PREPAY_USER: deduct from user's own balance
|
||||
- PREPAY_MANDATE: deduct from mandate pool balance
|
||||
Transaction is recorded on the user's account (audit trail).
|
||||
Balance is always deducted from the mandate pool account (PREPAY_MANDATE).
|
||||
"""
|
||||
if priceCHF <= 0:
|
||||
return None
|
||||
|
|
@ -883,9 +834,6 @@ class BillingObjects:
|
|||
logger.debug(f"No billing settings for mandate {mandateId}, skipping usage recording")
|
||||
return None
|
||||
|
||||
billingModel = parseBillingModelFromStoredValue(settings.get("billingModel"))
|
||||
|
||||
# Transaction is ALWAYS on the user's account (audit trail)
|
||||
userAccount = self.getOrCreateUserAccount(mandateId, userId)
|
||||
|
||||
transaction = BillingTransaction(
|
||||
|
|
@ -906,13 +854,164 @@ class BillingObjects:
            errorCount=errorCount
        )

        # Determine where to deduct balance
        if billingModel == BillingModelEnum.PREPAY_USER:
            return self.createTransaction(transaction)
        if billingModel == BillingModelEnum.PREPAY_MANDATE:
            poolAccount = self.getOrCreateMandateAccount(mandateId)
            return self.createTransaction(transaction, balanceAccountId=poolAccount["id"])
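A typical post-call settlement then hands the computed price to recordUsage, which books the audit transaction on the user's account while debiting the mandate pool. The sketch below assumes keyword names for arguments that are not fully visible in this hunk.

# Illustrative sketch; only mandateId, userId and priceCHF appear in this hunk,
# the exact recordUsage signature is an assumption.
from modules.interfaces.interfaceDbBilling import _getRootInterface

def settleAiUsage(mandateId: str, userId: str, priceCHF: float) -> None:
    billing = _getRootInterface()
    # Audit record on the user account; the balance debit is routed to the
    # mandate pool account via balanceAccountId inside recordUsage.
    billing.recordUsage(mandateId=mandateId, userId=userId, priceCHF=priceCHF)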
    def _parseSettingsDateTime(self, value: Any) -> Optional[datetime]:
        """Parse datetime from billing settings row (ISO string or datetime)."""
        if value is None:
            return None
        if isinstance(value, datetime):
            if value.tzinfo:
                return value.astimezone(timezone.utc)
            return value.replace(tzinfo=timezone.utc)
        if isinstance(value, str):
            s = value.replace("Z", "+00:00")
            try:
                dt = datetime.fromisoformat(s)
            except ValueError:
                return None
            if dt.tzinfo:
                return dt.astimezone(timezone.utc)
            return dt.replace(tzinfo=timezone.utc)
        return None
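The parser's behaviour, given a BillingObjects instance `billing`, can be summarised in three cases:

from datetime import datetime, timezone

# "Z"-suffixed ISO strings are normalised to +00:00 before fromisoformat():
assert billing._parseSettingsDateTime("2025-01-01T00:00:00Z") == datetime(2025, 1, 1, tzinfo=timezone.utc)
# Naive datetimes are interpreted as UTC, not local time:
assert billing._parseSettingsDateTime(datetime(2025, 1, 1)) == datetime(2025, 1, 1, tzinfo=timezone.utc)
# Anything unparseable falls back to None:
assert billing._parseSettingsDateTime("not-a-date") is None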
    def resetStorageBillingPeriod(self, mandateId: str, periodStartAt: datetime) -> None:
        """Reset storage watermark state for a new subscription billing period (e.g. Stripe invoice.paid)."""
        if periodStartAt.tzinfo is None:
            periodStartAt = periodStartAt.replace(tzinfo=timezone.utc)
        else:
            periodStartAt = periodStartAt.astimezone(timezone.utc)
        settings = self.getOrCreateSettings(mandateId)
        prev = self._parseSettingsDateTime(settings.get("storagePeriodStartAt"))
        if prev is not None and abs((prev - periodStartAt).total_seconds()) < 2:
            return
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot

        usedMB = float(_getSubRoot().getMandateDataVolumeMB(mandateId))
        self.updateSettings(
            settings["id"],
            {
                "storageHighWatermarkMB": usedMB,
                "storageBilledUpToMB": 0.0,
                "storagePeriodStartAt": periodStartAt,
            },
        )
        logger.info(
            "Storage billing period reset for mandate %s at %s (usedMB=%.2f)",
            mandateId,
            periodStartAt.isoformat(),
            usedMB,
        )
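Hypothetical webhook glue (not part of this commit) showing how the period reset fits together with the AI-budget credit defined further down; how mandateId, planKey and the period start are extracted from the Stripe event is an assumption.

from datetime import datetime, timezone
from modules.interfaces.interfaceDbBilling import _getRootInterface

def onInvoicePaid(mandateId: str, planKey: str, periodStartEpoch: int) -> None:
    billing = _getRootInterface()
    periodStartAt = datetime.fromtimestamp(periodStartEpoch, tz=timezone.utc)
    # Re-base the storage high-watermark for the new period (no-op if called twice within ~2s).
    billing.resetStorageBillingPeriod(mandateId, periodStartAt)
    # Credit the plan's AI budget to the mandate pool for this period.
    billing.creditSubscriptionBudget(mandateId, planKey, periodLabel=periodStartAt.date().isoformat())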
    def reconcileMandateStorageBilling(self, mandateId: str) -> Optional[Dict[str, Any]]:
        """Debit prepay pool for new storage overage using period high-watermark (no credit on delete)."""
        settings = self.getSettings(mandateId)
        if not settings:
            return None
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.datamodels.datamodelSubscription import _getPlan

        subIface = _getSubRoot()
        usedMB = float(subIface.getMandateDataVolumeMB(mandateId))
        sub = subIface.getOperativeForMandate(mandateId)
        plan = _getPlan(sub.get("planKey", "")) if sub else None
        includedMB = plan.maxDataVolumeMB if plan and plan.maxDataVolumeMB is not None else None
        if includedMB is None:
            return None

        prevHigh = float(settings.get("storageHighWatermarkMB") or 0.0)
        high = max(prevHigh, usedMB)
        overageMB = max(0.0, high - float(includedMB))
        billed = float(settings.get("storageBilledUpToMB") or 0.0)
        deltaOverage = overageMB - billed
        settingsUpdates: Dict[str, Any] = {}
        if high != prevHigh:
            settingsUpdates["storageHighWatermarkMB"] = high
        if deltaOverage <= 1e-9:
            if settingsUpdates:
                self.updateSettings(settings["id"], settingsUpdates)
            return None

        costCHF = round((deltaOverage / 1024.0) * float(STORAGE_PRICE_PER_GB_CHF), 4)
        if costCHF <= 0:
            if settingsUpdates:
                self.updateSettings(settings["id"], settingsUpdates)
            return None

        poolAccount = self.getOrCreateMandateAccount(mandateId)
        transaction = BillingTransaction(
            accountId=poolAccount["id"],
            transactionType=TransactionTypeEnum.DEBIT,
            amount=costCHF,
            description=f"Speicher-Überhang ({deltaOverage:.2f} MB über Plan)",
            referenceType=ReferenceTypeEnum.STORAGE,
            referenceId=mandateId,
        )
        created = self.createTransaction(transaction)
        settingsUpdates["storageBilledUpToMB"] = overageMB
        self.updateSettings(settings["id"], settingsUpdates)
        logger.info(
            "Storage overage billed mandate=%s deltaOverageMB=%.4f costCHF=%s",
            mandateId,
            deltaOverage,
            costCHF,
        )
        return created
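A worked example of the watermark arithmetic, with STORAGE_PRICE_PER_GB_CHF assumed to be 0.25 purely for illustration:

includedMB, prevHigh, billed = 1024.0, 1500.0, 476.0   # plan includes 1 GB; 476 MB overage already billed
usedMB = 1800.0                                        # current indexed volume

high = max(prevHigh, usedMB)                           # 1800.0 -> the watermark only moves up
overageMB = max(0.0, high - includedMB)                # 776.0 MB above the plan
deltaOverage = overageMB - billed                      # 300.0 MB newly billable this run
costCHF = round((deltaOverage / 1024.0) * 0.25, 4)     # 0.0732 CHF debited from the mandate pool
# storageBilledUpToMB then advances to 776.0, so deleting files later never credits anything back.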
    # =========================================================================
    # Subscription AI-Budget Credit
    # =========================================================================

    def creditSubscriptionBudget(self, mandateId: str, planKey: str, periodLabel: str = "") -> Optional[Dict[str, Any]]:
        """Credit the plan's budgetAiCHF to the mandate pool account.

        Should be called once per billing period (initial activation + each invoice.paid).
        Returns the created CREDIT transaction or None if budget is 0."""
        from modules.datamodels.datamodelSubscription import _getPlan

        plan = _getPlan(planKey)
        if not plan or not plan.budgetAiCHF or plan.budgetAiCHF <= 0:
            return None

        poolAccount = self.getOrCreateMandateAccount(mandateId)
        description = f"AI-Budget ({planKey})"
        if periodLabel:
            description += f" – {periodLabel}"

        transaction = BillingTransaction(
            accountId=poolAccount["id"],
            transactionType=TransactionTypeEnum.CREDIT,
            amount=plan.budgetAiCHF,
            description=description,
            referenceType=ReferenceTypeEnum.SUBSCRIPTION,
            referenceId=mandateId,
        )
        created = self.createTransaction(transaction)
        logger.info(
            "AI-Budget credited mandate=%s plan=%s amount=%.2f CHF",
            mandateId, planKey, plan.budgetAiCHF,
        )
        return created

    def ensureActivationBudget(self, mandateId: str, planKey: str) -> Optional[Dict[str, Any]]:
        """Idempotent: credit the activation budget only if no SUBSCRIPTION credit exists yet."""
        poolAccount = self.getMandateAccount(mandateId)
        if not poolAccount:
            return self.creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")

        existing = self.db.getRecordset(
            BillingTransaction,
            recordFilter={
                "accountId": poolAccount["id"],
                "transactionType": TransactionTypeEnum.CREDIT.value,
                "referenceType": ReferenceTypeEnum.SUBSCRIPTION.value,
            },
        )
        if existing:
            return None

        return self.creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
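Because the activation credit is idempotent, retried activation flows can call it blindly. Given a root billing interface `billing` and a plan with a positive budgetAiCHF (the plan key here is made up):

first = billing.ensureActivationBudget(mandateId, "team")    # books the SUBSCRIPTION credit
second = billing.ensureActivationBudget(mandateId, "team")   # finds the existing credit, does nothing
assert first is not None and second is None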
# =========================================================================
|
||||
# Workflow Cost Query
|
||||
|
|
@ -928,112 +1027,6 @@ class BillingObjects:
|
|||
)
|
||||
return sum(t.get("amount", 0.0) for t in transactions)
|
||||
|
||||
# =========================================================================
|
||||
# Billing Model Switch Operations
|
||||
# =========================================================================
|
||||
|
||||
def switchBillingModel(self, mandateId: str, oldModel: BillingModelEnum, newModel: BillingModelEnum) -> Dict[str, Any]:
|
||||
"""
|
||||
Switch billing model with budget migration logged as BillingTransactions.
|
||||
|
||||
PREPAY_MANDATE -> PREPAY_USER: pool debited, equal shares credited to user accounts.
|
||||
PREPAY_USER -> PREPAY_MANDATE: user wallets debited, pool credited with sum.
|
||||
"""
|
||||
result = {"oldModel": oldModel.value, "newModel": newModel.value, "migratedAmount": 0.0, "userCount": 0}
|
||||
|
||||
if oldModel == newModel:
|
||||
return result
|
||||
|
||||
if oldModel == BillingModelEnum.PREPAY_MANDATE and newModel == BillingModelEnum.PREPAY_USER:
|
||||
poolAccount = self.getMandateAccount(mandateId)
|
||||
userAccounts = self.db.getRecordset(
|
||||
BillingAccount,
|
||||
recordFilter={"mandateId": mandateId, "accountType": AccountTypeEnum.USER.value}
|
||||
)
|
||||
poolBalance = poolAccount.get("balance", 0.0) if poolAccount else 0.0
|
||||
n = len(userAccounts)
|
||||
if poolAccount and poolBalance > 0:
|
||||
self.createTransaction(
|
||||
BillingTransaction(
|
||||
accountId=poolAccount["id"],
|
||||
transactionType=TransactionTypeEnum.DEBIT,
|
||||
amount=poolBalance,
|
||||
description="Model switch: distributed from mandate pool to user wallets",
|
||||
referenceType=ReferenceTypeEnum.SYSTEM,
|
||||
)
|
||||
)
|
||||
result["migratedAmount"] = poolBalance
|
||||
if n > 0:
|
||||
remaining = poolBalance
|
||||
for i, acc in enumerate(userAccounts):
|
||||
if i == n - 1:
|
||||
share = round(remaining, 4)
|
||||
else:
|
||||
share = round(poolBalance / n, 4)
|
||||
remaining -= share
|
||||
if share > 0:
|
||||
self.createTransaction(
|
||||
BillingTransaction(
|
||||
accountId=acc["id"],
|
||||
transactionType=TransactionTypeEnum.CREDIT,
|
||||
amount=share,
|
||||
description="Model switch: share from mandate pool",
|
||||
referenceType=ReferenceTypeEnum.SYSTEM,
|
||||
)
|
||||
)
|
||||
result["userCount"] = n
|
||||
logger.info(
|
||||
"Switched %s MANDATE->USER: migrated %.4f CHF to %d user account(s) (transactions logged)",
|
||||
mandateId,
|
||||
result["migratedAmount"],
|
||||
result["userCount"],
|
||||
)
|
||||
return result
|
||||
|
||||
if oldModel == BillingModelEnum.PREPAY_USER and newModel == BillingModelEnum.PREPAY_MANDATE:
|
||||
userAccounts = self.db.getRecordset(
|
||||
BillingAccount,
|
||||
recordFilter={"mandateId": mandateId, "accountType": AccountTypeEnum.USER.value}
|
||||
)
|
||||
totalUserBalance = sum(acc.get("balance", 0.0) for acc in userAccounts)
|
||||
for acc in userAccounts:
|
||||
b = acc.get("balance", 0.0)
|
||||
if b > 0:
|
||||
self.createTransaction(
|
||||
BillingTransaction(
|
||||
accountId=acc["id"],
|
||||
transactionType=TransactionTypeEnum.DEBIT,
|
||||
amount=b,
|
||||
description="Model switch: consolidated to mandate pool",
|
||||
referenceType=ReferenceTypeEnum.SYSTEM,
|
||||
)
|
||||
)
|
||||
poolAccount = self.getOrCreateMandateAccount(mandateId, initialBalance=0.0)
|
||||
if totalUserBalance > 0:
|
||||
self.createTransaction(
|
||||
BillingTransaction(
|
||||
accountId=poolAccount["id"],
|
||||
transactionType=TransactionTypeEnum.CREDIT,
|
||||
amount=totalUserBalance,
|
||||
description="Model switch: consolidated from user accounts",
|
||||
referenceType=ReferenceTypeEnum.SYSTEM,
|
||||
)
|
||||
)
|
||||
result["migratedAmount"] = totalUserBalance
|
||||
result["userCount"] = len(userAccounts)
|
||||
logger.info(
|
||||
"Switched %s USER->MANDATE: consolidated %.4f CHF from %d users into pool (transactions logged)",
|
||||
mandateId,
|
||||
totalUserBalance,
|
||||
len(userAccounts),
|
||||
)
|
||||
return result
|
||||
|
||||
if newModel == BillingModelEnum.PREPAY_MANDATE:
|
||||
self.getOrCreateMandateAccount(mandateId, initialBalance=0.0)
|
||||
|
||||
return result
|
||||
|
||||
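A worked example of the equal-share distribution used in the MANDATE->USER switch above; the last wallet absorbs the rounding remainder so the credited shares sum exactly to the debited pool:

poolBalance, n = 10.0, 3
remaining, shares = poolBalance, []
for i in range(n):
    share = round(remaining, 4) if i == n - 1 else round(poolBalance / n, 4)
    remaining -= share
    shares.append(share)
print(shares)                        # [3.3333, 3.3333, 3.3334]
assert round(sum(shares), 4) == poolBalance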
# =========================================================================
|
||||
# Statistics Operations
|
||||
# =========================================================================
|
||||
|
|
@ -1128,10 +1121,8 @@ class BillingObjects:
|
|||
def getBalancesForUser(self, userId: str) -> List[BillingBalanceResponse]:
|
||||
"""
|
||||
Get all billing balances for a user across mandates.
|
||||
Shows the mandate pool balance (shared budget visible to user).
|
||||
|
||||
Shows the effective available budget:
|
||||
- PREPAY_USER: user's own account balance
|
||||
- PREPAY_MANDATE: mandate pool balance (shared budget visible to user)
|
||||
Args:
|
||||
userId: User ID
|
||||
|
||||
|
|
@ -1154,7 +1145,7 @@ class BillingObjects:
|
|||
continue
|
||||
|
||||
mandate = rootInterface.getMandate(mandateId)
|
||||
if not mandate:
|
||||
if not mandate or not getattr(mandate, "enabled", True):
|
||||
continue
|
||||
|
||||
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
|
||||
|
|
@ -1163,27 +1154,15 @@ class BillingObjects:
|
|||
if not settings:
|
||||
continue
|
||||
|
||||
billingModel = parseBillingModelFromStoredValue(settings.get("billingModel"))
|
||||
|
||||
if billingModel == BillingModelEnum.PREPAY_USER:
|
||||
account = self.getOrCreateUserAccount(mandateId, userId)
|
||||
if not account:
|
||||
continue
|
||||
balance = account.get("balance", 0.0)
|
||||
warningThreshold = account.get("warningThreshold", 0.0)
|
||||
elif billingModel == BillingModelEnum.PREPAY_MANDATE:
|
||||
poolAccount = self.getOrCreateMandateAccount(mandateId)
|
||||
if not poolAccount:
|
||||
continue
|
||||
balance = poolAccount.get("balance", 0.0)
|
||||
warningThreshold = poolAccount.get("warningThreshold", 0.0)
|
||||
else:
|
||||
continue
|
||||
|
||||
balances.append(BillingBalanceResponse(
|
||||
mandateId=mandateId,
|
||||
mandateName=mandateName,
|
||||
billingModel=billingModel,
|
||||
balance=balance,
|
||||
warningThreshold=warningThreshold,
|
||||
isWarning=balance <= warningThreshold,
|
||||
|
|
@ -1244,7 +1223,7 @@ class BillingObjects:
|
|||
except Exception as e:
|
||||
logger.error(f"Error getting transactions for user: {e}")
|
||||
|
||||
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
|
||||
_sortBillingTransactionsBySysCreatedAtDesc(allTransactions, "getTransactionsForUser")
|
||||
return allTransactions[:limit]
|
||||
|
||||
# =========================================================================
|
||||
|
|
@ -1280,36 +1259,25 @@ class BillingObjects:
|
|||
if not mandateId:
|
||||
continue
|
||||
|
||||
billingModel = parseBillingModelFromStoredValue(settings.get("billingModel"))
|
||||
|
||||
# Get mandate info
|
||||
mandate = appInterface.getMandate(mandateId)
|
||||
mandateName = ""
|
||||
if mandate:
|
||||
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
|
||||
|
||||
# Get user accounts count (always exist now for audit trail)
|
||||
userAccounts = self.db.getRecordset(
|
||||
allMandateAccounts = self.db.getRecordset(
|
||||
BillingAccount,
|
||||
recordFilter={"mandateId": mandateId, "accountType": AccountTypeEnum.USER.value}
|
||||
recordFilter={"mandateId": mandateId}
|
||||
)
|
||||
userCount = len(userAccounts)
|
||||
userCount = sum(1 for acc in allMandateAccounts if acc.get("userId"))
|
||||
|
||||
if billingModel == BillingModelEnum.PREPAY_USER:
|
||||
totalBalance = sum(acc.get("balance", 0.0) for acc in userAccounts)
|
||||
elif billingModel == BillingModelEnum.PREPAY_MANDATE:
|
||||
poolAccount = self.getMandateAccount(mandateId)
|
||||
totalBalance = poolAccount.get("balance", 0.0) if poolAccount else 0.0
|
||||
else:
|
||||
totalBalance = 0.0
|
||||
|
||||
balances.append({
|
||||
"mandateId": mandateId,
|
||||
"mandateName": mandateName,
|
||||
"billingModel": billingModel.value,
|
||||
"totalBalance": totalBalance,
|
||||
"userCount": userCount,
|
||||
"defaultUserCredit": float(settings.get("defaultUserCredit", 0.0) or 0.0),
|
||||
"warningThresholdPercent": settings.get("warningThresholdPercent", 10.0),
|
||||
})
|
||||
|
||||
|
|
@ -1361,7 +1329,7 @@ class BillingObjects:
|
|||
logger.error(f"Error getting mandate transactions: {e}")
|
||||
|
||||
# Sort by creation date descending and limit
|
||||
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
|
||||
_sortBillingTransactionsBySysCreatedAtDesc(allTransactions, "getMandateTransactions")
|
||||
return allTransactions[:limit]
|
||||
|
||||
# =========================================================================
|
||||
|
|
@ -1385,9 +1353,8 @@ class BillingObjects:
|
|||
try:
|
||||
appInterface = getAppInterface(self.currentUser)
|
||||
|
||||
# Get all user accounts
|
||||
accountFilter = {"accountType": AccountTypeEnum.USER.value}
|
||||
allAccounts = self.db.getRecordset(BillingAccount, recordFilter=accountFilter)
|
||||
allAccounts = self.db.getRecordset(BillingAccount)
|
||||
allAccounts = [acc for acc in allAccounts if acc.get("userId")]
|
||||
|
||||
# Filter by mandate if specified
|
||||
if mandateIds:
|
||||
|
|
@ -1549,5 +1516,5 @@ class BillingObjects:
|
|||
logger.error(f"Error getting user transactions for mandates: {e}")
|
||||
|
||||
# Sort by creation date descending and limit
|
||||
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
|
||||
_sortBillingTransactionsBySysCreatedAtDesc(allTransactions, "getUserTransactionsForMandates")
|
||||
return allTransactions[:limit]
|
||||
|
|
|
|||
|
|
@ -251,9 +251,8 @@ class ChatObjects:
|
|||
objectFields[fieldName] = value
|
||||
else:
|
||||
# Field not in model - treat as scalar if simple, otherwise filter out
|
||||
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
|
||||
# Underscore-prefixed keys (e.g. UI meta) pass through; sys* live on PowerOnModel subclasses
|
||||
if fieldName.startswith("_"):
|
||||
# Metadata fields should be passed through to connector
|
||||
simpleFields[fieldName] = value
|
||||
elif isinstance(value, (str, int, float, bool, type(None))):
|
||||
simpleFields[fieldName] = value
|
||||
|
|
@ -652,6 +651,32 @@ class ChatObjects:
            totalPages=totalPages
        )

    def getLastMessageTimestamp(self, workflowId: str) -> Optional[str]:
        """Return the latest publishedAt/sysCreatedAt from ChatMessage for a workflow."""
        messages = self._getRecordset(ChatMessage, recordFilter={"workflowId": workflowId})
        if not messages:
            return None
        latest = None
        for msg in messages:
            ts = msg.get("publishedAt") or msg.get("sysCreatedAt")
            if ts and (latest is None or str(ts) > str(latest)):
                latest = ts
        return str(latest) if latest else None

    def searchWorkflowsByContent(self, query: str, limit: int = 50) -> List[str]:
        """Return workflow IDs whose messages contain the query string (case-insensitive)."""
        allMessages = self._getRecordset(ChatMessage)
        matchedIds: set = set()
        for msg in allMessages:
            content = msg.get("message") or ""
            if query in content.lower():
                wfId = msg.get("workflowId")
                if wfId:
                    matchedIds.add(wfId)
            if len(matchedIds) >= limit:
                break
        return list(matchedIds)
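A small illustrative caller for the two helpers above, assuming a ChatObjects instance `chat`. Note that searchWorkflowsByContent lowercases each message but not the query, so the caller passes a lowercased query to get the documented case-insensitive behaviour.

query = "invoice"
workflowIds = chat.searchWorkflowsByContent(query.lower(), limit=20)
for wfId in workflowIds:
    lastTs = chat.getLastMessageTimestamp(wfId)   # latest publishedAt/sysCreatedAt as a string, or None
    print(wfId, lastTs)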
def getWorkflow(self, workflowId: str) -> Optional[ChatWorkflow]:
|
||||
"""Returns a workflow by ID if user has access."""
|
||||
# Use RBAC filtering with featureInstanceId for instance-level isolation
|
||||
|
|
@ -885,7 +910,7 @@ class ChatObjects:
|
|||
"role": msg.get("role", "assistant"),
|
||||
"status": msg.get("status", "step"),
|
||||
"sequenceNr": msg.get("sequenceNr", 0),
|
||||
"publishedAt": msg.get("publishedAt") or msg.get("_createdAt") or msg.get("timestamp") or 0,
|
||||
"publishedAt": msg.get("publishedAt") or msg.get("sysCreatedAt") or msg.get("timestamp") or 0,
|
||||
"success": msg.get("success"),
|
||||
"actionId": msg.get("actionId"),
|
||||
"actionMethod": msg.get("actionMethod"),
|
||||
|
|
@ -1268,7 +1293,7 @@ class ChatObjects:
|
|||
# CASCADE DELETE: Delete all related data first
|
||||
|
||||
# 1. Delete message documents (but NOT the files themselves)
|
||||
# Bypass RBAC -- workflow access already verified, child records may have different _createdBy
|
||||
# Bypass RBAC -- workflow access already verified, child records may have different sysCreatedBy
|
||||
existing_docs = self.db.getRecordset(ChatDocument, recordFilter={"messageId": messageId})
|
||||
for doc in existing_docs:
|
||||
self.db.recordDelete(ChatDocument, doc["id"])
|
||||
|
|
@ -1296,7 +1321,7 @@ class ChatObjects:
|
|||
|
||||
|
||||
# Get documents for this message from normalized table
|
||||
# Bypass RBAC -- workflow access already verified, child records may have different _createdBy
|
||||
# Bypass RBAC -- workflow access already verified, child records may have different sysCreatedBy
|
||||
documents = self.db.getRecordset(ChatDocument, recordFilter={"messageId": messageId})
|
||||
|
||||
if not documents:
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ class KnowledgeObjects:
|
|||
def __init__(self):
|
||||
self.currentUser: Optional[User] = None
|
||||
self.userId: Optional[str] = None
|
||||
self._scopeCache: Dict[str, List[str]] = {}
|
||||
self._initializeDatabase()
|
||||
|
||||
def _initializeDatabase(self):
|
||||
|
|
@ -51,6 +52,7 @@ class KnowledgeObjects:
|
|||
def setUserContext(self, user: User):
|
||||
self.currentUser = user
|
||||
self.userId = user.id if user else None
|
||||
self._scopeCache = {}
|
||||
if self.userId:
|
||||
self.db.updateContext(self.userId)
|
||||
|
||||
|
|
@ -89,10 +91,20 @@ class KnowledgeObjects:
|
|||
|
||||
def deleteFileContentIndex(self, fileId: str) -> bool:
|
||||
"""Delete a FileContentIndex and all associated ContentChunks."""
|
||||
existing = self.getFileContentIndex(fileId)
|
||||
mandateId = (existing or {}).get("mandateId") or ""
|
||||
chunks = self.db.getRecordset(ContentChunk, recordFilter={"fileId": fileId})
|
||||
for chunk in chunks:
|
||||
self.db.recordDelete(ContentChunk, chunk["id"])
|
||||
return self.db.recordDelete(FileContentIndex, fileId)
|
||||
ok = self.db.recordDelete(FileContentIndex, fileId)
|
||||
if ok and mandateId:
|
||||
try:
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||
|
||||
_getRootInterface().reconcileMandateStorageBilling(str(mandateId))
|
||||
except Exception as ex:
|
||||
logger.warning("reconcileMandateStorageBilling after delete failed: %s", ex)
|
||||
return ok
|
||||
|
||||
# =========================================================================
|
||||
# ContentChunk CRUD
|
||||
|
|
@ -215,25 +227,88 @@ class KnowledgeObjects:
|
|||
# Semantic Search
|
||||
# =========================================================================
|
||||
|
||||
def _buildScopeFilter(self, userId: str = None, featureInstanceId: str = None, mandateId: str = None) -> dict:
|
||||
"""Build a scope-aware filter for RAG queries.
|
||||
Returns a filter dict that includes records visible to this user context."""
|
||||
return {
|
||||
"userId": userId,
|
||||
"featureInstanceId": featureInstanceId,
|
||||
"mandateId": mandateId,
|
||||
}
|
||||
|
||||
def _getScopedFileIds(self, userId: str = None, featureInstanceId: str = None, mandateId: str = None, isSysAdmin: bool = False) -> List[str]:
|
||||
"""Collect FileContentIndex IDs visible under the scope union:
|
||||
- scope=personal AND userId matches
|
||||
- scope=featureInstance AND featureInstanceId matches
|
||||
- scope=mandate AND mandateId matches
|
||||
- scope=global (only if isSysAdmin)
|
||||
"""
|
||||
_cacheKey = f"{userId}:{featureInstanceId}:{mandateId}:{isSysAdmin}"
|
||||
if _cacheKey in self._scopeCache:
|
||||
return self._scopeCache[_cacheKey]
|
||||
|
||||
allIds: set = set()
|
||||
|
||||
if isSysAdmin:
|
||||
globalIndexes = self.db.getRecordset(
|
||||
FileContentIndex, recordFilter={"scope": "global"}
|
||||
)
|
||||
for idx in globalIndexes:
|
||||
fid = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
|
||||
if fid:
|
||||
allIds.add(fid)
|
||||
|
||||
if userId:
|
||||
personalIndexes = self.db.getRecordset(
|
||||
FileContentIndex, recordFilter={"scope": "personal", "userId": userId}
|
||||
)
|
||||
for idx in personalIndexes:
|
||||
fid = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
|
||||
if fid:
|
||||
allIds.add(fid)
|
||||
|
||||
if featureInstanceId:
|
||||
instanceIndexes = self.db.getRecordset(
|
||||
FileContentIndex, recordFilter={"scope": "featureInstance", "featureInstanceId": featureInstanceId}
|
||||
)
|
||||
for idx in instanceIndexes:
|
||||
fid = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
|
||||
if fid:
|
||||
allIds.add(fid)
|
||||
|
||||
if mandateId:
|
||||
mandateIndexes = self.db.getRecordset(
|
||||
FileContentIndex, recordFilter={"scope": "mandate", "mandateId": mandateId}
|
||||
)
|
||||
for idx in mandateIndexes:
|
||||
fid = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
|
||||
if fid:
|
||||
allIds.add(fid)
|
||||
|
||||
self._scopeCache[_cacheKey] = list(allIds)
|
||||
return self._scopeCache[_cacheKey]
|
||||
|
||||
def semanticSearch(
|
||||
self,
|
||||
queryVector: List[float],
|
||||
userId: str = None,
|
||||
featureInstanceId: str = None,
|
||||
mandateId: str = None,
|
||||
isShared: bool = None,
|
||||
scope: str = None,
|
||||
limit: int = 10,
|
||||
minScore: float = None,
|
||||
contentType: str = None,
|
||||
isSysAdmin: bool = False,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Semantic search across ContentChunks using pgvector cosine similarity.
|
||||
|
||||
Args:
|
||||
queryVector: Query embedding vector.
|
||||
userId: Filter by user (Instance Layer).
|
||||
userId: Filter by user (personal scope).
|
||||
featureInstanceId: Filter by feature instance.
|
||||
mandateId: Filter by mandate (for Shared Layer lookups).
|
||||
isShared: If True, search Shared Layer via FileContentIndex join.
|
||||
mandateId: Filter by mandate (scope=mandate means visible to all mandate users).
|
||||
scope: If provided, filter by this specific scope value.
|
||||
If not provided, use scope-union approach (personal + featureInstance + mandate + global).
|
||||
limit: Max results.
|
||||
minScore: Minimum cosine similarity (0.0 - 1.0).
|
||||
contentType: Filter by content type (text, image, etc.).
|
||||
|
|
@ -242,25 +317,36 @@ class KnowledgeObjects:
|
|||
List of ContentChunk records with _score field, sorted by relevance.
|
||||
"""
|
||||
recordFilter = {}
|
||||
if userId:
|
||||
recordFilter["userId"] = userId
|
||||
if featureInstanceId:
|
||||
recordFilter["featureInstanceId"] = featureInstanceId
|
||||
if contentType:
|
||||
recordFilter["contentType"] = contentType
|
||||
|
||||
if isShared and mandateId:
|
||||
sharedIndexes = self.db.getRecordset(
|
||||
FileContentIndex,
|
||||
recordFilter={"mandateId": mandateId, "isShared": True},
|
||||
if scope:
|
||||
scopeFilter: Dict[str, Any] = {"scope": scope}
|
||||
if mandateId:
|
||||
scopeFilter["mandateId"] = mandateId
|
||||
if featureInstanceId:
|
||||
scopeFilter["featureInstanceId"] = featureInstanceId
|
||||
scopedFileIds = self.db.getRecordset(
|
||||
FileContentIndex, recordFilter=scopeFilter
|
||||
)
|
||||
sharedFileIds = [idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None) for idx in sharedIndexes]
|
||||
sharedFileIds = [fid for fid in sharedFileIds if fid]
|
||||
if not sharedFileIds:
|
||||
fileIds = [
|
||||
idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
|
||||
for idx in scopedFileIds
|
||||
]
|
||||
fileIds = [fid for fid in fileIds if fid]
|
||||
if not fileIds:
|
||||
return []
|
||||
recordFilter.pop("userId", None)
|
||||
recordFilter.pop("featureInstanceId", None)
|
||||
recordFilter["fileId"] = sharedFileIds
|
||||
recordFilter["fileId"] = fileIds
|
||||
elif userId or featureInstanceId or mandateId:
|
||||
scopedFileIds = self._getScopedFileIds(
|
||||
userId=userId,
|
||||
featureInstanceId=featureInstanceId,
|
||||
mandateId=mandateId,
|
||||
isSysAdmin=isSysAdmin,
|
||||
)
|
||||
if not scopedFileIds:
|
||||
return []
|
||||
recordFilter["fileId"] = scopedFileIds
|
||||
|
||||
return self.db.semanticSearch(
|
||||
modelClass=ContentChunk,
|
||||
|
|
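Two illustrative calls, assuming a KnowledgeObjects instance `knowledge` and a query embedding `vec` produced elsewhere: an explicit scope pins the FileContentIndex filter, while omitting it falls back to the scope union (personal + featureInstance + mandate, plus global for SysAdmins).

mandateWide = knowledge.semanticSearch(vec, mandateId=mandateId, scope="mandate", limit=5)
unionScoped = knowledge.semanticSearch(
    vec,
    userId=userId,
    featureInstanceId=instanceId,
    mandateId=mandateId,
    isSysAdmin=False,
    limit=5,
    minScore=0.7,
)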
@ -317,7 +403,7 @@ class KnowledgeObjects:
|
|||
if mandateId:
|
||||
files_shared = self.db.getRecordset(
|
||||
FileContentIndex,
|
||||
recordFilter={"mandateId": mandateId, "isShared": True},
|
||||
recordFilter={"mandateId": mandateId, "scope": "mandate"},
|
||||
)
|
||||
|
||||
by_id: Dict[str, Dict[str, Any]] = {}
|
||||
|
|
@ -466,6 +552,76 @@ class KnowledgeObjects:
|
|||
}
|
||||
|
||||
|
||||
def aggregateMandateRagTotalBytes(mandateId: str) -> int:
|
||||
"""Sum FileContentIndex.totalSize for a mandate.
|
||||
|
||||
Primary strategy (relies on correct scope fields on FileContentIndex):
|
||||
1. FileContentIndex rows with mandateId on the index
|
||||
2. FileContentIndex rows with featureInstanceId of any mandate FeatureInstance
|
||||
Deduplicates by id.
|
||||
"""
|
||||
if not mandateId:
|
||||
return 0
|
||||
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
|
||||
knowDb = getInterface(None).db
|
||||
appDb = getRootInterface().db
|
||||
byId: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
for row in knowDb.getRecordset(FileContentIndex, recordFilter={"mandateId": mandateId}):
|
||||
rid = row.get("id")
|
||||
if rid:
|
||||
byId[str(rid)] = row
|
||||
|
||||
instances = appDb.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId})
|
||||
instIds = [str(inst.get("id", "")) for inst in instances if inst.get("id")]
|
||||
|
||||
for instId in instIds:
|
||||
for row in knowDb.getRecordset(FileContentIndex, recordFilter={"featureInstanceId": instId}):
|
||||
rid = row.get("id")
|
||||
if rid and str(rid) not in byId:
|
||||
byId[str(rid)] = row
|
||||
|
||||
# DEPRECATED: file-ID-correlation fallback from poweron_management.
|
||||
# Only needed for pre-migration data where mandateId/featureInstanceId on the
|
||||
# FileContentIndex are empty. Remove once migrateRagScopeFields has been run.
|
||||
_fallbackCount = 0
|
||||
try:
|
||||
from modules.datamodels.datamodelFiles import FileItem
|
||||
from modules.interfaces.interfaceDbManagement import ComponentObjects
|
||||
mgmtDb = ComponentObjects().db
|
||||
knowledgeIf = getInterface(None)
|
||||
|
||||
fileIds: set = set()
|
||||
for f in mgmtDb.getRecordset(FileItem, recordFilter={"mandateId": mandateId}):
|
||||
fid = f.get("id") if isinstance(f, dict) else getattr(f, "id", None)
|
||||
if fid:
|
||||
fileIds.add(str(fid))
|
||||
for instId in instIds:
|
||||
for f in mgmtDb.getRecordset(FileItem, recordFilter={"featureInstanceId": instId}):
|
||||
fid = f.get("id") if isinstance(f, dict) else getattr(f, "id", None)
|
||||
if fid:
|
||||
fileIds.add(str(fid))
|
||||
|
||||
for fid in fileIds:
|
||||
if fid in byId:
|
||||
continue
|
||||
row = knowledgeIf.getFileContentIndex(fid)
|
||||
if row:
|
||||
byId[fid] = row
|
||||
_fallbackCount += 1
|
||||
except Exception as e:
|
||||
logger.warning("aggregateMandateRagTotalBytes fallback failed: %s", e)
|
||||
|
||||
total = sum(int(r.get("totalSize") or 0) for r in byId.values())
|
||||
logger.info(
|
||||
"aggregateMandateRagTotalBytes(%s): %d indexes, %d bytes (fallback: %d)",
|
||||
mandateId, len(byId), total, _fallbackCount,
|
||||
)
|
||||
return total
|
||||
|
||||
|
||||
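Given a mandateId string, callers convert the aggregated byte total to MB, which is how the subscription layer consumes it for capacity checks and storage billing:

from modules.interfaces.interfaceDbKnowledge import aggregateMandateRagTotalBytes

usedMB = aggregateMandateRagTotalBytes(mandateId) / (1024 * 1024)
print(f"mandate {mandateId}: {usedMB:.2f} MB indexed")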
def getInterface(currentUser: Optional[User] = None) -> KnowledgeObjects:
|
||||
"""Get or create a KnowledgeObjects singleton."""
|
||||
if "default" not in _instances:
|
||||
|
|
|
|||
|
|
@ -175,12 +175,7 @@ class ComponentObjects:
|
|||
# Complex objects that should be filtered out
|
||||
objectFields[fieldName] = value
|
||||
else:
|
||||
# Field not in model - treat as scalar if simple, otherwise filter out
|
||||
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
|
||||
if fieldName.startswith("_"):
|
||||
# Metadata fields should be passed through to connector
|
||||
simpleFields[fieldName] = value
|
||||
elif isinstance(value, (str, int, float, bool, type(None))):
|
||||
if isinstance(value, (str, int, float, bool, type(None))):
|
||||
simpleFields[fieldName] = value
|
||||
else:
|
||||
objectFields[fieldName] = value
|
||||
|
|
@ -609,7 +604,7 @@ class ComponentObjects:
|
|||
"""
|
||||
isSysAdmin = self._isSysAdmin()
|
||||
for prompt in prompts:
|
||||
isOwner = prompt.get("_createdBy") == self.userId
|
||||
isOwner = prompt.get("sysCreatedBy") == self.userId
|
||||
prompt["_permissions"] = {
|
||||
"canUpdate": isOwner or isSysAdmin,
|
||||
"canDelete": isOwner or isSysAdmin
|
||||
|
|
@ -621,13 +616,13 @@ class ComponentObjects:
|
|||
|
||||
Visibility rules:
|
||||
- SysAdmin: ALL prompts
|
||||
- Regular user: own prompts (_createdBy) + system prompts (isSystem=True)
|
||||
- Regular user: own prompts (sysCreatedBy) + system prompts (isSystem=True)
|
||||
"""
|
||||
if self._isSysAdmin():
|
||||
return self.db.getRecordset(Prompt)
|
||||
|
||||
# Get own prompts
|
||||
ownPrompts = self.db.getRecordset(Prompt, recordFilter={"_createdBy": self.userId})
|
||||
ownPrompts = self.db.getRecordset(Prompt, recordFilter={"sysCreatedBy": self.userId})
|
||||
|
||||
# Get system prompts
|
||||
systemPrompts = self.db.getRecordset(Prompt, recordFilter={"isSystem": True})
|
||||
|
|
@ -716,7 +711,7 @@ class ComponentObjects:
|
|||
|
||||
# Visibility check for non-SysAdmin: must be owner or system prompt
|
||||
if not self._isSysAdmin():
|
||||
isOwner = prompt.get("_createdBy") == self.userId
|
||||
isOwner = prompt.get("sysCreatedBy") == self.userId
|
||||
isSystem = prompt.get("isSystem", False)
|
||||
if not isOwner and not isSystem:
|
||||
return None
|
||||
|
|
@ -747,7 +742,7 @@ class ComponentObjects:
|
|||
raise ValueError(f"Prompt {promptId} not found")
|
||||
|
||||
# Permission check: owner or SysAdmin
|
||||
isOwner = (getattr(prompt, '_createdBy', None) == self.userId)
|
||||
isOwner = (getattr(prompt, 'sysCreatedBy', None) == self.userId)
|
||||
if not self._isSysAdmin() and not isOwner:
|
||||
raise PermissionError(f"No permission to update prompt {promptId}")
|
||||
|
||||
|
|
@ -784,7 +779,7 @@ class ComponentObjects:
|
|||
return False
|
||||
|
||||
# Permission check: owner or SysAdmin
|
||||
isOwner = (getattr(prompt, '_createdBy', None) == self.userId)
|
||||
isOwner = (getattr(prompt, 'sysCreatedBy', None) == self.userId)
|
||||
if not self._isSysAdmin() and not isOwner:
|
||||
raise PermissionError(f"No permission to delete prompt {promptId}")
|
||||
|
||||
|
|
@ -798,7 +793,7 @@ class ComponentObjects:
|
|||
def checkForDuplicateFile(self, fileHash: str, fileName: str) -> Optional[FileItem]:
|
||||
"""Checks if a file with the same hash AND fileName already exists for the current user.
|
||||
|
||||
Duplicate = same user (_createdBy) + same fileHash + same fileName.
|
||||
Duplicate = same user (sysCreatedBy) + same fileHash + same fileName.
|
||||
Same hash with different name is allowed (intentional copy by user).
|
||||
Uses direct DB query (not RBAC) because files are isolated per user.
|
||||
"""
|
||||
|
|
@ -809,7 +804,7 @@ class ComponentObjects:
|
|||
matchingFiles = self.db.getRecordset(
|
||||
FileItem,
|
||||
recordFilter={
|
||||
"_createdBy": self.userId,
|
||||
"sysCreatedBy": self.userId,
|
||||
"fileHash": fileHash,
|
||||
"fileName": fileName
|
||||
}
|
||||
|
|
@ -828,7 +823,7 @@ class ComponentObjects:
|
|||
mimeType=file["mimeType"],
|
||||
fileHash=file["fileHash"],
|
||||
fileSize=file["fileSize"],
|
||||
creationDate=file["creationDate"]
|
||||
sysCreatedAt=file.get("sysCreatedAt") or file.get("creationDate"),
|
||||
)
|
||||
|
||||
def getMimeType(self, fileName: str) -> str:
|
||||
|
|
@ -908,7 +903,7 @@ class ComponentObjects:
|
|||
def _getFilesByCurrentUser(self, recordFilter: Dict[str, Any] = None) -> List[Dict[str, Any]]:
|
||||
"""Files are always user-scoped. Returns only files owned by the current user,
|
||||
regardless of role (including SysAdmin). This bypasses RBAC intentionally."""
|
||||
filterDict = {"_createdBy": self.userId}
|
||||
filterDict = {"sysCreatedBy": self.userId}
|
||||
if recordFilter:
|
||||
filterDict.update(recordFilter)
|
||||
return self.db.getRecordset(FileItem, recordFilter=filterDict)
|
||||
|
|
@ -927,20 +922,27 @@ class ComponentObjects:
|
|||
If pagination is provided: PaginatedResult with items and metadata
|
||||
"""
|
||||
# User-scoping filter: every user only sees their own files (bypasses RBAC SysAdmin override)
|
||||
recordFilter = {"_createdBy": self.userId}
|
||||
recordFilter = {"sysCreatedBy": self.userId}
|
||||
|
||||
def _convertFileItems(files):
|
||||
fileItems = []
|
||||
for file in files:
|
||||
try:
|
||||
creationDate = file.get("creationDate")
|
||||
if creationDate is None or not isinstance(creationDate, (int, float)) or creationDate <= 0:
|
||||
file["creationDate"] = getUtcTimestamp()
|
||||
sysCreatedAt = file.get("sysCreatedAt") or file.get("creationDate")
|
||||
if sysCreatedAt is None or not isinstance(sysCreatedAt, (int, float)) or sysCreatedAt <= 0:
|
||||
file["sysCreatedAt"] = getUtcTimestamp()
|
||||
else:
|
||||
file["sysCreatedAt"] = sysCreatedAt
|
||||
|
||||
fileName = file.get("fileName")
|
||||
if not fileName or fileName == "None":
|
||||
continue
|
||||
|
||||
if file.get("scope") is None:
|
||||
file["scope"] = "personal"
|
||||
if file.get("neutralize") is None:
|
||||
file["neutralize"] = False
|
||||
|
||||
fileItem = FileItem(**file)
|
||||
fileItems.append(fileItem)
|
||||
except Exception as e:
|
||||
|
|
@ -969,7 +971,7 @@ class ComponentObjects:
|
|||
|
||||
def getFile(self, fileId: str) -> Optional[FileItem]:
|
||||
"""Returns a file by ID if it belongs to the current user (user-scoped)."""
|
||||
# Files are always user-scoped: filter by _createdBy (bypasses RBAC SysAdmin override)
|
||||
# Files are always user-scoped: filter by sysCreatedBy (bypasses RBAC SysAdmin override)
|
||||
filteredFiles = self._getFilesByCurrentUser(recordFilter={"id": fileId})
|
||||
|
||||
if not filteredFiles:
|
||||
|
|
@ -977,20 +979,19 @@ class ComponentObjects:
|
|||
|
||||
file = filteredFiles[0]
|
||||
try:
|
||||
# Get creation date from record or use current time
|
||||
creationDate = file.get("creationDate")
|
||||
if not creationDate:
|
||||
creationDate = getUtcTimestamp()
|
||||
sysCreatedAt = file.get("sysCreatedAt") or file.get("creationDate")
|
||||
if not sysCreatedAt:
|
||||
sysCreatedAt = getUtcTimestamp()
|
||||
|
||||
return FileItem(
|
||||
id=file.get("id"),
|
||||
mandateId=file.get("mandateId"),
|
||||
featureInstanceId=file.get("featureInstanceId", ""),
|
||||
fileName=file.get("fileName"),
|
||||
mimeType=file.get("mimeType"),
|
||||
workflowId=file.get("workflowId"),
|
||||
fileHash=file.get("fileHash"),
|
||||
fileSize=file.get("fileSize"),
|
||||
creationDate=creationDate
|
||||
sysCreatedAt=sysCreatedAt,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error converting file record: {str(e)}")
|
||||
|
|
@ -1053,15 +1054,20 @@ class ComponentObjects:
|
|||
# Ensure fileName is unique
|
||||
uniqueName = self._generateUniquefileName(name)
|
||||
|
||||
# Use mandateId and featureInstanceId from context for proper data isolation
|
||||
# Convert None to empty string to satisfy Pydantic validation
|
||||
mandateId = self.mandateId or ""
|
||||
featureInstanceId = self.featureInstanceId or ""
|
||||
|
||||
# Create FileItem instance
|
||||
if featureInstanceId:
|
||||
scope = "featureInstance"
|
||||
elif mandateId:
|
||||
scope = "mandate"
|
||||
else:
|
||||
scope = "personal"
|
||||
|
||||
fileItem = FileItem(
|
||||
mandateId=mandateId,
|
||||
featureInstanceId=featureInstanceId,
|
||||
scope=scope,
|
||||
fileName=uniqueName,
|
||||
mimeType=mimeType,
|
||||
fileSize=fileSize,
|
||||
|
|
@ -1146,7 +1152,7 @@ class ComponentObjects:
|
|||
self.db._ensure_connection()
|
||||
with self.db.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(uniqueIds, self.userId or ""),
|
||||
)
|
||||
accessibleIds = [row["id"] for row in cursor.fetchall()]
|
||||
|
|
@ -1157,7 +1163,7 @@ class ComponentObjects:
|
|||
|
||||
cursor.execute('DELETE FROM "FileData" WHERE "id" = ANY(%s)', (accessibleIds,))
|
||||
cursor.execute(
|
||||
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(accessibleIds, self.userId or ""),
|
||||
)
|
||||
deletedFiles = cursor.rowcount
|
||||
|
|
@ -1202,12 +1208,12 @@ class ComponentObjects:
|
|||
|
||||
def getFolder(self, folderId: str) -> Optional[Dict[str, Any]]:
|
||||
"""Returns a folder by ID if it belongs to the current user."""
|
||||
folders = self.db.getRecordset(FileFolder, recordFilter={"id": folderId, "_createdBy": self.userId or ""})
|
||||
folders = self.db.getRecordset(FileFolder, recordFilter={"id": folderId, "sysCreatedBy": self.userId or ""})
|
||||
return folders[0] if folders else None
|
||||
|
||||
def listFolders(self, parentId: Optional[str] = None) -> List[Dict[str, Any]]:
|
||||
"""List folders for current user, optionally filtered by parentId."""
|
||||
recordFilter = {"_createdBy": self.userId or ""}
|
||||
recordFilter = {"sysCreatedBy": self.userId or ""}
|
||||
if parentId is not None:
|
||||
recordFilter["parentId"] = parentId
|
||||
return self.db.getRecordset(FileFolder, recordFilter=recordFilter)
|
||||
|
|
@ -1256,7 +1262,7 @@ class ComponentObjects:
|
|||
self.db._ensure_connection()
|
||||
with self.db.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(uniqueIds, self.userId or ""),
|
||||
)
|
||||
accessibleIds = [row["id"] for row in cursor.fetchall()]
|
||||
|
|
@ -1265,8 +1271,8 @@ class ComponentObjects:
|
|||
raise FileNotFoundError(f"Files not found or not accessible: {missingIds}")
|
||||
|
||||
cursor.execute(
|
||||
'UPDATE "FileItem" SET "folderId" = %s, "_modifiedAt" = %s, "_modifiedBy" = %s '
|
||||
'WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'UPDATE "FileItem" SET "folderId" = %s, "sysModifiedAt" = %s, "sysModifiedBy" = %s '
|
||||
'WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(targetFolderId, getUtcTimestamp(), self.userId or "", accessibleIds, self.userId or ""),
|
||||
)
|
||||
movedFiles = cursor.rowcount
|
||||
|
|
@ -1295,7 +1301,7 @@ class ComponentObjects:
|
|||
|
||||
existingInTarget = self.db.getRecordset(
|
||||
FileFolder,
|
||||
recordFilter={"parentId": targetParentId or "", "_createdBy": self.userId or ""},
|
||||
recordFilter={"parentId": targetParentId or "", "sysCreatedBy": self.userId or ""},
|
||||
)
|
||||
existingNames = {f.get("name"): f.get("id") for f in existingInTarget}
|
||||
movingNames: Dict[str, str] = {}
|
||||
|
|
@ -1316,8 +1322,8 @@ class ComponentObjects:
|
|||
self.db._ensure_connection()
|
||||
with self.db.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
'UPDATE "FileFolder" SET "parentId" = %s, "_modifiedAt" = %s, "_modifiedBy" = %s '
|
||||
'WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'UPDATE "FileFolder" SET "parentId" = %s, "sysModifiedAt" = %s, "sysModifiedBy" = %s '
|
||||
'WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(targetParentId, getUtcTimestamp(), self.userId or "", uniqueIds, self.userId or ""),
|
||||
)
|
||||
movedFolders = cursor.rowcount
|
||||
|
|
@ -1335,7 +1341,7 @@ class ComponentObjects:
|
|||
if not folder:
|
||||
raise FileNotFoundError(f"Folder {folderId} not found")
|
||||
|
||||
childFolders = self.db.getRecordset(FileFolder, recordFilter={"parentId": folderId, "_createdBy": self.userId or ""})
|
||||
childFolders = self.db.getRecordset(FileFolder, recordFilter={"parentId": folderId, "sysCreatedBy": self.userId or ""})
|
||||
childFiles = self._getFilesByCurrentUser(recordFilter={"folderId": folderId})
|
||||
|
||||
if not recursive and (childFolders or childFiles):
|
||||
|
|
@ -1384,7 +1390,7 @@ class ComponentObjects:
|
|||
self.db._ensure_connection()
|
||||
with self.db.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
'SELECT "id" FROM "FileFolder" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'SELECT "id" FROM "FileFolder" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(uniqueIds, self.userId or ""),
|
||||
)
|
||||
rootAccessibleIds = [row["id"] for row in cursor.fetchall()]
|
||||
|
|
@ -1397,12 +1403,12 @@ class ComponentObjects:
|
|||
WITH RECURSIVE folder_tree AS (
|
||||
SELECT "id"
|
||||
FROM "FileFolder"
|
||||
WHERE "id" = ANY(%s) AND "_createdBy" = %s
|
||||
WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s
|
||||
UNION ALL
|
||||
SELECT child."id"
|
||||
FROM "FileFolder" child
|
||||
INNER JOIN folder_tree ft ON child."parentId" = ft."id"
|
||||
WHERE child."_createdBy" = %s
|
||||
WHERE child."sysCreatedBy" = %s
|
||||
)
|
||||
SELECT DISTINCT "id" FROM folder_tree
|
||||
""",
|
||||
|
|
@ -1411,7 +1417,7 @@ class ComponentObjects:
|
|||
allFolderIds = [row["id"] for row in cursor.fetchall()]
|
||||
|
||||
cursor.execute(
|
||||
'SELECT "id" FROM "FileItem" WHERE "folderId" = ANY(%s) AND "_createdBy" = %s',
|
||||
'SELECT "id" FROM "FileItem" WHERE "folderId" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(allFolderIds, self.userId or ""),
|
||||
)
|
||||
allFileIds = [row["id"] for row in cursor.fetchall()]
|
||||
|
|
@ -1419,7 +1425,7 @@ class ComponentObjects:
|
|||
if allFileIds:
|
||||
cursor.execute('DELETE FROM "FileData" WHERE "id" = ANY(%s)', (allFileIds,))
|
||||
cursor.execute(
|
||||
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(allFileIds, self.userId or ""),
|
||||
)
|
||||
deletedFiles = cursor.rowcount
|
||||
|
|
@ -1427,7 +1433,7 @@ class ComponentObjects:
|
|||
deletedFiles = 0
|
||||
|
||||
cursor.execute(
|
||||
'DELETE FROM "FileFolder" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
|
||||
'DELETE FROM "FileFolder" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
|
||||
(allFolderIds, self.userId or ""),
|
||||
)
|
||||
deletedFolders = cursor.rowcount
|
||||
|
|
|
|||
|
|
@ -293,9 +293,45 @@ class SubscriptionObjects:
            if current + delta > cap:
                from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionCapacityException
                raise SubscriptionCapacityException(resourceType=resourceType, currentCount=current, maxAllowed=cap)
        elif resourceType == "dataVolumeMB":
            cap = plan.maxDataVolumeMB
            if cap is None:
                return True
            currentMB = self.getMandateDataVolumeMB(mandateId)
            if currentMB + delta > cap:
                from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionCapacityException
                raise SubscriptionCapacityException(resourceType=resourceType, currentCount=int(currentMB), maxAllowed=cap)

        return True

    def getMandateDataVolumeMB(self, mandateId: str) -> float:
        """Total indexed data volume for the mandate (MB), for billing and capacity checks."""
        return self._getMandateDataVolumeMB(mandateId)

    def _getMandateDataVolumeMB(self, mandateId: str) -> float:
        """Sum RAG index size (FileContentIndex.totalSize) for the mandate; reads poweron_knowledge."""
        try:
            from modules.interfaces.interfaceDbKnowledge import aggregateMandateRagTotalBytes

            return aggregateMandateRagTotalBytes(mandateId) / (1024 * 1024)
        except Exception:
            return 0.0

    def getDataVolumeWarning(self, mandateId: str) -> Optional[Dict[str, Any]]:
        """Return a warning dict if mandate uses >=80% of maxDataVolumeMB, else None."""
        sub = self.getOperativeForMandate(mandateId)
        if not sub:
            return None
        plan = self.getPlan(sub.get("planKey", ""))
        if not plan or not plan.maxDataVolumeMB:
            return None
        usedMB = self.getMandateDataVolumeMB(mandateId)
        limitMB = plan.maxDataVolumeMB
        percent = (usedMB / limitMB * 100) if limitMB > 0 else 0
        if percent >= 80:
            return {"usedMB": round(usedMB, 2), "limitMB": limitMB, "percent": round(percent, 1), "warning": True}
        return {"usedMB": round(usedMB, 2), "limitMB": limitMB, "percent": round(percent, 1), "warning": False}
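A quick worked example of the 80% threshold: 900 MB used against a 1024 MB plan is 87.9%, so the warning flag is set.

usedMB, limitMB = 900.0, 1024
percent = usedMB / limitMB * 100     # 87.890625
print({"usedMB": round(usedMB, 2), "limitMB": limitMB, "percent": round(percent, 1), "warning": percent >= 80})
# {'usedMB': 900.0, 'limitMB': 1024, 'percent': 87.9, 'warning': True}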
# =========================================================================
|
||||
# Counting (cross-DB queries against poweron_app)
|
||||
# =========================================================================
|
||||
|
|
@ -321,11 +357,18 @@ class SubscriptionObjects:
    # Stripe quantity sync
    # =========================================================================

    def syncQuantityToStripe(self, subscriptionId: str) -> None:
    def syncQuantityToStripe(self, subscriptionId: str, *, raiseOnError: bool = False) -> None:
        """Update Stripe subscription item quantities to match actual active counts.
        Takes subscriptionId, not mandateId."""
        Takes subscriptionId, not mandateId.

        Args:
            raiseOnError: If True, propagate Stripe API errors instead of logging them.
                Use True for billing-critical paths (store activation).
        """
        sub = self.getById(subscriptionId)
        if not sub or not sub.get("stripeSubscriptionId"):
            if raiseOnError:
                raise ValueError(f"Subscription {subscriptionId} hat keine Stripe-Anbindung — Abrechnung nicht möglich.")
            return

        mandateId = sub["mandateId"]

@ -351,3 +394,5 @@ class SubscriptionObjects:
            logger.info("Stripe quantity synced for sub %s: users=%d, instances=%d", subscriptionId, activeUsers, activeInstances)
        except Exception as e:
            logger.error("syncQuantityToStripe(%s) failed: %s", subscriptionId, e)
            if raiseOnError:
                raise
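Illustrative callers for the two modes, assuming a SubscriptionObjects instance `subs`:

subs.syncQuantityToStripe(subscriptionId)                      # best effort: errors are only logged
subs.syncQuantityToStripe(subscriptionId, raiseOnError=True)   # billing-critical path: missing Stripe link or API errors propagate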
|
|
|||
|
|
@ -57,7 +57,7 @@ class FeatureInterface:
|
|||
records = self.db.getRecordset(Feature, recordFilter={"code": featureCode})
|
||||
if not records:
|
||||
return None
|
||||
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(records[0])
|
||||
return Feature(**cleanedRecord)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting feature {featureCode}: {e}")
|
||||
|
|
@ -74,7 +74,7 @@ class FeatureInterface:
|
|||
records = self.db.getRecordset(Feature)
|
||||
result = []
|
||||
for record in records:
|
||||
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(record)
|
||||
result.append(Feature(**cleanedRecord))
|
||||
return result
|
||||
except Exception as e:
|
||||
|
|
@ -120,7 +120,7 @@ class FeatureInterface:
|
|||
records = self.db.getRecordset(FeatureInstance, recordFilter={"id": instanceId})
|
||||
if not records:
|
||||
return None
|
||||
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(records[0])
|
||||
return FeatureInstance(**cleanedRecord)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting feature instance {instanceId}: {e}")
|
||||
|
|
@ -144,7 +144,7 @@ class FeatureInterface:
|
|||
records = self.db.getRecordset(FeatureInstance, recordFilter=recordFilter)
|
||||
result = []
|
||||
for record in records:
|
||||
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(record)
|
||||
result.append(FeatureInstance(**cleanedRecord))
|
||||
return result
|
||||
except Exception as e:
|
||||
|
|
@ -199,7 +199,7 @@ class FeatureInterface:
|
|||
if copyTemplateRoles:
|
||||
self._copyTemplateRoles(featureCode, mandateId, instanceId)
|
||||
|
||||
cleanedRecord = {k: v for k, v in createdInstance.items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(createdInstance)
|
||||
return FeatureInstance(**cleanedRecord)
|
||||
|
||||
except Exception as e:
|
||||
|
|
@ -208,7 +208,11 @@ class FeatureInterface:
|
|||
|
||||
def _copyTemplateRoles(self, featureCode: str, mandateId: str, instanceId: str) -> int:
|
||||
"""
|
||||
Copy global template roles for a feature to a new instance.
|
||||
Copy feature-specific template roles to a new instance.
|
||||
|
||||
INVARIANT: Feature instances MUST receive feature-specific roles
|
||||
(e.g. workspace-admin, workspace-user). NEVER generic mandate roles.
|
||||
Feature templates have featureCode set and isSystemRole=False.
|
||||
|
||||
Args:
|
||||
featureCode: Feature code
|
||||
|
|
@ -217,19 +221,30 @@ class FeatureInterface:
|
|||
|
||||
Returns:
|
||||
Number of roles copied
|
||||
|
||||
Raises:
|
||||
ValueError: If no feature-specific template roles exist
|
||||
"""
|
||||
try:
|
||||
# Find global template roles for this feature (mandateId=None)
|
||||
globalRoles = self.db.getRecordset(
|
||||
allTemplates = self.db.getRecordset(
|
||||
Role,
|
||||
recordFilter={"featureCode": featureCode, "mandateId": None}
|
||||
recordFilter={"featureCode": featureCode}
|
||||
)
|
||||
|
||||
if not globalRoles:
|
||||
logger.debug(f"No template roles found for feature {featureCode}")
|
||||
return 0
|
||||
featureTemplates = [
|
||||
r for r in allTemplates
|
||||
if r.get("mandateId") is None and r.get("featureInstanceId") is None
|
||||
]
|
||||
|
||||
templateRoleIds = [r.get("id") for r in globalRoles]
|
||||
if not featureTemplates:
|
||||
raise ValueError(
|
||||
f"No feature-specific template roles found for '{featureCode}'. "
|
||||
f"Each feature module must define TEMPLATE_ROLES and sync them to DB on startup."
|
||||
)
|
||||
|
||||
logger.info(f"Found {len(featureTemplates)} feature-specific template roles for '{featureCode}'")
|
||||
|
||||
templateRoleIds = [r.get("id") for r in featureTemplates]
|
||||
|
||||
# BULK: Load all template AccessRules in one query
|
||||
allTemplateRules = []
|
||||
|
|
@ -246,7 +261,7 @@ class FeatureInterface:
|
|||
|
||||
# Copy roles and their AccessRules
|
||||
copiedCount = 0
|
||||
for templateRole in globalRoles:
|
||||
for templateRole in featureTemplates:
|
||||
newRoleId = str(uuid.uuid4())
|
||||
|
||||
# Create new role for this instance
|
||||
|
|
@ -282,9 +297,11 @@ class FeatureInterface:
|
|||
logger.info(f"Copied {copiedCount} template roles for instance {instanceId}")
|
||||
return copiedCount
|
||||
|
||||
except ValueError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error copying template roles: {e}")
|
||||
return 0
|
||||
raise ValueError(f"Failed to copy template roles for '{featureCode}': {e}")
|
||||
|
||||
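The ValueError above implies each feature module ships its own template roles and syncs them at startup. A hypothetical shape is sketched below; field names beyond roleLabel/featureCode/isSystemRole are assumptions, and "workspace" is a made-up feature code.

TEMPLATE_ROLES = [
    {"roleLabel": "workspace-admin", "featureCode": "workspace", "isSystemRole": False},
    {"roleLabel": "workspace-user",  "featureCode": "workspace", "isSystemRole": False},
]
# A startup sync would upsert these as global templates (mandateId=None, featureInstanceId=None)
# so that _copyTemplateRoles finds them when a new feature instance is created.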
def syncRolesFromTemplate(self, featureInstanceId: str, addOnly: bool = True) -> Dict[str, int]:
|
||||
"""
|
||||
|
|
@ -309,11 +326,15 @@ class FeatureInterface:
|
|||
featureCode = instance.featureCode
|
||||
mandateId = instance.mandateId
|
||||
|
||||
# Get current template roles
|
||||
templateRoles = self.db.getRecordset(
|
||||
# Get feature-specific template roles (mandateId=None, featureInstanceId=None)
|
||||
allForFeature = self.db.getRecordset(
|
||||
Role,
|
||||
recordFilter={"featureCode": featureCode, "mandateId": None}
|
||||
recordFilter={"featureCode": featureCode}
|
||||
)
|
||||
templateRoles = [
|
||||
r for r in allForFeature
|
||||
if r.get("mandateId") is None and r.get("featureInstanceId") is None
|
||||
]
|
||||
templateLabels = {r.get("roleLabel") for r in templateRoles}
|
||||
|
||||
# Get current instance roles
|
||||
|
|
@ -414,7 +435,7 @@ class FeatureInterface:
|
|||
|
||||
updated = self.db.recordModify(FeatureInstance, instanceId, filteredData)
|
||||
if updated:
|
||||
cleanedRecord = {k: v for k, v in updated.items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(updated)
|
||||
return FeatureInstance(**cleanedRecord)
|
||||
return None
|
||||
except Exception as e:
|
||||
|
|
@ -463,7 +484,7 @@ class FeatureInterface:
|
|||
records = self.db.getRecordset(Role, recordFilter=recordFilter)
|
||||
result = []
|
||||
for record in records:
|
||||
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
|
||||
cleanedRecord = dict(record)
|
||||
result.append(Role(**cleanedRecord))
|
||||
return result
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ Data Namespace Structure:

GROUP permissions:
- data.uam.*: GROUP filters by mandate (via UserMandate)
- data.chat.*, data.files.*, data.automation.*: GROUP = MY (user-owned); with featureInstanceId set, additionally _createdBy
- data.chat.*, data.files.*, data.automation.*: GROUP = MY (user-owned); with featureInstanceId set, additionally sysCreatedBy
- data.feature.*: GROUP filters by mandateId/featureInstanceId
"""
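For quick reference, the GROUP rules from this docstring can be restated as a small lookup. This is a descriptive sketch only; the namespace keys and the system-table fallback are assumptions distilled from the docstring, not code from the commit:

    # Sketch: how GROUP read access behaves per data namespace (descriptive only).
    GROUP_ACCESS = {
        "uam": "filter by mandate membership (UserMandate)",
        "chat": "same as MY (owner via sysCreatedBy), narrowed to sysCreatedBy when a featureInstanceId is set",
        "files": "same as MY (owner via sysCreatedBy), narrowed to sysCreatedBy when a featureInstanceId is set",
        "automation": "same as MY (owner via sysCreatedBy), narrowed to sysCreatedBy when a featureInstanceId is set",
        "feature": "filter by mandateId / featureInstanceId",
    }

    def describeGroupAccess(namespace: str) -> str:
        return GROUP_ACCESS.get(namespace, "treated as a system table")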
@ -146,7 +146,7 @@ def getRecordsetWithRBAC(
mandateId: Explicit mandate context (from request header). Required for GROUP access.
featureInstanceId: Explicit feature instance context
enrichPermissions: If True, adds _permissions field to each record with row-level
permissions { canUpdate, canDelete } based on RBAC rules and _createdBy
permissions { canUpdate, canDelete } based on RBAC rules and sysCreatedBy
featureCode: Optional feature code for feature-specific tables (e.g., "trustee").
If None, table is treated as a system table.

@ -657,7 +657,7 @@ def buildRbacWhereClause(
# shared featureInstance (stale RBAC rules or merged roles). Same as MY.
namespaceAll = TABLE_NAMESPACE.get(table, "system")
if featureInstanceId and namespaceAll == "chat":
userIdFieldAll = "_createdBy"
userIdFieldAll = "sysCreatedBy"
if table == "UserInDB":
userIdFieldAll = "id"
elif table == "UserConnection":

@ -671,7 +671,7 @@ def buildRbacWhereClause(
return {"condition": " AND ".join(baseConditions), "values": baseValues}
return None

# My records - filter by _createdBy or userId field
# My records - filter by sysCreatedBy or userId field
if readLevel == AccessLevel.MY:
# Try common field names for creator
userIdField = None

@ -680,7 +680,7 @@ def buildRbacWhereClause(
elif table == "UserConnection":
userIdField = "userId"
else:
userIdField = "_createdBy"
userIdField = "sysCreatedBy"

conditions = list(baseConditions)
values = list(baseValues)
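The MY filter binds to a different owner column depending on the table. A self-contained sketch of that resolution and the resulting clause (table names are taken from the hunks above; the helper names and the dict shape are illustrative):

    def resolveOwnerField(table: str) -> str:
        # UserInDB rows are owned by themselves; UserConnection stores an explicit userId;
        # everything else is owned via the audit column sysCreatedBy (previously _createdBy).
        if table == "UserInDB":
            return "id"
        if table == "UserConnection":
            return "userId"
        return "sysCreatedBy"

    def myRecordsClause(table: str, userId: str) -> dict:
        field = resolveOwnerField(table)
        return {"condition": f'"{field}" = %s', "values": [userId]}

    # e.g. myRecordsClause("FileItem", "u-123") -> {'condition': '"sysCreatedBy" = %s', 'values': ['u-123']}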
@ -707,7 +707,7 @@ def buildRbacWhereClause(
if featureInstanceId and readLevel == AccessLevel.GROUP:
conditions = list(baseConditions)
values = list(baseValues)
conditions.append('"_createdBy" = %s')
conditions.append('"sysCreatedBy" = %s')
values.append(currentUser.id)
return {"condition": " AND ".join(conditions), "values": values}
return {"condition": " AND ".join(baseConditions), "values": baseValues}

@ -829,7 +829,7 @@ def _enrichRecordsWithPermissions(

Logic:
- AccessLevel.ALL ('a'): User can update/delete all records
- AccessLevel.MY ('m'): User can only update/delete records where _createdBy == userId
- AccessLevel.MY ('m'): User can only update/delete records where sysCreatedBy == userId
- AccessLevel.GROUP ('g'): Same as MY for now (group-level ownership)
- AccessLevel.NONE ('n'): User cannot update/delete any records

@ -846,7 +846,7 @@ def _enrichRecordsWithPermissions(

for record in records:
recordCopy = dict(record)
createdBy = record.get("_createdBy")
createdBy = record.get("sysCreatedBy")

# Determine canUpdate
canUpdate = _checkRowPermission(permissions.update, userId, createdBy)

@ -873,7 +873,7 @@ def _checkRowPermission(
Args:
accessLevel: The permission level (ALL, MY, GROUP, NONE)
userId: Current user's ID
recordCreatedBy: The _createdBy value of the record
recordCreatedBy: The sysCreatedBy value of the record

Returns:
True if user has permission, False otherwise

@ -884,9 +884,9 @@ def _checkRowPermission(
if accessLevel == AccessLevel.ALL:
return True

# MY and GROUP: Check ownership via _createdBy
# MY and GROUP: Check ownership via sysCreatedBy
if accessLevel in (AccessLevel.MY, AccessLevel.GROUP):
# If record has no _createdBy, allow access (can't verify ownership)
# If record has no sysCreatedBy, allow access (can't verify ownership)
if not recordCreatedBy:
return True
# If no userId, can't verify - deny
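A compact, runnable restatement of the row-level check described in the docstring hunks above. The AccessLevel enum is re-declared here purely for illustration, and the fail-open behaviour for records without sysCreatedBy follows the comments in the hunk:

    from enum import Enum
    from typing import Optional

    class AccessLevel(str, Enum):
        ALL = "a"
        MY = "m"
        GROUP = "g"
        NONE = "n"

    def checkRowPermission(accessLevel: AccessLevel, userId: Optional[str], recordCreatedBy: Optional[str]) -> bool:
        if accessLevel == AccessLevel.ALL:
            return True
        if accessLevel in (AccessLevel.MY, AccessLevel.GROUP):
            # Ownership is tracked in sysCreatedBy; if it is missing we cannot verify
            # ownership and allow access, if the user is unknown we deny.
            if not recordCreatedBy:
                return True
            if not userId:
                return False
            return recordCreatedBy == userId
        return False  # AccessLevel.NONE

    assert checkRowPermission(AccessLevel.MY, "u1", "u1") is True
    assert checkRowPermission(AccessLevel.MY, "u1", "u2") is False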
@ -11,9 +11,7 @@ import logging
from typing import AsyncGenerator, Callable, Dict, Any, Optional, List

from modules.connectors.connectorVoiceGoogle import ConnectorGoogleSpeech
from modules.datamodels.datamodelVoice import VoiceSettings
from modules.datamodels.datamodelUam import User
from modules.shared.timeUtils import getUtcTimestamp

logger = logging.getLogger(__name__)

@ -335,123 +333,6 @@ class VoiceObjects:
"error": str(e)
}

# Voice Settings Management

def getVoiceSettings(self, userId: str) -> Optional[VoiceSettings]:
"""
Get voice settings for a user.

Args:
userId: User ID to get settings for

Returns:
VoiceSettings object or None if not found
"""
try:
# This would typically query the database
# For now, return None as this is handled by the database interface
logger.debug(f"Getting voice settings for user: {userId}")
return None

except Exception as e:
logger.error(f"❌ Error getting voice settings: {e}")
return None

def createVoiceSettings(self, settingsData: Dict[str, Any]) -> Optional[VoiceSettings]:
"""
Create new voice settings.

Args:
settingsData: Dictionary containing voice settings data

Returns:
Created VoiceSettings object or None if failed
"""
try:
logger.info(f"Creating voice settings: {settingsData}")

# Ensure mandateId is set from context if not provided
if "mandateId" not in settingsData or not settingsData["mandateId"]:
if not self.mandateId:
raise ValueError("mandateId is required but not provided and context has no mandateId")
settingsData["mandateId"] = self.mandateId

# Add timestamps
currentTime = getUtcTimestamp()
settingsData["creationDate"] = currentTime
settingsData["lastModified"] = currentTime

# Create VoiceSettings object
voiceSettings = VoiceSettings(**settingsData)

logger.info(f"✅ Voice settings created: {voiceSettings.id}")
return voiceSettings

except Exception as e:
logger.error(f"❌ Error creating voice settings: {e}")
return None

def updateVoiceSettings(self, userId: str, settingsData: Dict[str, Any]) -> Optional[VoiceSettings]:
"""
Update existing voice settings.

Args:
userId: User ID to update settings for
settingsData: Dictionary containing updated voice settings data

Returns:
Updated VoiceSettings object or None if failed
"""
try:
logger.info(f"Updating voice settings for user {userId}: {settingsData}")

# Add last modified timestamp
settingsData["lastModified"] = getUtcTimestamp()

# Create updated VoiceSettings object
voiceSettings = VoiceSettings(**settingsData)

logger.info(f"✅ Voice settings updated: {voiceSettings.id}")
return voiceSettings

except Exception as e:
logger.error(f"❌ Error updating voice settings: {e}")
return None

def getOrCreateVoiceSettings(self, userId: str) -> Optional[VoiceSettings]:
"""
Get existing voice settings or create default ones.

Args:
userId: User ID to get/create settings for

Returns:
VoiceSettings object
"""
try:
# Try to get existing settings
existingSettings = self.getVoiceSettings(userId)

if existingSettings:
return existingSettings

# Create default settings if none exist
defaultSettings = {
"userId": userId,
"mandateId": self.mandateId,
"sttLanguage": "de-DE",
"ttsLanguage": "de-DE",
"ttsVoice": "de-DE-Wavenet-A",
"translationEnabled": True,
"targetLanguage": "en-US"
}

return self.createVoiceSettings(defaultSettings)

except Exception as e:
logger.error(f"❌ Error getting or creating voice settings: {e}")
return None

# Language and Voice Information

async def getAvailableLanguages(self) -> Dict[str, Any]:
1
modules/migration/__init__.py
Normal file

@ -0,0 +1 @@
# Migration modules

114
modules/migration/migrateRagScopeFields.py
Normal file

@ -0,0 +1,114 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Migration: Backfill FileContentIndex scope fields from FileItem (Single Source of Truth).

Fixes legacy rows in poweron_knowledge where scope/mandateId/featureInstanceId
are empty or default ("personal") despite the corresponding FileItem having correct values.

Idempotent — safe to run multiple times. Uses a DB flag to skip if already completed.
"""

import logging
from modules.shared.configuration import APP_CONFIG
from modules.connectors.connectorDbPostgre import _get_cached_connector

logger = logging.getLogger(__name__)

_MIGRATION_FLAG_KEY = "migration_rag_scope_fields_completed"


def _isMigrationCompleted(appDb) -> bool:
try:
from modules.datamodels.datamodelUam import Mandate
records = appDb.getRecordset(Mandate, recordFilter={"name": _MIGRATION_FLAG_KEY})
return len(records) > 0
except Exception:
return False


def _setMigrationCompleted(appDb) -> None:
try:
from modules.datamodels.datamodelUam import Mandate
flag = Mandate(name=_MIGRATION_FLAG_KEY, description="RAG scope fields migration completed")
appDb.recordCreate(Mandate, flag)
except Exception as e:
logger.error("Could not set migration flag: %s", e)


def runMigration(appDb=None) -> dict:
"""Backfill FileContentIndex rows from FileItem metadata.

Returns dict with counts: {total, updated, skipped, orphaned}.
"""
from modules.datamodels.datamodelKnowledge import FileContentIndex
from modules.datamodels.datamodelFiles import FileItem
from modules.interfaces.interfaceDbKnowledge import getInterface as getKnowledgeInterface
from modules.interfaces.interfaceDbManagement import ComponentObjects

if appDb is None:
from modules.interfaces.interfaceDbApp import getRootInterface
appDb = getRootInterface().db

if _isMigrationCompleted(appDb):
logger.info("migrateRagScopeFields: already completed, skipping")
return {"total": 0, "updated": 0, "skipped": 0, "orphaned": 0}

knowDb = getKnowledgeInterface(None).db
mgmtDb = ComponentObjects().db

allIndexes = knowDb.getRecordset(FileContentIndex, recordFilter={})
total = len(allIndexes)
updated = 0
skipped = 0
orphaned = 0

logger.info("migrateRagScopeFields: processing %d FileContentIndex rows", total)

for idx in allIndexes:
idxId = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
if not idxId:
skipped += 1
continue

fileItem = mgmtDb._loadRecord(FileItem, str(idxId))
if not fileItem:
orphaned += 1
continue

_get = (lambda k, d="": fileItem.get(k, d)) if isinstance(fileItem, dict) else (lambda k, d="": getattr(fileItem, k, d))

fiScope = _get("scope") or "personal"
fiMandateId = str(_get("mandateId") or "")
fiFeatureInstanceId = str(_get("featureInstanceId") or "")

idxGet = (lambda k, d="": idx.get(k, d)) if isinstance(idx, dict) else (lambda k, d="": getattr(idx, k, d))
currentScope = idxGet("scope") or "personal"
currentMandateId = str(idxGet("mandateId") or "")
currentFeatureInstanceId = str(idxGet("featureInstanceId") or "")

updates = {}
if fiScope != currentScope:
updates["scope"] = fiScope
if fiMandateId and fiMandateId != currentMandateId:
updates["mandateId"] = fiMandateId
if fiFeatureInstanceId and fiFeatureInstanceId != currentFeatureInstanceId:
updates["featureInstanceId"] = fiFeatureInstanceId

if updates:
try:
knowDb.recordModify(FileContentIndex, str(idxId), updates)
updated += 1
logger.debug("migrateRagScopeFields: updated %s -> %s", idxId, updates)
except Exception as e:
logger.error("migrateRagScopeFields: failed to update %s: %s", idxId, e)
skipped += 1
else:
skipped += 1

_setMigrationCompleted(appDb)
logger.info(
"migrateRagScopeFields complete: total=%d, updated=%d, skipped=%d, orphaned=%d",
total, updated, skipped, orphaned,
)
return {"total": total, "updated": updated, "skipped": skipped, "orphaned": orphaned}
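A hedged example of how this backfill might be wired into application startup; runMigration and its count keys come from the file above, while the wrapper function and call site are assumptions:

    import logging

    from modules.migration.migrateRagScopeFields import runMigration

    logger = logging.getLogger(__name__)

    def runStartupMigrations(appDb) -> None:
        # Safe to call on every start: runMigration() checks its completion flag first.
        counts = runMigration(appDb)
        if counts.get("updated") or counts.get("orphaned"):
            logger.info(
                "RAG scope backfill: %(updated)s updated, %(skipped)s skipped, %(orphaned)s orphaned of %(total)s total",
                counts,
            )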
329
modules/migration/migrateRootUsers.py
Normal file

@ -0,0 +1,329 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Migration: Clean up the Root mandate.
Moves all end-user data from Root mandate shared instances to own mandates.
Called once from bootstrap, sets a DB flag to prevent re-execution.
"""

import logging
from typing import Optional, List, Dict, Any

logger = logging.getLogger(__name__)

_MIGRATION_FLAG_KEY = "migration_root_users_completed"

_DATA_TABLES = [
"ChatWorkflow",
"FileItem",
"DataSource",
"DataNeutralizerAttributes",
"FileContentIndex",
]


def _isMigrationCompleted(db) -> bool:
"""Check if migration has already been executed."""
try:
from modules.datamodels.datamodelUam import Mandate
records = db.getRecordset(Mandate, recordFilter={"name": _MIGRATION_FLAG_KEY})
return len(records) > 0
except Exception:
return False


def _setMigrationCompleted(db) -> None:
"""Set flag that migration is completed (uses a settings-like record)."""
if _isMigrationCompleted(db):
return
try:
from modules.datamodels.datamodelUam import Mandate
flag = Mandate(name=_MIGRATION_FLAG_KEY, label="Migration completed", enabled=False, isSystem=True)
db.recordCreate(Mandate, flag)
logger.info("Migration flag set: root user migration completed")
except Exception as e:
logger.error(f"Failed to set migration flag: {e}")


def _findOrCreateTargetInstance(db, featureInterface, featureCode: str, targetMandateId: str, rootInstance: dict) -> dict:
"""Find existing or create new FeatureInstance in target mandate. Idempotent."""
from modules.datamodels.datamodelFeatures import FeatureInstance

existing = db.getRecordset(FeatureInstance, recordFilter={
"featureCode": featureCode,
"mandateId": targetMandateId,
})
if existing:
logger.debug(f"Target instance already exists for {featureCode} in mandate {targetMandateId}")
return existing[0]

label = rootInstance.get("label") or featureCode
instance = featureInterface.createFeatureInstance(
featureCode=featureCode,
mandateId=targetMandateId,
label=label,
enabled=True,
copyTemplateRoles=True,
)
if isinstance(instance, dict):
return instance
return instance.model_dump() if hasattr(instance, "model_dump") else {"id": instance.id}


def _migrateDataRecords(db, oldInstanceId: str, newInstanceId: str, userId: str) -> int:
"""Bulk-update featureInstanceId on all data tables for records owned by userId."""
totalMigrated = 0
db._ensure_connection()
for tableName in _DATA_TABLES:
try:
with db.connection.cursor() as cursor:
cursor.execute(
f'UPDATE "{tableName}" '
f'SET "featureInstanceId" = %s '
f'WHERE "featureInstanceId" = %s AND "sysCreatedBy" = %s',
(newInstanceId, oldInstanceId, userId),
)
count = cursor.rowcount
db.connection.commit()
if count > 0:
logger.info(f" Migrated {count} rows in {tableName}: {oldInstanceId} -> {newInstanceId}")
totalMigrated += count
except Exception as e:
try:
db.connection.rollback()
except Exception:
pass
logger.debug(f" Table {tableName} skipped (may not exist or no matching column): {e}")
return totalMigrated


def _grantFeatureAccess(db, userId: str, featureInstanceId: str) -> dict:
"""Create FeatureAccess + admin role on a feature instance. Idempotent."""
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
from modules.datamodels.datamodelRbac import Role

existing = db.getRecordset(FeatureAccess, recordFilter={
"userId": userId,
"featureInstanceId": featureInstanceId,
})
if existing:
logger.debug(f"FeatureAccess already exists for user {userId} on instance {featureInstanceId}")
return existing[0]

fa = FeatureAccess(userId=userId, featureInstanceId=featureInstanceId, enabled=True)
createdFa = db.recordCreate(FeatureAccess, fa.model_dump())
if not createdFa:
logger.warning(f"Failed to create FeatureAccess for user {userId} on instance {featureInstanceId}")
return {}

instanceRoles = db.getRecordset(Role, recordFilter={"featureInstanceId": featureInstanceId})
adminRoleId = None
for r in instanceRoles:
roleLabel = (r.get("roleLabel") or "").lower()
if roleLabel.endswith("-admin"):
adminRoleId = r.get("id")
break
if not adminRoleId:
raise ValueError(
f"No feature-specific admin role for instance {featureInstanceId}. "
f"Cannot create FeatureAccess without role — even in migration context."
)
far = FeatureAccessRole(featureAccessId=createdFa["id"], roleId=adminRoleId)
db.recordCreate(FeatureAccessRole, far.model_dump())

return createdFa


def migrateRootUsers(db, dryRun: bool = False) -> dict:
"""
Migrate all end-user feature data from Root mandate to personal mandates.

Algorithm:
STEP 1: For each user with FeatureAccess on Root instances:
- If user has own mandate: target = existing mandate
- If not: create personal mandate via _provisionMandateForUser
- For each FeatureAccess: create new instance in target, migrate data, transfer access

STEP 2: Clean up Root:
- Delete all FeatureInstances in Root
- Remove UserMandate for non-sysadmin users

Args:
db: Database connector
dryRun: If True, log actions without making changes

Returns:
Summary dict with migration statistics
"""
if _isMigrationCompleted(db):
logger.info("Root user migration already completed, skipping")
return {"status": "already_completed"}

from modules.datamodels.datamodelUam import Mandate, User, UserInDB
from modules.datamodels.datamodelMembership import (
UserMandate, UserMandateRole, FeatureAccess, FeatureAccessRole,
)
from modules.datamodels.datamodelFeatures import FeatureInstance
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceFeatures import getFeatureInterface

rootInterface = getRootInterface()
featureInterface = getFeatureInterface(db)
stats = {
"usersProcessed": 0,
"mandatesCreated": 0,
"instancesMigrated": 0,
"dataRowsMigrated": 0,
"rootInstancesDeleted": 0,
"rootMembershipsRemoved": 0,
"dryRun": dryRun,
}

# Find root mandate
rootMandates = db.getRecordset(Mandate, recordFilter={"name": "root", "isSystem": True})
if not rootMandates:
logger.warning("No root mandate found, nothing to migrate")
return {"status": "no_root_mandate"}
rootMandateId = rootMandates[0].get("id")

# Get all feature instances in root
rootInstances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": rootMandateId})
if not rootInstances:
logger.info("No feature instances in root mandate, nothing to migrate")
if not dryRun:
_setMigrationCompleted(db)
return {"status": "no_instances", **stats}

# Get all FeatureAccess on root instances
rootInstanceIds = {inst.get("id") for inst in rootInstances}

# Collect unique users with access on root instances
usersToMigrate = {}
for instanceId in rootInstanceIds:
accesses = db.getRecordset(FeatureAccess, recordFilter={"featureInstanceId": instanceId})
for access in accesses:
userId = access.get("userId")
if userId not in usersToMigrate:
usersToMigrate[userId] = []
usersToMigrate[userId].append({
"featureAccessId": access.get("id"),
"featureInstanceId": instanceId,
})

logger.info(f"Migration: {len(usersToMigrate)} users with {sum(len(v) for v in usersToMigrate.values())} accesses on {len(rootInstances)} root instances")

# STEP 1: Migrate users
for userId, accessList in usersToMigrate.items():
try:
# Find user
users = db.getRecordset(UserInDB, recordFilter={"id": userId})
if not users:
logger.warning(f"User {userId} not found, skipping")
continue
user = users[0]
username = user.get("username", "unknown")

# Check if user has own non-root mandate
userMandates = db.getRecordset(UserMandate, recordFilter={"userId": userId, "enabled": True})
targetMandateId = None
for um in userMandates:
mid = um.get("mandateId")
if mid != rootMandateId:
targetMandateId = mid
break

if not targetMandateId:
# Create personal mandate
if dryRun:
logger.info(f"[DRY RUN] Would create personal mandate for user {username}")
stats["mandatesCreated"] += 1
else:
try:
result = rootInterface._provisionMandateForUser(
userId=userId,
mandateName=f"Home {username}",
planKey="TRIAL_7D",
)
targetMandateId = result["mandateId"]
stats["mandatesCreated"] += 1
logger.info(f"Created personal mandate {targetMandateId} for user {username}")
except Exception as e:
logger.error(f"Failed to create mandate for user {username}: {e}")
continue

# Migrate each FeatureAccess
for accessInfo in accessList:
oldInstanceId = accessInfo["featureInstanceId"]
oldAccessId = accessInfo["featureAccessId"]

# Find the root instance details
instRecords = db.getRecordset(FeatureInstance, recordFilter={"id": oldInstanceId})
if not instRecords:
continue
featureCode = instRecords[0].get("featureCode")

if dryRun:
logger.info(f"[DRY RUN] Would migrate {featureCode} for {username} to mandate {targetMandateId}")
stats["instancesMigrated"] += 1
else:
targetInstance = _findOrCreateTargetInstance(
db, featureInterface, featureCode, targetMandateId, instRecords[0],
)
newInstanceId = targetInstance.get("id")
if not newInstanceId:
logger.error(f"Failed to obtain target instance for {featureCode} in mandate {targetMandateId}")
continue

migratedCount = _migrateDataRecords(db, oldInstanceId, newInstanceId, userId)

_grantFeatureAccess(db, userId, newInstanceId)

try:
db.recordDelete(FeatureAccess, oldAccessId)
except Exception as delErr:
logger.warning(f"Could not remove old FeatureAccess {oldAccessId}: {delErr}")

logger.info(
f"Migrated {featureCode} for {username}: "
f"instance {oldInstanceId} -> {newInstanceId}, {migratedCount} data rows moved"
)
stats["instancesMigrated"] += 1
stats["dataRowsMigrated"] += migratedCount

stats["usersProcessed"] += 1

except Exception as e:
logger.error(f"Error migrating user {userId}: {e}")

# STEP 2: Clean up root
if not dryRun:
# Delete all feature instances in root
for inst in rootInstances:
instId = inst.get("id")
try:
# First delete all FeatureAccess on this instance
accesses = db.getRecordset(FeatureAccess, recordFilter={"featureInstanceId": instId})
for access in accesses:
db.recordDelete(FeatureAccess, access.get("id"))
db.recordDelete(FeatureInstance, instId)
stats["rootInstancesDeleted"] += 1
except Exception as e:
logger.error(f"Error deleting root instance {instId}: {e}")

# Remove non-sysadmin users from root mandate
rootMembers = db.getRecordset(UserMandate, recordFilter={"mandateId": rootMandateId})
for membership in rootMembers:
membUserId = membership.get("userId")
userRecords = db.getRecordset(UserInDB, recordFilter={"id": membUserId})
if userRecords and userRecords[0].get("isSysAdmin"):
continue
try:
db.recordDelete(UserMandate, membership.get("id"))
stats["rootMembershipsRemoved"] += 1
except Exception as e:
logger.error(f"Error removing root membership for {membUserId}: {e}")

_setMigrationCompleted(db)

logger.info(f"Migration completed: {stats}")
return {"status": "completed", **stats}
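The dryRun flag makes a two-pass rollout possible. A sketch of that usage (migrateRootUsers and its stats keys come from the file above; obtaining the connector via getRootInterface is an assumption about the call site):

    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.migration.migrateRootUsers import migrateRootUsers

    db = getRootInterface().db

    # Pass 1: report only. Nothing is written and the completion flag is not set.
    preview = migrateRootUsers(db, dryRun=True)
    print(preview["status"], preview.get("usersProcessed"), preview.get("instancesMigrated"))

    # Pass 2: apply for real once the preview looks sane.
    result = migrateRootUsers(db, dryRun=False)
    print(result)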
316
modules/migration/migrateVoiceAndDocuments.py
Normal file

@ -0,0 +1,316 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Migration: Voice settings consolidation and CoachingDocument scope-tagging.
Moves VoiceSettings (workspace DB) and CoachingUserProfile voice fields (commcoach DB)
into the unified UserVoicePreferences model, and tags CoachingDocument files with
featureInstance scope before deleting the legacy records.
Called once from bootstrap, sets a DB flag to prevent re-execution.
"""

import logging
import uuid
from typing import Dict, List, Optional

from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelUam import UserVoicePreferences

logger = logging.getLogger(__name__)

_MIGRATION_FLAG_KEY = "migration_voice_documents_completed"


def _isMigrationCompleted(db) -> bool:
"""Check if migration has already been executed."""
try:
from modules.datamodels.datamodelUam import Mandate
records = db.getRecordset(Mandate, recordFilter={"name": _MIGRATION_FLAG_KEY})
return len(records) > 0
except Exception:
return False


def _setMigrationCompleted(db) -> None:
"""Set flag that migration is completed (uses a settings-like record)."""
if _isMigrationCompleted(db):
return
try:
from modules.datamodels.datamodelUam import Mandate
flag = Mandate(name=_MIGRATION_FLAG_KEY, label="Migration completed", enabled=False, isSystem=True)
db.recordCreate(Mandate, flag)
logger.info("Migration flag set: voice & documents migration completed")
except Exception as e:
logger.error(f"Failed to set migration flag: {e}")


def _getRawRows(connector: DatabaseConnector, tableName: str, columns: List[str]) -> List[Dict]:
"""Read all rows from a table via raw SQL. Returns empty list if table doesn't exist."""
try:
connector._ensure_connection()
colList = ", ".join(f'"{c}"' for c in columns)
with connector.connection.cursor() as cur:
cur.execute(
"SELECT COUNT(*) FROM information_schema.tables "
"WHERE LOWER(table_name) = LOWER(%s) AND table_schema = 'public'",
(tableName,),
)
if cur.fetchone()["count"] == 0:
logger.info(f"Table '{tableName}' does not exist, skipping")
return []
cur.execute(f'SELECT {colList} FROM "{tableName}"')
return [dict(row) for row in cur.fetchall()]
except Exception as e:
logger.warning(f"Raw query on '{tableName}' failed: {e}")
try:
connector.connection.rollback()
except Exception:
pass
return []


def _deleteRawRow(connector: DatabaseConnector, tableName: str, rowId: str) -> bool:
"""Delete a single row by id via raw SQL."""
try:
connector._ensure_connection()
with connector.connection.cursor() as cur:
cur.execute(f'DELETE FROM "{tableName}" WHERE "id" = %s', (rowId,))
connector.connection.commit()
return True
except Exception as e:
logger.warning(f"Failed to delete row {rowId} from '{tableName}': {e}")
try:
connector.connection.rollback()
except Exception:
pass
return False


def _createDbConnector(dbName: str) -> Optional[DatabaseConnector]:
"""Create a DatabaseConnector for a named database, returns None on failure."""
try:
dbHost = APP_CONFIG.get("DB_HOST")
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
return DatabaseConnector(
dbHost=dbHost,
dbDatabase=dbName,
dbUser=dbUser,
dbPassword=dbPassword,
dbPort=dbPort,
)
except Exception as e:
logger.warning(f"Could not connect to database '{dbName}': {e}")
return None


# ─── Part A ───────────────────────────────────────────────────────────────────

def _migrateVoiceSettings(db, wsDb: DatabaseConnector, dryRun: bool, stats: Dict) -> None:
"""Migrate VoiceSettings records from poweron_workspace into UserVoicePreferences."""
rows = _getRawRows(wsDb, "VoiceSettings", [
"id", "userId", "mandateId", "ttsVoiceMap", "sttLanguage", "ttsLanguage", "ttsVoice",
])
if not rows:
logger.info("Part A: No VoiceSettings records found, skipping")
return

for row in rows:
userId = row.get("userId")
if not userId:
continue

existing = db.getRecordset(UserVoicePreferences, recordFilter={"userId": userId})
if existing:
stats["voiceSettingsSkipped"] += 1
if not dryRun:
_deleteRawRow(wsDb, "VoiceSettings", row["id"])
continue

if dryRun:
logger.info(f"[DRY RUN] Would create UserVoicePreferences for user {userId} from VoiceSettings")
stats["voiceSettingsCreated"] += 1
continue

try:
import json
ttsVoiceMap = row.get("ttsVoiceMap")
if isinstance(ttsVoiceMap, str):
try:
ttsVoiceMap = json.loads(ttsVoiceMap)
except (json.JSONDecodeError, TypeError):
ttsVoiceMap = None

prefs = UserVoicePreferences(
userId=userId,
mandateId=row.get("mandateId"),
ttsVoiceMap=ttsVoiceMap,
sttLanguage=row.get("sttLanguage", "de-DE"),
ttsLanguage=row.get("ttsLanguage", "de-DE"),
ttsVoice=row.get("ttsVoice"),
)
db.recordCreate(UserVoicePreferences, prefs)
stats["voiceSettingsCreated"] += 1
_deleteRawRow(wsDb, "VoiceSettings", row["id"])
except Exception as e:
logger.error(f"Part A: Failed to migrate VoiceSettings {row['id']}: {e}")
stats["errors"] += 1


# ─── Part B ───────────────────────────────────────────────────────────────────

def _migrateCoachingProfileVoice(db, ccDb: DatabaseConnector, dryRun: bool, stats: Dict) -> None:
"""Migrate preferredLanguage/preferredVoice from CoachingUserProfile into UserVoicePreferences."""
rows = _getRawRows(ccDb, "CoachingUserProfile", [
"id", "userId", "mandateId", "preferredLanguage", "preferredVoice",
])
if not rows:
logger.info("Part B: No CoachingUserProfile records with voice data found, skipping")
return

for row in rows:
userId = row.get("userId")
prefLang = row.get("preferredLanguage")
prefVoice = row.get("preferredVoice")
if not userId or (not prefLang and not prefVoice):
continue

existing = db.getRecordset(UserVoicePreferences, recordFilter={"userId": userId})
if existing:
stats["coachingProfileSkipped"] += 1
continue

if dryRun:
logger.info(f"[DRY RUN] Would create UserVoicePreferences for user {userId} from CoachingUserProfile")
stats["coachingProfileCreated"] += 1
continue

try:
prefs = UserVoicePreferences(
userId=userId,
mandateId=row.get("mandateId"),
sttLanguage=prefLang or "de-DE",
ttsLanguage=prefLang or "de-DE",
ttsVoice=prefVoice,
)
db.recordCreate(UserVoicePreferences, prefs)
stats["coachingProfileCreated"] += 1
except Exception as e:
logger.error(f"Part B: Failed to migrate CoachingUserProfile {row['id']}: {e}")
stats["errors"] += 1


# ─── Part C ───────────────────────────────────────────────────────────────────

def _migrateCoachingDocuments(ccDb: DatabaseConnector, dryRun: bool, stats: Dict) -> None:
"""Tag FileItem/FileContentIndex with featureInstance scope for each CoachingDocument."""
from modules.datamodels.datamodelFiles import FileItem
from modules.datamodels.datamodelKnowledge import FileContentIndex

rows = _getRawRows(ccDb, "CoachingDocument", [
"id", "fileRef", "instanceId",
])
if not rows:
logger.info("Part C: No CoachingDocument records found, skipping")
return

mgmtDb = _createDbConnector("poweron_management")
knowledgeDb = _createDbConnector("poweron_knowledge")
if not mgmtDb:
logger.error("Part C: Cannot connect to poweron_management, aborting document migration")
return

for row in rows:
fileRef = row.get("fileRef")
instanceId = row.get("instanceId")
docId = row.get("id")
if not fileRef:
if not dryRun:
_deleteRawRow(ccDb, "CoachingDocument", docId)
continue

if dryRun:
logger.info(f"[DRY RUN] Would tag FileItem {fileRef} with featureInstanceId={instanceId}")
stats["documentsTagged"] += 1
continue

try:
fileRecords = mgmtDb.getRecordset(FileItem, recordFilter={"id": fileRef})
if fileRecords:
updateData = {"scope": "featureInstance"}
if instanceId:
updateData["featureInstanceId"] = instanceId
mgmtDb.recordModify(FileItem, fileRef, updateData)
stats["documentsTagged"] += 1
else:
logger.warning(f"Part C: FileItem {fileRef} not found in management DB")

if knowledgeDb:
fciRecords = knowledgeDb.getRecordset(FileContentIndex, recordFilter={"id": fileRef})
if fciRecords:
fciUpdate = {"scope": "featureInstance"}
if instanceId:
fciUpdate["featureInstanceId"] = instanceId
knowledgeDb.recordModify(FileContentIndex, fileRef, fciUpdate)

_deleteRawRow(ccDb, "CoachingDocument", docId)
except Exception as e:
logger.error(f"Part C: Failed to migrate CoachingDocument {docId}: {e}")
stats["errors"] += 1


# ─── Main entry ───────────────────────────────────────────────────────────────

def migrateVoiceAndDocuments(db, dryRun: bool = False) -> dict:
"""
Migrate VoiceSettings + CoachingUserProfile voice fields into UserVoicePreferences,
and tag CoachingDocument files with featureInstance scope.

Args:
db: Root database connector (poweron_app)
dryRun: If True, log actions without making changes

Returns:
Summary dict with migration statistics
"""
if _isMigrationCompleted(db):
logger.info("Voice & documents migration already completed, skipping")
return {"status": "already_completed"}

stats = {
"voiceSettingsCreated": 0,
"voiceSettingsSkipped": 0,
"coachingProfileCreated": 0,
"coachingProfileSkipped": 0,
"documentsTagged": 0,
"errors": 0,
"dryRun": dryRun,
}

wsDb = _createDbConnector("poweron_workspace")
ccDb = _createDbConnector("poweron_commcoach")

# Part A
if wsDb:
_migrateVoiceSettings(db, wsDb, dryRun, stats)
else:
logger.warning("Skipping Part A: poweron_workspace DB unavailable")

# Part B
if ccDb:
_migrateCoachingProfileVoice(db, ccDb, dryRun, stats)
else:
logger.warning("Skipping Part B: poweron_commcoach DB unavailable")

# Part C
if ccDb:
_migrateCoachingDocuments(ccDb, dryRun, stats)
else:
logger.warning("Skipping Part C: poweron_commcoach DB unavailable")

if not dryRun:
_setMigrationCompleted(db)

logger.info(f"Voice & documents migration completed: {stats}")
return {"status": "completed", **stats}
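A similarly hedged invocation for the voice and document consolidation, again assuming the caller obtains the root connector via getRootInterface; the stats keys come from the module above:

    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.migration.migrateVoiceAndDocuments import migrateVoiceAndDocuments

    stats = migrateVoiceAndDocuments(getRootInterface().db, dryRun=True)
    if stats.get("errors"):
        print("voice/document migration reported errors:", stats)
    else:
        print("voice/document migration preview:", stats)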
@ -112,12 +112,12 @@ def _buildEnrichedAutomationEvents(currentUser: User) -> List[Dict[str, Any]]:
if automation:
if isinstance(automation, dict):
job["name"] = automation.get("label", "")
job["createdBy"] = _resolveUsername(automation.get("_createdBy", ""))
job["createdBy"] = _resolveUsername(automation.get("sysCreatedBy", ""))
job["mandate"] = _resolveMandateLabel(automation.get("mandateId", ""))
job["featureInstance"] = _resolveFeatureLabel(automation.get("featureInstanceId", ""))
else:
job["name"] = getattr(automation, "label", "")
job["createdBy"] = _resolveUsername(getattr(automation, "_createdBy", ""))
job["createdBy"] = _resolveUsername(getattr(automation, "sysCreatedBy", ""))
job["mandate"] = _resolveMandateLabel(getattr(automation, "mandateId", ""))
job["featureInstance"] = _resolveFeatureLabel(getattr(automation, "featureInstanceId", ""))
else:
Some files were not shown because too many files have changed in this diff.