From ebfdd9ab039f63f9e0ca939e8e94f4be0b2554ab Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 22 Sep 2025 07:44:39 +0200
Subject: [PATCH] Admin interface
---
app.py | 14 +
env_int.env | 32 +--
env_prod.env | 32 +--
modules/connectors/connectorDbPostgre.py | 10 +-
modules/routes/routeAdmin.py | 256 +++++++++++++++++-
modules/routes/routeDataConnections.py | 20 ++
modules/routes/routeDataFiles.py | 252 +-----------------
modules/routes/routeDataNeutralization.py | 274 +++++++++++++++++++
modules/routes/routeDataUsers.py | 159 +++++++++++
modules/routes/routeSecurityLocal.py | 125 +++++++--
modules/security/auth.py | 115 +++++++-
modules/security/csrf.py | 96 +++++++
modules/security/tokenRefreshMiddleware.py | 191 ++++++++++++++
modules/security/tokenRefreshService.py | 291 +++++++++++++++++++++
14 files changed, 1526 insertions(+), 341 deletions(-)
create mode 100644 modules/routes/routeDataNeutralization.py
create mode 100644 modules/security/csrf.py
create mode 100644 modules/security/tokenRefreshMiddleware.py
create mode 100644 modules/security/tokenRefreshService.py
diff --git a/app.py b/app.py
index cade2e7c..02e05076 100644
--- a/app.py
+++ b/app.py
@@ -274,6 +274,17 @@ app.add_middleware(
max_age=86400 # Increased caching for preflight requests
)
+# CSRF protection middleware
+from modules.security.csrf import CSRFMiddleware
+from modules.security.tokenRefreshMiddleware import TokenRefreshMiddleware, ProactiveTokenRefreshMiddleware
+app.add_middleware(CSRFMiddleware)
+
+# Token refresh middleware (silent refresh for expired OAuth tokens)
+app.add_middleware(TokenRefreshMiddleware, enabled=True)
+
+# Proactive token refresh middleware (refresh tokens before they expire)
+app.add_middleware(ProactiveTokenRefreshMiddleware, enabled=True, check_interval_minutes=5)
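+# Note: Starlette applies middleware in reverse registration order, so on each
+# request the proactive refresh runs first, then the silent refresh, then CSRF
+# validation.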
+
# Include all routers
from modules.routes.routeAdmin import router as generalRouter
app.include_router(generalRouter)
@@ -290,6 +301,9 @@ app.include_router(userRouter)
from modules.routes.routeDataFiles import router as fileRouter
app.include_router(fileRouter)
+from modules.routes.routeDataNeutralization import router as neutralizationRouter
+app.include_router(neutralizationRouter)
+
from modules.routes.routeDataPrompts import router as promptRouter
app.include_router(promptRouter)
diff --git a/env_int.env b/env_int.env
index d6d1be1d..5677525e 100644
--- a/env_int.env
+++ b/env_int.env
@@ -10,25 +10,25 @@ APP_KEY_SYSVAR = CONFIG_KEY
DB_APP_HOST=gateway-int-server.postgres.database.azure.com
DB_APP_DATABASE=poweron_app
DB_APP_USER=heeshkdlby
-DB_APP_PASSWORD_SECRET=VkAjgECESbEVQ$Tu
+DB_APP_PASSWORD_SECRET=INT_ENC:Z0FBQUFBQm8wTnBISUNVLWVobHJzX0xtS0pMcV9neXY3S05qc1F6RU9SRTdHM2F2VW1ldVlMYU9zRTU2OE9QTDBmcGRjN3ZUb1dobGZrUHZrR2EyWURtUXRYWk5MTExMVUJxY01yaFBTWFE4OTlHNHBsWHFSUnc9
DB_APP_PORT=5432
# PostgreSQL Storage (new)
DB_CHAT_HOST=gateway-int-server.postgres.database.azure.com
DB_CHAT_DATABASE=poweron_chat
DB_CHAT_USER=heeshkdlby
-DB_CHAT_PASSWORD_SECRET=VkAjgECESbEVQ$Tu
+DB_CHAT_PASSWORD_SECRET=INT_ENC:Z0FBQUFBQm8wTnBIVmhCSEtCcDF6dXBCSkVzOTdaNUZVOUgtZ2JQQ3lMUjVKdUgxbnBkZHdPSFE5amNWTzhKNW4zcV9QSFdNakFVNXRVcDVlTnd4Tm51QjA2MTVVMVY1b3dBZHhQZXZLdUlsc3lBektKRjhIUXc9
DB_CHAT_PORT=5432
# PostgreSQL Storage (new)
DB_MANAGEMENT_HOST=gateway-int-server.postgres.database.azure.com
DB_MANAGEMENT_DATABASE=poweron_management
DB_MANAGEMENT_USER=heeshkdlby
-DB_MANAGEMENT_PASSWORD_SECRET=VkAjgECESbEVQ$Tu
+DB_MANAGEMENT_PASSWORD_SECRET=INT_ENC:Z0FBQUFBQm8wTnBISHA2OXVrWjhaQURZM3g4WGxiTmt3WW05WXBIRGVwNFNfdmphOGdUQ0ZCMUdFTlAzZlJTM2ZFaEhVWGRqNXBtREpTalItcDNxS1BJeEZKdWc0dWxHUm41QTBMZ3VqT3pHeFVmVUtJWE1YbTA9
DB_MANAGEMENT_PORT=5432
# Security Configuration
-APP_JWT_KEY_SECRET=rotated_jwt_secret_2025_09_17_2c5f8e7a-1b3d-49c7-ae5d-9f0a2c3d4b5e
+APP_JWT_KEY_SECRET=INT_ENC:Z0FBQUFBQm8wTnBIVXVUQnhWcjhvVFhtTDl5T1M1SXZZdjZDY0tIa0hmbnRuanUweUdoQ04xNzhod3VscG44V0xlNldzY2t1MVE5UjVjUTdSRUU1N3VBUGNVN0ozU0o1akNBX0x0X1FNOGE0TE9paTh0ZEVnZmNTbGFnSjBpNTBXMTZxemJwWmRTdkJOWms4VVRieGpSM3VtaFY3Zmw0NlJTbVVfbDdwYldVYUlfbGVFUGhsajVZPQ==
APP_TOKEN_EXPIRY=300
# CORS Configuration
@@ -51,41 +51,29 @@ Service_GOOGLE_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/
# OpenAI configuration
Connector_AiOpenai_API_URL = https://api.openai.com/v1/chat/completions
-Connector_AiOpenai_API_SECRET = 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
+Connector_AiOpenai_API_SECRET = INT_ENC:Z0FBQUFBQm8wTnBIS0RqLW13RThlbTNNYUdLa3pXeFVYVm5Mc0czREtRczRBSlVjcVJJcVpKU19kRUZTU2pqMGZFR2pHZnZ4TGdMeFJqbHl5aTYwa2pzcTlNZklnMUNIZHZwdGFuWFhGZDlkemI2cnJuRURBZVBmM3Fxbm91c0ZQai1UMGJSM29kanIzMFB4Z2x6QWcycVk2SzRHQXc2YmZRPT0=
Connector_AiOpenai_MODEL_NAME = gpt-4o
Connector_AiOpenai_TEMPERATURE = 0.2
Connector_AiOpenai_MAX_TOKENS = 2000
# Anthropic configuration
Connector_AiAnthropic_API_URL = https://api.anthropic.com/v1/messages
-Connector_AiAnthropic_API_SECRET = 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
+Connector_AiAnthropic_API_SECRET = INT_ENC:Z0FBQUFBQm8wTnBIN0pPeHE3SzFWbTNySU1NRThmcURKWWNiZ3pQLTlwSXZmd0JkTUxXb2VGTVIyeUhZb2JKRzJsQ1AwTlZBWl9RYkRaQkVoR3dxQkdGYUFmd0xRdW1jUGxXdjJPbDlDVTVtT1c3aldRVVNoWmRLd09TZW5xU1JOVHp1ZE5Za0xBODR1TlhMQ1ZiaEZ4Nm00QnpPSks4RGVxYUhqaGdvMWVwMzBKSTdIUEVXSE1XM1ZNUjNBWDAzLWxwLXlib29OV0pOV21MTkFpb0ZDLU5seHMyTldxSFdIZz09
Connector_AiAnthropic_MODEL_NAME = claude-3-5-sonnet-20241022
Connector_AiAnthropic_TEMPERATURE = 0.2
Connector_AiAnthropic_MAX_TOKENS = 2000
# Agent Mail configuration
Service_MSFT_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
-Service_MSFT_CLIENT_SECRET = Kxf8Q~2lJIteZ~JaI32kMf1lfaWKATqxXiNiFbzV
+Service_MSFT_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8wTnBIVXktVWJLTEdLSDd1MENKejQ2bzdCTUlTQ1ZELVJfSGhaeExkMjQ4N1dNVnhjZjRTMl83dlBqeEJCMHVabVpZVlQxRjhjQkRiOHdpMUNaODJqN0UtYW9GallJekY0U2RVZHpORXg4dThuc01uMy11ZGtDb01BQXc3TlE1ZXBjaU4=
Service_MSFT_TENANT_ID = common
# Google Service configuration
Service_GOOGLE_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
-Service_GOOGLE_CLIENT_SECRET = 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
+Service_GOOGLE_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8wTnBJeVpuNWVraERfUFBaT3BDRVk0T21KcGdrYU9zNGNyRkljNDR2TnB6R291VGJJM3d4RnBHTVVXYTRCT1F1RGFRYnNTX0xTLXFqVHVHTnN0bG9LeHdEbFpZcUNIMXFWY0dJYko4U3FNSk5vUnY2ZWRWWFJLUjR5WkJrZmpMU0pxNGI=
# Tavily Web Search configuration
-Connector_WebTavily_API_KEY_SECRET = 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
+Connector_WebTavily_API_KEY_SECRET = INT_ENC:Z0FBQUFBQm8wTnBJV1BlRS1UaTZmZkVYZ2hQU0lBRXVEbDl3N3BFNVI4MlBsN2JRSHdrYV95SC1vdk1pMnQzNGRaQThrRy1HcEJyT1Y2OXdQcmw2Yk9KQ3RDRzRpamx2cFpkYkN5SjkzNVVmZnVaOWJnN0MwTGZMcVdRdU1jY2kwVGhNMXZQUG9kajA=
# Google Cloud Speech Services configuration
-Connector_GoogleSpeech_API_KEY_SECRET = {
- "type": "service_account",
- "project_id": "poweronid",
- "private_key_id": "88db66e4248326e9baeac4231bc196fd46a9a441",
- "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDTnJuxA+xBL3LA\nPgFILYCsGuppkkdO6d153Q36f2jTj6zpH3OhKMVsaaTBknG2o2+D0Whlk6Yh5rOw\nkWzpMC3y81leRLm5kucERMkBUgd2GL4v16k6m+QGuC3BFlt/XeyuckJNW0V6v/Dy\n3+bSYM7/5o1ftPNWJeAIEWoE/V4wKCYde8RE4Vp1LO5YwhgcM4rRuPmF2OhekpA+\npteYwkY/8/gTTRpZIc8OTsBYRbaMwsjoDj5riuL3boVtkwZwKRb+ZLvupXeU7Ds7\n1305odTcZUwnImHiHfuq83ZJViQiLRNhUAFnQIXPrYLwEpCmzRBGzYHaRlb69ga/\nzqUbKnclAgMBAAECggEAH6W9qHehubioPMAJM7Y6bC2KU/JLNS4csBZd+idb52gG\nwBwIEFjR+H4ZjymhAA4+pe7c4h7MKyh0RI/l7eoFX98Cb+rEq/r1udm1BhGH3s2h\n2UiI8qRQh1YRjF2/nrN5VjhDBOFa6W9opaopZy/l8AzsT8f21zIgPen8z8o6GpFg\n64fJFcbqCGk2ykN2+x2pIOT04tmCszrfbXZP8LEs4xrUB/XwlHL1vT/M3EWIKbnj\njDaIMjw7q/KRgNUvmKS6SU9b3fnOLcQCz9f5cKdiWACKIU/UvuiWhWJ9ou6BWLWU\nva1A6Fi4XJjhW7s3po58/ioQfl0A9p/L92lGg4ST8QKBgQDx8LIM1g0dh9Ql6LmH\nBUGCOewNNXTs+y3ZznUfvVMoyyZK5w/pzeUvkmOwzbRGnZJ9WyCghq8aezyEpo2D\nPL7Odf988IeHmvhyZIM4PLJYgDvSwGXyf/gh6gJkf/4wpx+tx/yQYNBm3Rht7sA0\npSaLehK0E0kW1uyBzHGKgyQOhwKBgQDf6LiZ7hSQqh54vIU1XMDRth0UOo/s/HGi\nDoij29KjmHjLkm8vOlCo83e79X0WhcnyB5kM7nWFegwcM1PJ0Dl8gidUuTlOVDtM\n5u2AaxDoyXAUL457U5dGFAIW+R653ZDkzMfCglacP8HixXEyIpL1cTLqiCAgzszS\nLcSWwoAr8wKBgQC4CGm3X97sFpTmHSd6sCHLaDnJNl9xoAKZifUHpqCqCBVhpm8x\nXp+11vmj1GULzfJPDlE8Khbp4tH+6R39tOhC7fjgVaoSGWxgv1odHfZfYXOf9R/X\nHUZmrbUSM1XsNkPfkZ7pR+teQ1HA1Xo40WMHd1zgw0a2a9fNR/EZ9nUn4wKBgGaK\nUEgGNRrPHadTRnnaoV8o1IZYD2OLdIqvtzm7SOqsv90SkaKCRUAqR5InaYKwAHy7\nqAa5Cc73xqX/h4arujff7x0ouiq5/nJIa0ndPmAtKAvGf6zQ6j0ompBkxAKAioON\nmInmYL2roSI2I5G/LagDkDrB3lzH+Brk5NvZ9RKrAoGAGox462GGGb/NbGdDkahN\ndifzYYvq4FPiWFFo0ynKAulxCBWLXO/N45XNuAyen433d8eREcAYz1Dzax44+MdQ\nHo9dU7YcZvFyt6iZsYeQF8dluHui3vzMpUe0KbqpZC5KMOSw53ZdNIwzo8NTAK59\n+uv3dHGj7sS8fhDo3yCifzc=\n-----END PRIVATE KEY-----\n",
- "client_email": "poweron-voice-services@poweronid.iam.gserviceaccount.com",
- "client_id": "116641749406798186404",
- "auth_uri": "https://accounts.google.com/o/oauth2/auth",
- "token_uri": "https://oauth2.googleapis.com/token",
- "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
- "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/poweron-voice-services%40poweronid.iam.gserviceaccount.com",
- "universe_domain": "googleapis.com"
-}
+Connector_GoogleSpeech_API_KEY_SECRET = INT_ENC:Z0FBQUFBQm8wTnBJSDh5aW9CNE04dDVsYnBUNWdGUWhDMmFlNmY2bnl5X1llVnNZV3VGakI5RFFNYVprYm5mU0F6TVZ5NDZkYlVhMGpzM0RPNGFmNDdvV1c0Y2hUQnowYzRmREhwRk5fMVVnejlGR0Y3V1pVemtFbEZEeTFEOUptbThaSHJJeGtwWGZZQ0VLYkpaTGRXMVFxX0hRX2treG1ES2VheTdsR1U3eUxYV0xPbzExSDZzOHBQR0FSdGh2V0hXRFpRLW52ZlJyMTBDR1VkVkh4VU52MWVwMDdxYlBfbjlMeTd5M0FIVGtaT1c3WmpwZjh6Skp2djB2cXM5NkdOS29ONzdaNGk0WlpzTlJxU2cwQWxTd25XYmllbjJXemQxY0ljZkZqZFV3MEhucXNfYUR3T3diWEFyLS1WQmRiZEJXbERuQXhXanZQUDBJZFphZGk5aHFTQVRkM3B5QllYZ0Q0V19VRlRtVThEb09TWGFHVHRKc0R3eGRoYWpkT0xRbXhGb0pFYUk3MXBGekV6WDdzekMtaU1JNlNaaXdQa19keUotSDJkZDNQTVpZQjlxLWhwRWIta05YR0sxTXRVS1ZLaHRJM1IwUTMtQlUtbHU4dmVfQjdsY1Y3ODFSZXBiQUJIdFNrR3dGelkzWjhQaXR1NlFIYm1KMFVNMmlMcGQtRE1zNmx0ZTRuVUhVRUFuNUEwTnNNSTBnaTRtaVNOT0lLQTR3U01SOHNjZkdOQ2VXQXBuZ3k3Q3NjbDh1dU5fWXVkN0pvNmxZWlQyaVVLNEFEN3dxRkV5NUU1dG5kdGxieXo3WGhIalAzWjQ0TFNLRFFVZkJFRUNjQW1xNWdUUHRTOG0xVklmd09NdGd4SURxdmI2UXU3U01PVDZSM0lhbDFQUjZobkl3VC10eThuV3BSV0l3Nkw5X1dVN1RhYkdqb0ROempfQ2xjc0lQemtaSGNkMjJjR1hjN0V3NFhta2l1MVRGeG9PekdhM1V6NGpCMG5yYmZJb1BmdndyMXdpOGdSSldmRFg0UlZSX3EyTVN3ckotaGJLbU5EMG1jYnY0VmtFNk14dzdzWVloVFhWMkQxNDlmc3QwSWJZV2ZaU3J2NlNkdHlyQUVXUTNXczJZMzBua0Zmbl9MYWxSSi1QdUowQkdINWJIZFNoUlY5V2NYYjFva3A3OHZ6MEd6MGRvNkJrQjNKa21FcHI2Y3pfTWQ2TUFzUEp5M1FZazVUSUVUTnlkQ2U1RVU3OFdYYXE0S3QwT210a1c5aVlPNDhET3JBeUFHeG41MDM3aUdXVWN6TUtUMi01aGJOWGN4WTZDN29WNl9SdGRtR0gzRzJFZmhZa3p6UUpoVEMyb196aFdUVE1nTDNDSkJuN1lsSWlMWlBVS3VhclRxd1ZhWXhNUDZRa0Jlb3N5UkhiZ2pYc29ZZm55bWFZa09DZ2lvZmE4YTRoYmJJREh0ZXMtSkN3MkJBNDlJQTR1MTlEVTFQTUFJMENCQmFCeGtXYlJJVmtSUjBuNXBDa0wtVTJuVk8tVGk2dWxmeEgyV3pkOFdDU1JhRnowLU9EbWZhYWkyZVRfSDVJWDdtd0l0TF9OQ19NRi1tUnAxdHg5a0dFSDY5RzFsR1NiV0p3VG5DckNyREVjcWQ0elV1aktKNlJkNXdIVnpXY3U1bTBUbVJ5a3VucDdualg2cU1rZkEtOWpWa2tGU0puNUpNUzVaV3Y4UmhiZWhLTkdzS0h5NkxkcmNLblB2dG9lb2xYYXlqZkZiM3ZRTFVtM1VwOFFGQ1QxWFh2cUlhMFFyME5rSEJwLW5IUS1pRmNpVXVWYUR3emg0N2lDXzlFN1NnRk9ab0lLaEVvaV9FcEVfR0VBUzRkWG9KUm1sRk9DcDEtSGQzMFFXLUt3QnBpaV9fV2lQVExXSDcwc0E1ejE5SWd5c0NnQTlyWlBuVFNCeFpxN0M4M3kxaHN2RmJXekxiNy0zTEN0N2daLWpERFJ3SFFMUk11N25mRVk0MHlyMHE1d1NDbFpFaTBGSDFFcksyYVJZUEdKemtNWE9qbDBfMEpyaVVaSFdOZWpodGt4N0g5TGN1NVRQUm41cTNxRWdyTDFjd0xTSEZibUt3R0pISkpmRzlSVEYzeWNUdm5qUEZPVlVJX1d5MGpxUWNjTzFMaGlEVG9GY3RhUWZiMFpsbVR2OHNLaUs4ZkFENmNoRFRyd290a1FFZng1ckczaWxMMEVsZ3dBb0ZXdlh2YzJxTlhwZkJTV2VBandjWFB3MjJrRTR4LUUxV2lCRTdGYjNoeVhZTUx3RjFRNlFoY1VYMHRyTmlxdm9jUjAtWndLQ1RNcENDclh4TkR3ZE9tSjFOaGxIcmtWQ2ZIaHRabGNJQVI3RnZHUGtBRWt3YmpuUUhrT0VRTUxfVFZOSWZ1Z25IWUsxVDIwZEY0blQwbEdXY3hETW41UldqcW0zMUNCVHNDZkRyTGlrVlU3c0lWdFpvUzlfTGtLMGxJZUg4dUdjTU01VUtYaVQtSFBqT2F4NXhEUUlBRU1lSU54dzFhd3d2UjUxb3JXSUdQbVRyUTFlc2Z5WkNGWlNzVTc5aWllcGsxbzRmYVlFTWw4VVVtTDdkczdzQ3NFSGMzdVltSjlfY3dyNzlPaEk1cE5jdk4weDFKc3BTUXpPbDI5Y1ptblp0TGJ5UGwzVmU4VEtWUTZEQlRtemp1YnppdDdkSHpyY0c4NlZqSmI1UVBwSTJSZ1NNcEQwRTdySGRySi1XUkhWTHlaajFZSkQwc1k5NGZDZUhzRFdBZXNqSVdwU0ZsMGlNLUZPSU1OT243N0RFQ091RjVyaVRBdEUtOWlfSTdpX25laHNwVFlFU0RjWE42amhxSHlvcDlDdE13Y3JtWFNsQkZoclVFS2hFblQ3MXpxOHJsUGMwbkVmT3ZMMFZiNmpHZVRCa2k0YTJnNHM0dWpyQXJaYjU1Z1hNWGU2aU1hY3RxYWVzU1hVWWI4bjR2Y0R1MmF0NDVlc08xTmVwQ01ENThUMC1kWk9SWG9IWWFDZ1V1RGhDY0pjVEZBUGJreXh3RTRuQUlTWVR4NlVuaGozbmJVTXNzRDdrMGZaclpsb290WmFaTzF0NThITENXN1JSN25JS18yWnpJY19maVlXN21QTWE5M1FhWGJqTFQ0RTZoY3dlcEM2Q0gzQ1RtYk9jTnVkU1drVnhZc1NkMm82c25CWkpaTW5KbTd5dGJHSHhhQzR2TFk1NW9pX2pQNDdOeVhGZ0swTklYUkRZbmdsVzNabTZjWDRrX3BCMC1OT0M4R0dMTWlhQXhIMXJVT0Z0bmp2aTBFWnRDSFYySVFULXRneFpBQ0ZwdEZaTHAyeWx3bFF0Q2o5YzlfbUQ5Xy1uWjFKQXVVTE9Qa2VPNDZmQ0kzaUxBNG9EdGpyZm9VOEZhTC05V2JBMU1KTmt6RlY5aUpILVE5bExnVkdCcDlLZExBeHRSWWxQS1pkcE9BWjdXeV8tb1ltcTVoalQwLTl5Mk5GanphVGxKUzlJUjk0Y1g2QXBSRzBSNkkzTjg3bTFlYU5XbE0wYkNacC05bzVFN3F4NXdkUFBqQU9QYWVtdUM1MWtNR1RzbzBQVHJGR1NzTEV5TzdCUXRmcXJuTUVCVV9QbXFXcG5QdmJfQWlVMXRvU1hOakpHandKbnFhZTlqUWtWbXg3MzFpS2I5SlJzN0dQN0JuaGNKSWtVX0Vxc1FPUk4zMzVTMVIzSWJYcUVYU3Jldm01bHpleU90MlU3XzhhVWI3a3pqajRjd0g3YVppY3MxNUdKU1lUZFhyUnZDYXJWcnltQ2tadEFxdG92aUI3MU96WnFBdXdLRWZkMFVtc0N0SDVJc0RYdjhJZVhVWXZfb2VDY3NxVEdZc05tRWNVZTRUVHphT1RyWlRoX25iNXlLX0pDeXZrbXR1VkxnUTlZOVpUak5GYzVUODZvVmtKNHo2QUg3b2pSZmNhY1lUUVA3WXktMjYxWjBiTE5XeWJlQ0VhY2VyVnRxd3hvUWxtSjhCSGNkOHFLRTF4QmpLc0FoYzg0b2xRZXZpVTZ5TENXNXdiTEZNV01nSXpPR2U4ZlZzXzZZeGdmSG5kN2hLeDR6WU0zWExxUXJBcXR6NmdZb0ZiZHRPSFVuRUVyZV9zOUJsN09GWE5YM25FRlNoLTlNNWVMcTctb2l1Mzdyb0Y5TDU1YjdnQVUwSFoxXzF0VFh3TGktbFl2bVQ3NzdkRHV3SlNfaEZjY1ItYUk0OHhLRk9rUXdqNUVTdGNBc1ZCY1pFQ0N0WE45MUEwUlpwejdrcDNBPT0=
diff --git a/env_prod.env b/env_prod.env
index ab046d57..62ce7253 100644
--- a/env_prod.env
+++ b/env_prod.env
@@ -10,25 +10,25 @@ APP_KEY_SYSVAR = CONFIG_KEY
DB_APP_HOST=gateway-prod-server.postgres.database.azure.com
DB_APP_DATABASE=poweron_app
DB_APP_USER=gzxxmcrdhn
-DB_APP_PASSWORD_SECRET=prod_password_very_secure.2025
+DB_APP_PASSWORD_SECRET=PROD_ENC:Z0FBQUFBQm8wTnBMd0FKLUpzaTdYT0Zia2V3VExPSktfTUx6RmRDc1hobjhYamxyMTkxakhDeGVHRTA3TmVoNC1Mamh0elFiV0h5MnA3YmpheXRzLVdhN2Ytb2R4a1NiSWY0RlFQMXlJU2hUMFY1RGJ1dEdRTFE9
DB_APP_PORT=5432
# PostgreSQL Storage (new)
DB_CHAT_HOST=gateway-prod-server.postgres.database.azure.com
DB_CHAT_DATABASE=poweron_chat
DB_CHAT_USER=gzxxmcrdhn
-DB_CHAT_PASSWORD_SECRET=prod_password_very_secure.2025
+DB_CHAT_PASSWORD_SECRET=PROD_ENC:Z0FBQUFBQm8wTnBMM3p1TEY3VTQxT0xrbW9fbHFJLXNDZHJUOVBSUHhhdURpMi1EZTZIaXQ0M1V5ZUZFQVhjSGF5SUVzTDNrWW11UlNQQVhwNEU0al9yZXQxSnRIU1U0akRDbFVIUHVvUV9SMkFkaEFGR1ZVUjA9
DB_CHAT_PORT=5432
# PostgreSQL Storage (new)
DB_MANAGEMENT_HOST=gateway-prod-server.postgres.database.azure.com
DB_MANAGEMENT_DATABASE=poweron_management
DB_MANAGEMENT_USER=gzxxmcrdhn
-DB_MANAGEMENT_PASSWORD_SECRET=prod_password_very_secure.2025
+DB_MANAGEMENT_PASSWORD_SECRET=PROD_ENC:Z0FBQUFBQm8wTnBMRDJRY19uM1hTNC1mMzhVaGNtamtScGpVYTY3RUdBTlpTTDdrUF9PdF84WkFSakRoX0VEcGhwanBPSU9OUGJNWXJDblVUS0o0Y0FBd0hMejUyTXFJTFVCaUJmTkpVYVQzWXFRSDV2d1lENHM9
DB_MANAGEMENT_PORT=5432
# Security Configuration
-APP_JWT_KEY_SECRET=rotated_jwt_secret_2025_09_17_prod_e1a9c4d7-6b8f-4f2e-9c1a-7e3d2a1b9c5f
+APP_JWT_KEY_SECRET=PROD_ENC:Z0FBQUFBQm8wTnBMX2lyNHVQVVkzamE1eURGMkRoVmhJTTVSTEQ1c3E4XzlucExfdUNxTHNwazB2X1h4YzdUeDhsYWNCbUZ5VjJNVTZDYlY2dGhreTg5UGV2Z3A4X1FTc094XzhxdWRNSzBXd20yY3pFNkpUYzhaeml5ME9OMjFkNjZMQkdvczZnWTVYX09fR0RYQXhpVHFPQnA2cWh1T3pqTFVieXpHV1hlUjVQdWRCSEc1bk1ZPQ==
APP_TOKEN_EXPIRY=300
# CORS Configuration
@@ -50,41 +50,29 @@ Service_GOOGLE_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google
# OpenAI configuration
Connector_AiOpenai_API_URL = https://api.openai.com/v1/chat/completions
-Connector_AiOpenai_API_SECRET = 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
+Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQm8wTnBMSlhwejcyRl9EUWpPX3M5bnI3QTNiRDd1QXVaVkFCczBzeUczcHhyenJvRDN0SDZGaHp6dGJqNjNiLW9oTjJPZGV1b0VxWElfT29jQ19vNWF4aG11bkRlS1JMa1VoeG82VWVmWkV0VDZUWTFmcXZXYUh6ZWs0bEswNXhhZ1ZEU1JNYk1jU0p3YVZkZmZVWmF4dURDcGR3PT0=
Connector_AiOpenai_MODEL_NAME = gpt-4o
Connector_AiOpenai_TEMPERATURE = 0.2
Connector_AiOpenai_MAX_TOKENS = 2000
# Anthropic configuration
Connector_AiAnthropic_API_URL = https://api.anthropic.com/v1/messages
-Connector_AiAnthropic_API_SECRET = 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
+Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQm8wTnBMaEFnaHBDYndpTkZJSFp5OGdmY2xtNDZEZmFmbk1rUUQ2STZCQlprMjRhY3BLdkhTWWdDRlIzcm94NE5LZ2dCdlNkdWpkVVk2QnIzTzQ5TGEtX2p6a2kzeF9PR3QtNWs4aWFKX1ozUTNYT09sMkJNb1JMRk1vbTE0U0Y2eU1SUjhwY3Z2TWIyU2d4Nk1iS2d0YkRKUm0wNjNEbWNxYTg3SGNnU3FMSzVtYjhLVnhxbXd1SmZyam9QSGtna1dkSGlpeENEREZQck1tZk4tTkJvTERTcjZSdz09
Connector_AiAnthropic_MODEL_NAME = claude-3-5-sonnet-20241022
Connector_AiAnthropic_TEMPERATURE = 0.2
Connector_AiAnthropic_MAX_TOKENS = 2000
# Agent Mail configuration
Service_MSFT_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
-Service_MSFT_CLIENT_SECRET = Kxf8Q~2lJIteZ~JaI32kMf1lfaWKATqxXiNiFbzV
+Service_MSFT_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQm8wTnBMMXV1OE5qODFrcGJqVEt2Zlk1TkFyQ3VoMzVad21UcTgwSXJqRjdiWmdsS0J3VWRBWWg4WWllNzE5X21ubGItMl96b0hZYTlXbVBkTmVhQVRadGlnWUlWQWdOZUV2U0pDSDdiWEhMNHJQUVllYzFpWFNJUnY0M0FpZ1ZWcExyWmk=
Service_MSFT_TENANT_ID = common
# Google Service configuration
Service_GOOGLE_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
-Service_GOOGLE_CLIENT_SECRET = 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
+Service_GOOGLE_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQm8wTnBMYmNCSXM5cnRBVUxlYm83VG11MlBGZHhiV2hWOWxWYk5XRk1hSmhsTGdsX2dHSGhxYk5FWEpEbXdQM3hCNE1nRjZHNjlDb0RMWTIwb2pqczdocjFkSWxfYWlLOU9KbmtUcTl1SmZJZUh2V1RwM2kzVkZhRFIyTERsaThXYS1OVFk=
# Tavily Web Search configuration
-Connector_WebTavily_API_KEY_SECRET = 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
+Connector_WebTavily_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQm8wTnBNVU9JZEcwUWFuQ0lfRElGdDRhSFJDNVVBNUhBVzVKQlhBZXNsUDluRXYyV1NuaWw3eEJMdnhscGNZNW5KVmgtMzNfSGRfX1RMZlB5SmtHSzNTMC1RUlp1c2dqOWhSVnhuVUVGVUlaak16ZjlpWW00OFVIRFU1aEZXYzNaN3VNS1I=
# Google Cloud Speech Services configuration
-Connector_GoogleSpeech_API_KEY_SECRET = {
- "type": "service_account",
- "project_id": "poweronid",
- "private_key_id": "88db66e4248326e9baeac4231bc196fd46a9a441",
- "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDTnJuxA+xBL3LA\nPgFILYCsGuppkkdO6d153Q36f2jTj6zpH3OhKMVsaaTBknG2o2+D0Whlk6Yh5rOw\nkWzpMC3y81leRLm5kucERMkBUgd2GL4v16k6m+QGuC3BFlt/XeyuckJNW0V6v/Dy\n3+bSYM7/5o1ftPNWJeAIEWoE/V4wKCYde8RE4Vp1LO5YwhgcM4rRuPmF2OhekpA+\npteYwkY/8/gTTRpZIc8OTsBYRbaMwsjoDj5riuL3boVtkwZwKRb+ZLvupXeU7Ds7\n1305odTcZUwnImHiHfuq83ZJViQiLRNhUAFnQIXPrYLwEpCmzRBGzYHaRlb69ga/\nzqUbKnclAgMBAAECggEAH6W9qHehubioPMAJM7Y6bC2KU/JLNS4csBZd+idb52gG\nwBwIEFjR+H4ZjymhAA4+pe7c4h7MKyh0RI/l7eoFX98Cb+rEq/r1udm1BhGH3s2h\n2UiI8qRQh1YRjF2/nrN5VjhDBOFa6W9opaopZy/l8AzsT8f21zIgPen8z8o6GpFg\n64fJFcbqCGk2ykN2+x2pIOT04tmCszrfbXZP8LEs4xrUB/XwlHL1vT/M3EWIKbnj\njDaIMjw7q/KRgNUvmKS6SU9b3fnOLcQCz9f5cKdiWACKIU/UvuiWhWJ9ou6BWLWU\nva1A6Fi4XJjhW7s3po58/ioQfl0A9p/L92lGg4ST8QKBgQDx8LIM1g0dh9Ql6LmH\nBUGCOewNNXTs+y3ZznUfvVMoyyZK5w/pzeUvkmOwzbRGnZJ9WyCghq8aezyEpo2D\nPL7Odf988IeHmvhyZIM4PLJYgDvSwGXyf/gh6gJkf/4wpx+tx/yQYNBm3Rht7sA0\npSaLehK0E0kW1uyBzHGKgyQOhwKBgQDf6LiZ7hSQqh54vIU1XMDRth0UOo/s/HGi\nDoij29KjmHjLkm8vOlCo83e79X0WhcnyB5kM7nWFegwcM1PJ0Dl8gidUuTlOVDtM\n5u2AaxDoyXAUL457U5dGFAIW+R653ZDkzMfCglacP8HixXEyIpL1cTLqiCAgzszS\nLcSWwoAr8wKBgQC4CGm3X97sFpTmHSd6sCHLaDnJNl9xoAKZifUHpqCqCBVhpm8x\nXp+11vmj1GULzfJPDlE8Khbp4tH+6R39tOhC7fjgVaoSGWxgv1odHfZfYXOf9R/X\nHUZmrbUSM1XsNkPfkZ7pR+teQ1HA1Xo40WMHd1zgw0a2a9fNR/EZ9nUn4wKBgGaK\nUEgGNRrPHadTRnnaoV8o1IZYD2OLdIqvtzm7SOqsv90SkaKCRUAqR5InaYKwAHy7\nqAa5Cc73xqX/h4arujff7x0ouiq5/nJIa0ndPmAtKAvGf6zQ6j0ompBkxAKAioON\nmInmYL2roSI2I5G/LagDkDrB3lzH+Brk5NvZ9RKrAoGAGox462GGGb/NbGdDkahN\ndifzYYvq4FPiWFFo0ynKAulxCBWLXO/N45XNuAyen433d8eREcAYz1Dzax44+MdQ\nHo9dU7YcZvFyt6iZsYeQF8dluHui3vzMpUe0KbqpZC5KMOSw53ZdNIwzo8NTAK59\n+uv3dHGj7sS8fhDo3yCifzc=\n-----END PRIVATE KEY-----\n",
- "client_email": "poweron-voice-services@poweronid.iam.gserviceaccount.com",
- "client_id": "116641749406798186404",
- "auth_uri": "https://accounts.google.com/o/oauth2/auth",
- "token_uri": "https://oauth2.googleapis.com/token",
- "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
- "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/poweron-voice-services%40poweronid.iam.gserviceaccount.com",
- "universe_domain": "googleapis.com"
-}
+Connector_GoogleSpeech_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQm8wTnBNR2pBeUR6NmFTcFZkQmZnSmlweEpfQ1RSNnFobHZ2N0Z5Q1luRVJWSmx1WFJTMlloX2hROVQ2TjlvRTJQWjVuT1F4WjJmUVdCbTEyYTQxY2tjSXA2S2Q2NTN6RnRiQmxTTWdBeC1GdnNZUV9KWU1IQVFaQnc2VmJFSHVPVDFhaF9VYUJjWS1MRTlNSEkzUl9lTDZjX016MHZKSVE0bUNjZ3Q3Ni0zOXNfMmQxYUhTcnRzSDdQSUlyUExjUHpYQTF2cG9CS2dZajJLUzZUS09JeGRDRkdPd3Y4VFhWaS1DY3FOc0hQWXhNNVc1LTI4RjdKYjktWXc1X3hVUmg5VnhQX3BuQzJzZ0ZnbUVLNHZHWTRqNjI3VDlrNDlMLWNFVl9sVFd5cm0xbjlXa3VMOEthdXdWTkFPR2ZjQm5ReGNqbFN4NTg5NTVqcmlETGoteWhqLVEwSTNBQTN0Y2ptc0JBd1BabjUwaml0NVllOF90ejhObkR0VHdIU28yRmY1QWk1VktURW1DNWZPR2FnQ2dqS3lWSU8yT3ZDN3J5Y2FzRmJOOFNTWGZhUXNzbmFKOXdxakRfekFyZEFSQlF1QnJIMF9idktTQlp5MWI3eklrOHJPUmVxOFRyWmJNSGVNXzhDSTkwd29tSTJhTVM0T3lMYWhQZzEyd0RYX21NVVFTMm5JMHVpOFFfSHdkc3RZX1A2czVtRGwzU2RZUTVYaEp1TGpfejNwLXdZY0pZQmotOXVGbGRxcnFNdU9XUEZKODJvWnA5TE1mYktjUGUtRkJJbHZuTF81em0wVTdVWi1QRGk1dG45cWR6QkZmRHd0WWRqZE9xR0FCRXktenhLYmNpY2pfYTU0bUtPYk5KWFVRc1E2M0dsSWVFWWJucXc4QnBBanRjZk5GbzVKXzVMX0ZzTFZNYUZXTm8zNjMtb1dEU0VkTmtaR2xPZmZXam5qTVIzMXJpeG5raXVJOHdBZXhXeTA1SjBJU0xHZjZubDk1Z2k3ZFltdzRQbzZZNlJfcHVHWjRzWXJIVlAyRVU1eVVYVzd0R0JoaU5DWEF6ZkdDeGZXdDFiMXpiTkEtTUtuMEt6ZFJyMG0yN2NFbDFna2V6dEl5cmJkS2liOHV5OUhBVGJRX29vVmk5VnptUEhoT01oOTlHeHZaUTVCbTQ3Z1ZkY3J0NnpPcl9JLXVYOXBCZVh6blNSVFJFVUJBT0lOckNyQjI4SlpBV283TUtBYWpZMWVpV1p2czQxZHlBYnJ6d2JUQjZ1WXRGdEdkYy1keHFjRG51N0NSN1c1ZEttTXRfSGZEbzhsV0p3aW1rbHJpS0pEaV9IYTRiVi1WLWF5TUJGMnhVZ3c4dUxiY2ljTXE0S0JJUWM2M21NemwtdlhxcTJta1ROYUpUTmVaLThyQWRxS2NpM0QtaVJyQVU3WWQ5V1kwdUp6dzlpbUxzekxwVGgtQl83MTdWeFoweUZLcFZwNnQ4cVdfMkw2MHBZUU1CSXJNMjk3YThpczRqWTBuZjlRSUJfMXQtdk1uZUVTMzVvNFNYekFmRFVJSG1ib3RzMHZiQXVqa0VWNzNid3RfTzNiTDJrU1VBY0xoWDV3Y21OMWpzUW5IZ25RRWZRT3ZwLVVESG8wUGNBcXFfWGZKUVZPLWVnNlRjaFRibmlrLTgwUmhRRWVNNFRUWjBjbjNHSFUydGY3NWNQSTc3NWlXY2s5U0lOQ2hUeWZiVXdVOV8wRjdFTXNzN25nSE9JMTJqNmxEMTN2U0N1TnRDWWJpMm9WM3FRbWY3bWZGY2huUURlQmdIdkRBNGhLWEl4M1hqNkotY1FBWm9xX2FJZVRBekZoaWx0R2k4eEF2T042dkV3cmVhMG42Q3NTM3dILTI3NEVadUpiUjU2cmxVeExMTlFpVzBfWmxmZVRXSU4xWmVhdTZqaVpmeWhwUjB1VE15SVFtMFhqUTVLOHd4dEVkQ2hiSk5nczJ5aUV1Vk85OW05YWJMYkdMSXRCZV9WLWxudEtUX0FKR0hDNjdMcEREMUlWMHJaV3RUcUpMLXlEU0Q2ZUhETVJycENlTjV2VktibXhTelJDcUFYWWRwV1VHbldtOFdHRlZJcDc2dzBXZGlJTnlSeU9mUzZHRzJNd25WS1FfdUpCRUd0NDdGaFoxVU41Qi1pM1ZQdHZ1THJBdkRMeVYwNGpkRWl2WWtrbnpoRWVFMzc2ME9WVUNNYXEtRnJKWnprekhxcHJVRVRTdHNlSE1ZdWFtLVRjWDJvVVFBTS01NXBjbGlNNC1Dcm1NaUdiUk1uUDFDdlJ0UFRlSkR0eDJwOWladzFxOFpoY1lUc3o1MmUzWm1MYUs3MjYzdk9KbWtrOWxDVWhlOTY5TlVEM210YnlmZXhnUzR6bTVHbi1IS3ZyX0lSc0FtOVBJV1BBeldJNDk1UUxaSW90UG5IY1hJZVhhWHQ3ZzQyd29YOU9DNEthWXdnRkx0aDB1LTBUN0VOUlBGZ2ZPLWY1UXdTQVRCRmhJQ090a2xUbzN6YXhuTmNqaWJpajR2Mm1Rd2lxelc3UjRSRm11dFBlVlRsTlRoRUtPYkJVbzh2Q2Y2MUhqZnNsb3E5cUhLV0hFRTlQVzcwVm5DZ283ekJPNDRxSi1neTZYR3E3UEdXT2kwLWVaZGxFUnlaazRHa01TSEtyRy15S3QydHBHcnlMZWx5Z2xqY1ZkTEVIMERIajl1dW5JZzY2NWVjV0JSb1NzSG5OeXNheFR6QmoybjlBeThHVkJtNHduaXlJYkVySlNYTDMzNWpneHlXeklvYWRvQzAzV1lDa2lzaU5CUEFNNVdLMEpWZ1BVbUhhNm1NX0k0ZVM1dFNFOXFaSkVuTXhFUHYtdF82bGxfcmFMMG9kTUtMOG9adWF3VFlYVnBTSjREbWRMX0pLR2ZHRHpIdGMzYW5OVUtzVGp3OXc5WDJfd0l2T0lVN2xvMjBzSHZSaWY1cDhTUmhVdDR0dWgtMGp4V2V2bDRQSC05cFdyNHBYaHF2dldwTVVNWExYZEZFQTlmaHFMald2LTdJX3lfRi1WbXhfXzRtUzNXVXBHUTBocFRST1h0akVZc1BleDc0aGxidS0zTjl1cTA1aEdWRXUzaXlNQUR4N0ctdkhISE9DSGl2amdyTFZ5bTFnakFrVndhQ0ZTYi1GcHM2TG5aU0xXUGhjMVRBaTRNYzBlcXdtYkdsQ0xwTmxmRzJpV3NnUkdMMDA5czFaUnR0R01TSmZqeHB3MGM0WkxpNzE2SXpSaFhENzh0OWU2M01JZ1pCWHhZdGN0aUJmS3lSWHNMWXhZNnVZSHlrYkFkNHlaVm1FcEwyTzRYN3dhdGItVEh6TS1NU2R6YkNhNjEwSEpwdzF1WUdtWldJQ2ZkUEhqM3VhTFpvWjhxQ2dpNlhpY0NTcTEycm9GQk9NM3Bla1F2REJRLV9ZYW1Dc19JZ1NFOW5yLUxCX0tvakE4ODhhOEh0SXA5anJjRGJ6d3dOb3pVfU3FYbW1kLWVCSThIWnl5TEh1OFlVTml5QTRDVlJPaU9mZTB0NjhSZjQtT0lpa1piWmwybUFQOURlOHBtQW9KdE5SZTJzR1A3MkJVS0xuZTRKSmYtcW5OT2M4Y3BWaUVYQUJTblpsR0xYckx1eklwNzh4ckpBNUxaMWlKd1dINjQ0UWUzejNPcnpMWHZHZTg4NlhyTnlhdXZSQ093QWktTnNLcU40aTBGUFFNUWFKcU1EZWRQalVxbEtIcENTWXl4NXRRLUkxYXhGeDkzY3pfcVgyUlFWa0dVVXNHU3dvOXRDZC1TYnZVS1d4SjdKT3NKTjRMbFZqUlJXSkRQQnNUM2VkbWRhaG1qMFBVQ1VxX0FNYnJmbFI1RjgxSjE4c25VNm1EOXpaanhQLXVJcXEyN2VaTExZMURzYThwdzh5NEVUY3Rab0plT0tWRU5rQmJvOTlWRlp3elRQSWd0LW5laGpQaFFtUzdZVzExSjQ3ZVowck82TjZjektjZzZ4NElGa2pIYjJHa29FR1BsSG5HZGlmd0xJU0RSUW96emZVSUNzSzNCTlNUcGNLQzVSaTRacWtOUWxwOTBLdkt5Q19PWDRONGp4bVNXZHd5ZmtHSVFfX0lCNFhrY2NVU0N1MllJOGtmcVFXQXpJRkdiX2dSVWRuakdldHZ5bEo2U21jQmVtcHpTaHlVZ0k4ZF9xemxMOU92bHllM2VVd1FPUDVRNC1DTFZ1U25yQnlrNm4yUi1qNTRtOU9OaVVYVzZ6Z28wT3lubDk1SF95Zm9rSWZsMVg5NnNDQTl1YzhRPT0=
diff --git a/modules/connectors/connectorDbPostgre.py b/modules/connectors/connectorDbPostgre.py
index c6b254e7..ce1cfe9b 100644
--- a/modules/connectors/connectorDbPostgre.py
+++ b/modules/connectors/connectorDbPostgre.py
@@ -598,18 +598,24 @@ class DatabaseConnector:
tables = []
try:
+ # Ensure connection is alive
+ self._ensure_connection()
+
+ if not self.connection or self.connection.closed:
+ logger.error("Database connection is not available")
+ return tables
+
with self.connection.cursor() as cursor:
cursor.execute("""
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'public'
- AND table_name NOT LIKE '_%'
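+                    -- No name filter here: a NOT LIKE '_%' guard would exclude
+                    -- every table, since '_' is a single-character LIKE wildcard.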
ORDER BY table_name
""")
rows = cursor.fetchall()
tables = [row['table_name'] for row in rows]
except Exception as e:
- logger.error(f"Error reading the database: {e}")
+ logger.error(f"Error reading the database {self.dbDatabase}: {e}")
return tables
diff --git a/modules/routes/routeAdmin.py b/modules/routes/routeAdmin.py
index f895521c..60663598 100644
--- a/modules/routes/routeAdmin.py
+++ b/modules/routes/routeAdmin.py
@@ -1,4 +1,4 @@
-from fastapi import APIRouter, Response, Depends, Request
+from fastapi import APIRouter, Response, Depends, Request, Body
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
import os
@@ -6,16 +6,14 @@ import logging
from pathlib import Path as FilePath
from typing import Dict, Any, List
from fastapi import HTTPException, status
+from datetime import datetime
from modules.shared.configuration import APP_CONFIG
from modules.security.auth import limiter, getCurrentUser
from modules.interfaces.interfaceAppModel import User
-
-router = APIRouter(
- prefix="",
- tags=["General"],
- responses={404: {"description": "Not found"}}
-)
+from modules.interfaces.interfaceAppObjects import getRootInterface
+from modules.interfaces.interfaceChatObjects import getInterface as getChatInterface
+from modules.interfaces.interfaceComponentObjects import getInterface as getComponentInterface
# Static folder setup - using absolute path from app root
baseDir = FilePath(__file__).parent.parent.parent # Go up to gateway root
@@ -33,24 +31,79 @@ router = APIRouter(
# Mount static files
router.mount("/static", StaticFiles(directory=str(staticFolder), html=True), name="static")
+def get_interface_for_database(database_name: str, currentUser: User):
+ """
+ Get the appropriate interface based on database name.
+
+ Args:
+ database_name: Name of the database
+ currentUser: Current user for interface initialization
+
+ Returns:
+ Interface object for the specified database
+
+ Raises:
+ HTTPException: If database name is unknown or interface cannot be created
+ """
+ # Get database names from configuration
+ appDbName = APP_CONFIG.get("DB_APP_DATABASE")
+ chatDbName = APP_CONFIG.get("DB_CHAT_DATABASE")
+ managementDbName = APP_CONFIG.get("DB_MANAGEMENT_DATABASE")
+
+ if not appDbName:
+ raise HTTPException(status_code=500, detail="DB_APP_DATABASE configuration is required")
+
+ # Map database names to their corresponding interfaces
+ if database_name == appDbName:
+ return getRootInterface()
+ elif chatDbName and database_name == chatDbName:
+ return getChatInterface(currentUser)
+ elif managementDbName and database_name == managementDbName:
+ return getComponentInterface(currentUser)
+ else:
+ available_dbs = [appDbName]
+ if chatDbName:
+ available_dbs.append(chatDbName)
+ if managementDbName:
+ available_dbs.append(managementDbName)
+ raise HTTPException(status_code=400, detail=f"Unknown database. Available: {', '.join(available_dbs)}")
+
@router.get("/")
@limiter.limit("30/minute")
async def root(request: Request) -> Dict[str, str]:
"""API status endpoint"""
+ # Validate required configuration values
+ allowedOrigins = APP_CONFIG.get("APP_ALLOWED_ORIGINS")
+ if not allowedOrigins:
+ raise HTTPException(status_code=500, detail="APP_ALLOWED_ORIGINS configuration is required")
+
return {
"status": "online",
"message": "Data Platform API is active",
- "allowedOrigins": f"Allowed origins are {APP_CONFIG.get('APP_ALLOWED_ORIGINS')}"
+ "allowedOrigins": f"Allowed origins are {allowedOrigins}"
}
@router.get("/api/environment")
@limiter.limit("30/minute")
async def get_environment(request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)) -> Dict[str, str]:
"""Get environment configuration for frontend"""
+ # Validate required configuration values
+ apiBaseUrl = APP_CONFIG.get("APP_API_URL")
+ if not apiBaseUrl:
+ raise HTTPException(status_code=500, detail="APP_API_URL configuration is required")
+
+ environment = APP_CONFIG.get("APP_ENV")
+ if not environment:
+ raise HTTPException(status_code=500, detail="APP_ENV configuration is required")
+
+ instanceLabel = APP_CONFIG.get("APP_ENV_LABEL")
+ if not instanceLabel:
+ raise HTTPException(status_code=500, detail="APP_ENV_LABEL configuration is required")
+
return {
- "apiBaseUrl": APP_CONFIG.get("APP_API_URL", ""),
- "environment": APP_CONFIG.get("APP_ENV", "development"),
- "instanceLabel": APP_CONFIG.get("APP_ENV_LABEL", "Development"),
+ "apiBaseUrl": apiBaseUrl,
+ "environment": environment,
+ "instanceLabel": instanceLabel,
# Add other environment variables the frontend might need
}
@@ -63,3 +116,184 @@ async def options_route(request: Request, fullPath: str) -> Response:
@limiter.limit("30/minute")
async def favicon(request: Request) -> FileResponse:
return FileResponse(str(staticFolder / "favicon.ico"), media_type="image/x-icon")
+
+# ----------------------
+# Log Management
+# ----------------------
+
+@router.get("/api/logs/app")
+@limiter.limit("10/minute")
+async def download_app_log(request: Request, currentUser: User = Depends(getCurrentUser)) -> FileResponse:
+ """Download the current day's application log file"""
+ # Check if user has admin privileges
+ if not hasattr(currentUser, 'privilege') or currentUser.privilege not in ('admin', 'sysadmin'):
+ raise HTTPException(status_code=403, detail="Admin privileges required")
+
+ # Get log directory from config
+ logDir = APP_CONFIG.get("APP_LOGGING_LOG_DIR")
+ if not logDir:
+ raise HTTPException(status_code=500, detail="APP_LOGGING_LOG_DIR configuration is required")
+
+ if not os.path.isabs(logDir):
+ # If relative path, make it relative to the gateway directory
+ gatewayDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ logDir = os.path.join(gatewayDir, logDir)
+
+ # Get current date for log file
+ today = datetime.now().strftime("%Y%m%d")
+ logFile = os.path.join(logDir, f"log_app_{today}.log")
+
+ if not os.path.exists(logFile):
+ raise HTTPException(status_code=404, detail=f"Application log file for today not found: {logFile}")
+
+ return FileResponse(
+ path=logFile,
+ filename=f"log_app_{today}.log",
+ media_type="text/plain"
+ )
+
+@router.get("/api/logs/audit")
+@limiter.limit("10/minute")
+async def download_audit_log(request: Request, currentUser: User = Depends(getCurrentUser)) -> FileResponse:
+ """Download the current day's audit log file"""
+ # Check if user has admin privileges
+ if not hasattr(currentUser, 'privilege') or currentUser.privilege not in ('admin', 'sysadmin'):
+ raise HTTPException(status_code=403, detail="Admin privileges required")
+
+ # Get log directory from config
+ logDir = APP_CONFIG.get("APP_LOGGING_LOG_DIR")
+ if not logDir:
+ raise HTTPException(status_code=500, detail="APP_LOGGING_LOG_DIR configuration is required")
+
+ if not os.path.isabs(logDir):
+ # If relative path, make it relative to the gateway directory
+ gatewayDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ logDir = os.path.join(gatewayDir, logDir)
+
+ # Get current date for log file
+ today = datetime.now().strftime("%Y%m%d")
+ logFile = os.path.join(logDir, f"log_audit_{today}.log")
+
+ if not os.path.exists(logFile):
+ raise HTTPException(status_code=404, detail=f"Audit log file for today not found: {logFile}")
+
+ return FileResponse(
+ path=logFile,
+ filename=f"log_audit_{today}.log",
+ media_type="text/plain"
+ )
+
+# ----------------------
+# Database Management
+# ----------------------
+
+@router.get("/api/databases")
+@limiter.limit("10/minute")
+async def list_databases(request: Request, currentUser: User = Depends(getCurrentUser)) -> Dict[str, Any]:
+ """List available databases"""
+ # Check if user has admin privileges
+ if not hasattr(currentUser, 'privilege') or currentUser.privilege not in ('admin', 'sysadmin'):
+ raise HTTPException(status_code=403, detail="Admin privileges required")
+
+ try:
+ # Get configured database names from configuration
+ databases = []
+
+ # App database - required configuration
+ appDb = APP_CONFIG.get("DB_APP_DATABASE")
+ if not appDb:
+ raise HTTPException(status_code=500, detail="DB_APP_DATABASE configuration is required")
+ databases.append(appDb)
+
+ # Chat database - optional configuration
+ chatDb = APP_CONFIG.get("DB_CHAT_DATABASE")
+ if chatDb and chatDb not in databases:
+ databases.append(chatDb)
+
+ # Management database - optional configuration
+ managementDb = APP_CONFIG.get("DB_MANAGEMENT_DATABASE")
+ if managementDb and managementDb not in databases:
+ databases.append(managementDb)
+
+ return {"databases": databases}
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error listing databases: {e}")
+ raise HTTPException(status_code=500, detail="Failed to list databases")
+
+@router.get("/api/databases/{database_name}/tables")
+@limiter.limit("10/minute")
+async def list_tables(
+ request: Request,
+ database_name: str,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """List tables in a specific database"""
+ # Check if user has admin privileges
+ if not hasattr(currentUser, 'privilege') or currentUser.privilege not in ('admin', 'sysadmin'):
+ raise HTTPException(status_code=403, detail="Admin privileges required")
+
+ try:
+ # Get the appropriate interface based on database name
+ interface = get_interface_for_database(database_name, currentUser)
+
+ # Check if interface and database connection exist
+ if not interface or not interface.db:
+ raise HTTPException(status_code=500, detail="Database interface not available")
+
+ # Get tables from database
+ tables = interface.db.getTables()
+
+ return {"database": database_name, "tables": tables}
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error listing tables for database {database_name}: {e}")
+ raise HTTPException(status_code=500, detail=f"Failed to list tables for database {database_name}")
+
+@router.post("/api/databases/{database_name}/tables/drop")
+@limiter.limit("5/minute")
+async def drop_table(
+ request: Request,
+ database_name: str,
+ currentUser: User = Depends(getCurrentUser),
+ payload: Dict[str, Any] = Body(...)
+) -> Dict[str, Any]:
+ """Drop a specific table from a database"""
+ # Check if user has admin privileges
+ if not hasattr(currentUser, 'privilege') or currentUser.privilege not in ('admin', 'sysadmin'):
+ raise HTTPException(status_code=403, detail="Admin privileges required")
+
+ table_name = payload.get("table")
+ if not table_name:
+ raise HTTPException(status_code=400, detail="Table name is required")
+
+ try:
+ # Get the appropriate interface based on database name
+ interface = get_interface_for_database(database_name, currentUser)
+
+ # Check if interface and database connection exist
+ if not interface or not interface.db:
+ raise HTTPException(status_code=500, detail="Database interface not available")
+
+ # Check if table exists
+ tables = interface.db.getTables()
+ if table_name not in tables:
+ raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found in database '{database_name}'")
+
+ # Drop the table
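+        # Interpolating the identifier is acceptable only because table_name was
+        # just validated against the database's actual table list; identifiers
+        # cannot be passed as bound query parameters.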
+ with interface.db.connection.cursor() as cursor:
+ cursor.execute(f'DROP TABLE IF EXISTS "{table_name}" CASCADE')
+ interface.db.connection.commit()
+
+ logger.warning(f"Admin drop_table executed by {currentUser.id}: dropped table '{table_name}' from database '{database_name}'")
+ return {"message": f"Table '{table_name}' dropped successfully from database '{database_name}'"}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error dropping table {table_name} from database {database_name}: {e}")
+        if 'interface' in locals() and interface and interface.db and interface.db.connection:
+ interface.db.connection.rollback()
+ raise HTTPException(status_code=500, detail=f"Failed to drop table '{table_name}' from database '{database_name}'")
diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py
index 01452206..30c5601f 100644
--- a/modules/routes/routeDataConnections.py
+++ b/modules/routes/routeDataConnections.py
@@ -65,8 +65,15 @@ def get_token_status_for_connection(interface, connection_id: str) -> tuple[str,
return "none", None
current_time = get_utc_timestamp()
+
+ # Add 5 minute buffer for proactive refresh
+ buffer_time = 5 * 60 # 5 minutes in seconds
if expires_at <= current_time:
return "expired", expires_at
+ elif expires_at <= (current_time + buffer_time):
+ # Token expires soon - mark as active but log for proactive refresh
+ logger.debug(f"Token for connection {connection_id} expires soon (in {expires_at - current_time} seconds)")
+ return "active", expires_at
else:
return "active", expires_at
@@ -89,6 +96,7 @@ async def get_connections(
"""Get all connections for the current user
SECURITY: This endpoint is secure - users can only see their own connections.
+    Expired OAuth tokens are silently refreshed as part of this request.
"""
try:
interface = getInterface(currentUser)
@@ -97,6 +105,18 @@ async def get_connections(
# This prevents admin from seeing other users' connections and causing confusion
connections = interface.getUserConnections(currentUser.id)
+ # Perform silent token refresh for expired OAuth connections
+ try:
+ from modules.security.tokenRefreshService import token_refresh_service
+ refresh_result = await token_refresh_service.refresh_expired_tokens(currentUser.id)
+ if refresh_result.get("refreshed", 0) > 0:
+ logger.info(f"Silently refreshed {refresh_result['refreshed']} tokens for user {currentUser.id}")
+ # Re-fetch connections to get updated token status
+ connections = interface.getUserConnections(currentUser.id)
+ except Exception as e:
+ logger.warning(f"Silent token refresh failed for user {currentUser.id}: {str(e)}")
+ # Continue with original connections even if refresh fails
+
# Enhance each connection with token status information
enhanced_connections = []
for connection in connections:
diff --git a/modules/routes/routeDataFiles.py b/modules/routes/routeDataFiles.py
index f0feef25..3f11342b 100644
--- a/modules/routes/routeDataFiles.py
+++ b/modules/routes/routeDataFiles.py
@@ -17,8 +17,7 @@ from modules.security.auth import limiter, getCurrentUser
import modules.interfaces.interfaceComponentObjects as interfaceComponentObjects
from modules.interfaces.interfaceComponentModel import FileItem, FilePreview
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse, AttributeDefinition
-from modules.interfaces.interfaceAppModel import User, DataNeutraliserConfig, DataNeutralizerAttributes
-from modules.features.featureNeutralizePlayground import NeutralizationService
+from modules.interfaces.interfaceAppModel import User
# Configure logger
logger = logging.getLogger(__name__)
@@ -365,253 +364,4 @@ async def preview_file(
detail=f"Error previewing file: {str(e)}"
)
-# Data Neutralization endpoints
-
-@router.get("/neutralization/config", response_model=DataNeutraliserConfig)
-@limiter.limit("30/minute")
-async def get_neutralization_config(
- request: Request,
- currentUser: User = Depends(getCurrentUser)
-) -> DataNeutraliserConfig:
- """Get data neutralization configuration"""
- try:
- service = NeutralizationService(currentUser)
- config = service.get_config()
-
- if not config:
- # Return default config instead of 404
- return DataNeutraliserConfig(
- mandateId=currentUser.mandateId,
- userId=currentUser.id,
- enabled=True,
- namesToParse="",
- sharepointSourcePath="",
- sharepointTargetPath=""
- )
-
- return config
-
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error getting neutralization config: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error getting neutralization config: {str(e)}"
- )
-
-@router.post("/neutralization/config", response_model=DataNeutraliserConfig)
-@limiter.limit("10/minute")
-async def save_neutralization_config(
- request: Request,
- config_data: Dict[str, Any] = Body(...),
- currentUser: User = Depends(getCurrentUser)
-) -> DataNeutraliserConfig:
- """Save or update data neutralization configuration"""
- try:
- service = NeutralizationService(currentUser)
- config = service.save_config(config_data)
-
- return config
-
- except Exception as e:
- logger.error(f"Error saving neutralization config: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error saving neutralization config: {str(e)}"
- )
-
-@router.post("/neutralization/neutralize-text", response_model=Dict[str, Any])
-@limiter.limit("20/minute")
-async def neutralize_text(
- request: Request,
- text_data: Dict[str, Any] = Body(...),
- currentUser: User = Depends(getCurrentUser)
-) -> Dict[str, Any]:
- """Neutralize text content"""
- try:
- text = text_data.get("text", "")
- file_id = text_data.get("fileId")
-
- if not text:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="Text content is required"
- )
-
- service = NeutralizationService(currentUser)
- result = service.neutralize_text(text, file_id)
-
- return result
-
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error neutralizing text: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error neutralizing text: {str(e)}"
- )
-
-@router.post("/neutralization/resolve-text", response_model=Dict[str, str])
-@limiter.limit("20/minute")
-async def resolve_text(
- request: Request,
- text_data: Dict[str, str] = Body(...),
- currentUser: User = Depends(getCurrentUser)
-) -> Dict[str, str]:
- """Resolve UIDs in neutralized text back to original text"""
- try:
- text = text_data.get("text", "")
-
- if not text:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="Text content is required"
- )
-
- service = NeutralizationService(currentUser)
- resolved_text = service.resolve_text(text)
-
- return {"resolved_text": resolved_text}
-
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error resolving text: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error resolving text: {str(e)}"
- )
-
-@router.get("/neutralization/attributes", response_model=List[DataNeutralizerAttributes])
-@limiter.limit("30/minute")
-async def get_neutralization_attributes(
- request: Request,
- fileId: Optional[str] = Query(None, description="Filter by file ID"),
- currentUser: User = Depends(getCurrentUser)
-) -> List[DataNeutralizerAttributes]:
- """Get neutralization attributes, optionally filtered by file ID"""
- try:
- service = NeutralizationService(currentUser)
- attributes = service.get_attributes(fileId)
-
- return attributes
-
- except Exception as e:
- logger.error(f"Error getting neutralization attributes: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error getting neutralization attributes: {str(e)}"
- )
-
-@router.post("/neutralization/process-sharepoint", response_model=Dict[str, Any])
-@limiter.limit("5/minute")
-async def process_sharepoint_files(
- request: Request,
- paths_data: Dict[str, str] = Body(...),
- currentUser: User = Depends(getCurrentUser)
-) -> Dict[str, Any]:
- """Process files from SharePoint source path and store neutralized files in target path"""
- try:
- source_path = paths_data.get("sourcePath", "")
- target_path = paths_data.get("targetPath", "")
-
- if not source_path or not target_path:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="Both source and target paths are required"
- )
-
- service = NeutralizationService(currentUser)
- result = await service.process_sharepoint_files(source_path, target_path)
-
- return result
-
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error processing SharePoint files: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error processing SharePoint files: {str(e)}"
- )
-
-@router.post("/neutralization/batch-process", response_model=Dict[str, Any])
-@limiter.limit("10/minute")
-async def batch_process_files(
- request: Request,
- files_data: List[Dict[str, Any]] = Body(...),
- currentUser: User = Depends(getCurrentUser)
-) -> Dict[str, Any]:
- """Process multiple files for neutralization"""
- try:
- if not files_data:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="Files data is required"
- )
-
- service = NeutralizationService(currentUser)
- result = service.batch_neutralize_files(files_data)
-
- return result
-
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error batch processing files: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error batch processing files: {str(e)}"
- )
-
-@router.get("/neutralization/stats", response_model=Dict[str, Any])
-@limiter.limit("30/minute")
-async def get_neutralization_stats(
- request: Request,
- currentUser: User = Depends(getCurrentUser)
-) -> Dict[str, Any]:
- """Get neutralization processing statistics"""
- try:
- service = NeutralizationService(currentUser)
- stats = service.get_processing_stats()
-
- return stats
-
- except Exception as e:
- logger.error(f"Error getting neutralization stats: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error getting neutralization stats: {str(e)}"
- )
-
-@router.delete("/neutralization/attributes/{fileId}", response_model=Dict[str, str])
-@limiter.limit("10/minute")
-async def cleanup_file_attributes(
- request: Request,
- fileId: str = Path(..., description="File ID to cleanup attributes for"),
- currentUser: User = Depends(getCurrentUser)
-) -> Dict[str, str]:
- """Clean up neutralization attributes for a specific file"""
- try:
- service = NeutralizationService(currentUser)
- success = service.cleanup_file_attributes(fileId)
-
- if success:
- return {"message": f"Successfully cleaned up attributes for file {fileId}"}
- else:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to cleanup file attributes"
- )
-
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error cleaning up file attributes: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error cleaning up file attributes: {str(e)}"
- )
diff --git a/modules/routes/routeDataNeutralization.py b/modules/routes/routeDataNeutralization.py
new file mode 100644
index 00000000..697c6f1c
--- /dev/null
+++ b/modules/routes/routeDataNeutralization.py
@@ -0,0 +1,274 @@
+from fastapi import APIRouter, HTTPException, Depends, Path, Request, status, Query, Body
+from typing import List, Dict, Any, Optional
+import logging
+
+# Import auth module
+from modules.security.auth import limiter, getCurrentUser
+
+# Import interfaces
+from modules.interfaces.interfaceAppModel import User, DataNeutraliserConfig, DataNeutralizerAttributes
+from modules.features.featureNeutralizePlayground import NeutralizationService
+
+# Configure logger
+logger = logging.getLogger(__name__)
+
+# Create router for neutralization endpoints
+router = APIRouter(
+ prefix="/api/neutralization",
+ tags=["Data Neutralisation"],
+ responses={
+ 404: {"description": "Not found"},
+ 400: {"description": "Bad request"},
+ 401: {"description": "Unauthorized"},
+ 403: {"description": "Forbidden"},
+ 500: {"description": "Internal server error"}
+ }
+)
+
+@router.get("/config", response_model=DataNeutraliserConfig)
+@limiter.limit("30/minute")
+async def get_neutralization_config(
+ request: Request,
+ currentUser: User = Depends(getCurrentUser)
+) -> DataNeutraliserConfig:
+ """Get data neutralization configuration"""
+ try:
+ service = NeutralizationService(currentUser)
+ config = service.get_config()
+
+ if not config:
+ # Return default config instead of 404
+ return DataNeutraliserConfig(
+ mandateId=currentUser.mandateId,
+ userId=currentUser.id,
+ enabled=True,
+ namesToParse="",
+ sharepointSourcePath="",
+ sharepointTargetPath=""
+ )
+
+ return config
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error getting neutralization config: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error getting neutralization config: {str(e)}"
+ )
+
+@router.post("/config", response_model=DataNeutraliserConfig)
+@limiter.limit("10/minute")
+async def save_neutralization_config(
+ request: Request,
+ config_data: Dict[str, Any] = Body(...),
+ currentUser: User = Depends(getCurrentUser)
+) -> DataNeutraliserConfig:
+ """Save or update data neutralization configuration"""
+ try:
+ service = NeutralizationService(currentUser)
+ config = service.save_config(config_data)
+
+ return config
+
+ except Exception as e:
+ logger.error(f"Error saving neutralization config: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error saving neutralization config: {str(e)}"
+ )
+
+@router.post("/neutralize-text", response_model=Dict[str, Any])
+@limiter.limit("20/minute")
+async def neutralize_text(
+ request: Request,
+ text_data: Dict[str, Any] = Body(...),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """Neutralize text content"""
+ try:
+ text = text_data.get("text", "")
+ file_id = text_data.get("fileId")
+
+ if not text:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Text content is required"
+ )
+
+ service = NeutralizationService(currentUser)
+ result = service.neutralize_text(text, file_id)
+
+ return result
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error neutralizing text: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error neutralizing text: {str(e)}"
+ )
+
+@router.post("/resolve-text", response_model=Dict[str, str])
+@limiter.limit("20/minute")
+async def resolve_text(
+ request: Request,
+ text_data: Dict[str, str] = Body(...),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, str]:
+ """Resolve UIDs in neutralized text back to original text"""
+ try:
+ text = text_data.get("text", "")
+
+ if not text:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Text content is required"
+ )
+
+ service = NeutralizationService(currentUser)
+ resolved_text = service.resolve_text(text)
+
+ return {"resolved_text": resolved_text}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error resolving text: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error resolving text: {str(e)}"
+ )
+
+@router.get("/attributes", response_model=List[DataNeutralizerAttributes])
+@limiter.limit("30/minute")
+async def get_neutralization_attributes(
+ request: Request,
+ fileId: Optional[str] = Query(None, description="Filter by file ID"),
+ currentUser: User = Depends(getCurrentUser)
+) -> List[DataNeutralizerAttributes]:
+ """Get neutralization attributes, optionally filtered by file ID"""
+ try:
+ service = NeutralizationService(currentUser)
+ attributes = service.get_attributes(fileId)
+
+ return attributes
+
+ except Exception as e:
+ logger.error(f"Error getting neutralization attributes: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error getting neutralization attributes: {str(e)}"
+ )
+
+@router.post("/process-sharepoint", response_model=Dict[str, Any])
+@limiter.limit("5/minute")
+async def process_sharepoint_files(
+ request: Request,
+ paths_data: Dict[str, str] = Body(...),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """Process files from SharePoint source path and store neutralized files in target path"""
+ try:
+ source_path = paths_data.get("sourcePath", "")
+ target_path = paths_data.get("targetPath", "")
+
+ if not source_path or not target_path:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Both source and target paths are required"
+ )
+
+ service = NeutralizationService(currentUser)
+ result = await service.process_sharepoint_files(source_path, target_path)
+
+ return result
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error processing SharePoint files: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error processing SharePoint files: {str(e)}"
+ )
+
+@router.post("/batch-process", response_model=Dict[str, Any])
+@limiter.limit("10/minute")
+async def batch_process_files(
+ request: Request,
+ files_data: List[Dict[str, Any]] = Body(...),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """Process multiple files for neutralization"""
+ try:
+ if not files_data:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Files data is required"
+ )
+
+ service = NeutralizationService(currentUser)
+ result = service.batch_neutralize_files(files_data)
+
+ return result
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error batch processing files: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error batch processing files: {str(e)}"
+ )
+
+@router.get("/stats", response_model=Dict[str, Any])
+@limiter.limit("30/minute")
+async def get_neutralization_stats(
+ request: Request,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """Get neutralization processing statistics"""
+ try:
+ service = NeutralizationService(currentUser)
+ stats = service.get_processing_stats()
+
+ return stats
+
+ except Exception as e:
+ logger.error(f"Error getting neutralization stats: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error getting neutralization stats: {str(e)}"
+ )
+
+@router.delete("/attributes/{fileId}", response_model=Dict[str, str])
+@limiter.limit("10/minute")
+async def cleanup_file_attributes(
+ request: Request,
+ fileId: str = Path(..., description="File ID to cleanup attributes for"),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, str]:
+ """Clean up neutralization attributes for a specific file"""
+ try:
+ service = NeutralizationService(currentUser)
+ success = service.cleanup_file_attributes(fileId)
+
+ if success:
+ return {"message": f"Successfully cleaned up attributes for file {fileId}"}
+ else:
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to cleanup file attributes"
+ )
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error cleaning up file attributes: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Error cleaning up file attributes: {str(e)}"
+ )
diff --git a/modules/routes/routeDataUsers.py b/modules/routes/routeDataUsers.py
index 9d8be813..01fb41f1 100644
--- a/modules/routes/routeDataUsers.py
+++ b/modules/routes/routeDataUsers.py
@@ -132,6 +132,165 @@ async def update_user(
return updatedUser
+@router.post("/{userId}/reset-password")
+@limiter.limit("5/minute")
+async def reset_user_password(
+ request: Request,
+ userId: str = Path(..., description="ID of the user to reset password for"),
+ newPassword: str = Body(..., embed=True),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """Reset user password (Admin only)"""
+ try:
+ # Check if current user is admin
+ if currentUser.privilege != UserPrivilege.ADMIN:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Only administrators can reset passwords"
+ )
+
+ # Get user interface
+ appInterface = getInterface(currentUser)
+
+ # Get target user
+ target_user = appInterface.getUserById(userId)
+ if not target_user:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="User not found"
+ )
+
+ # Validate password strength
+ if len(newPassword) < 8:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Password must be at least 8 characters long"
+ )
+
+ # Reset password
+ success = appInterface.resetUserPassword(userId, newPassword)
+ if not success:
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to reset password"
+ )
+
+ # SECURITY: Automatically revoke all tokens for the user after password reset
+ try:
+ revoked_count = appInterface.revokeTokensByUser(
+ userId=userId,
+ authority=None, # Revoke all authorities
+ mandateId=None, # Revoke across all mandates
+ revokedBy=currentUser.id,
+ reason="password_reset"
+ )
+ logger.info(f"Revoked {revoked_count} tokens for user {userId} after password reset")
+ except Exception as e:
+ logger.error(f"Failed to revoke tokens after password reset for user {userId}: {str(e)}")
+ # Don't fail the password reset if token revocation fails
+
+ # Log password reset
+ try:
+ from modules.shared.auditLogger import audit_logger
+ audit_logger.log_security_event(
+ user_id=str(currentUser.id),
+ mandate_id=str(currentUser.mandateId),
+ action="password_reset",
+ details=f"Reset password for user {userId}"
+ )
+ except Exception:
+ pass
+
+ return {
+ "message": "Password reset successfully",
+ "user_id": userId
+ }
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error resetting password: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Password reset failed: {str(e)}"
+ )
+
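+# Illustrative admin call (the /users prefix is an assumption; adjust to this
+# router's actual prefix). newPassword is wrapped in an object because of
+# Body(..., embed=True):
+#
+#     curl -X POST https://<host>/users/<userId>/reset-password \
+#          -H "Content-Type: application/json" \
+#          -H "X-CSRF-Token: <hex token>" \
+#          --cookie "auth_token=<jwt>" \
+#          -d '{"newPassword": "s3cure-example"}'
+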
+@router.post("/change-password")
+@limiter.limit("5/minute")
+async def change_password(
+ request: Request,
+ currentPassword: str = Body(..., embed=True),
+ newPassword: str = Body(..., embed=True),
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """Change current user's password"""
+ try:
+ # Get user interface
+ appInterface = getInterface(currentUser)
+
+ # Verify current password
+ if not appInterface.verifyPassword(currentPassword, currentUser.passwordHash):
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Current password is incorrect"
+ )
+
+ # Validate new password strength
+ if len(newPassword) < 8:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="New password must be at least 8 characters long"
+ )
+
+ # Change password
+ success = appInterface.resetUserPassword(str(currentUser.id), newPassword)
+ if not success:
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail="Failed to change password"
+ )
+
+ # SECURITY: Automatically revoke all tokens for the user after password change
+ try:
+ revoked_count = appInterface.revokeTokensByUser(
+ userId=str(currentUser.id),
+ authority=None, # Revoke all authorities
+ mandateId=None, # Revoke across all mandates
+ revokedBy=currentUser.id,
+ reason="password_change"
+ )
+ logger.info(f"Revoked {revoked_count} tokens for user {currentUser.id} after password change")
+ except Exception as e:
+ logger.error(f"Failed to revoke tokens after password change for user {currentUser.id}: {str(e)}")
+ # Don't fail the password change if token revocation fails
+
+ # Log password change
+ try:
+ from modules.shared.auditLogger import audit_logger
+ audit_logger.log_security_event(
+ user_id=str(currentUser.id),
+ mandate_id=str(currentUser.mandateId),
+ action="password_change",
+ details="User changed their own password"
+ )
+ except Exception:
+ pass
+
+ return {
+ "message": "Password changed successfully. Please log in again with your new password."
+ }
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error changing password: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Password change failed: {str(e)}"
+ )
+
@router.delete("/{userId}", response_model=Dict[str, Any])
@limiter.limit("10/minute")
async def delete_user(
diff --git a/modules/routes/routeSecurityLocal.py b/modules/routes/routeSecurityLocal.py
index 15f998f9..7b396f77 100644
--- a/modules/routes/routeSecurityLocal.py
+++ b/modules/routes/routeSecurityLocal.py
@@ -13,7 +13,7 @@ from jose import jwt
from pydantic import BaseModel
# Import auth modules
-from modules.security.auth import createAccessToken, getCurrentUser, limiter, SECRET_KEY, ALGORITHM
+from modules.security.auth import createAccessToken, createAccessTokenWithCookie, setRefreshTokenCookie, getCurrentUser, limiter, SECRET_KEY, ALGORITHM
from modules.interfaces.interfaceAppObjects import getInterface, getRootInterface
from modules.interfaces.interfaceAppModel import User, UserInDB, AuthAuthority, UserPrivilege, Token
from modules.shared.attributeUtils import ModelMixin
@@ -38,6 +38,7 @@ router = APIRouter(
@limiter.limit("30/minute")
async def login(
request: Request,
+ response: Response,
formData: OAuth2PasswordRequestForm = Depends(),
) -> Dict[str, Any]:
"""Get access token for local user authentication"""
@@ -90,24 +91,25 @@ async def login(
session_id = str(uuid.uuid4())
token_data["sid"] = session_id
- # Create access token
- access_token, expires_at = createAccessToken(token_data)
- if not access_token:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to create access token"
- )
+ # Create access token with httpOnly cookie
+ access_token = createAccessTokenWithCookie(token_data, response)
+
+ # Create refresh token with httpOnly cookie
+ refresh_token = setRefreshTokenCookie(token_data, response)
+
+ # Get expiration time for response
+ try:
+ payload = jwt.decode(access_token, SECRET_KEY, algorithms=[ALGORITHM])
+ expires_at = datetime.fromtimestamp(payload.get("exp"))
+ except Exception as e:
+ logger.error(f"Failed to decode access token: {str(e)}")
+ raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to finalize token")
# Get user-specific interface for token operations
userInterface = getInterface(user)
- # Decode JWT to get jti for DB persistence
- try:
- payload = jwt.decode(access_token, SECRET_KEY, algorithms=[ALGORITHM])
- jti = payload.get("jti")
- except Exception as e:
- logger.error(f"Failed to decode created JWT: {str(e)}")
- raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to finalize token")
+ # Get jti from already decoded payload
+ jti = payload.get("jti")
# Create token
token = Token(
@@ -137,12 +139,12 @@ async def login(
# Don't fail if audit logging fails
pass
- # Create response data
+ # Create response data (tokens are now in httpOnly cookies)
response_data = {
"type": "local_auth_success",
- "access_token": access_token,
- "token_data": token.dict(),
- "authenticationAuthority": "local"
+ "message": "Login successful - tokens set in httpOnly cookies",
+ "authenticationAuthority": "local",
+ "expires_at": expires_at.isoformat()
}
return response_data
@@ -253,20 +255,85 @@ async def read_user_me(
detail=f"Failed to get current user: {str(e)}"
)
+@router.post("/refresh")
+@limiter.limit("60/minute")
+async def refresh_token(
+ request: Request,
+ response: Response,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+    """Refresh the access token using the refresh token cookie.
+
+    Note: getCurrentUser still requires a valid access token, so this endpoint
+    renews a session proactively rather than recovering one that already expired.
+    """
+ try:
+ # Get refresh token from cookie
+ refresh_token = request.cookies.get('refresh_token')
+ if not refresh_token:
+ raise HTTPException(status_code=401, detail="No refresh token found")
+
+ # Validate refresh token
+ try:
+ payload = jwt.decode(refresh_token, SECRET_KEY, algorithms=[ALGORITHM])
+ if payload.get("type") != "refresh":
+ raise HTTPException(status_code=401, detail="Invalid refresh token type")
+ except jwt.ExpiredSignatureError:
+ raise HTTPException(status_code=401, detail="Refresh token expired")
+ except jwt.JWTError:
+ raise HTTPException(status_code=401, detail="Invalid refresh token")
+
+ # Create new token data
+ token_data = {
+ "sub": currentUser.username,
+ "mandateId": str(currentUser.mandateId),
+ "userId": str(currentUser.id),
+ "authenticationAuthority": currentUser.authenticationAuthority
+ }
+
+ # Create new access token with cookie
+ access_token = createAccessTokenWithCookie(token_data, response)
+
+ # Get expiration time
+ try:
+ payload = jwt.decode(access_token, SECRET_KEY, algorithms=[ALGORITHM])
+ expires_at = datetime.fromtimestamp(payload.get("exp"))
+ except Exception as e:
+ logger.error(f"Failed to decode new access token: {str(e)}")
+ raise HTTPException(status_code=500, detail="Failed to create new token")
+
+ return {
+ "type": "token_refresh_success",
+ "message": "Token refreshed successfully",
+ "expires_at": expires_at.isoformat()
+ }
+
+    except HTTPException:
+        # Re-raise HTTP errors unchanged (including the 503 raised when the
+        # token table is missing)
+        raise
+ except Exception as e:
+ logger.error(f"Token refresh error: {str(e)}")
+ raise HTTPException(status_code=500, detail="Token refresh failed")
+
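+# Illustrative client call (cookies carry both tokens; the /api/local prefix is
+# inferred from the CSRF exempt list and may differ for this router):
+#
+#     curl -X POST https://<host>/api/local/refresh \
+#          -H "X-CSRF-Token: <hex token>" \
+#          --cookie "auth_token=<jwt>; refresh_token=<jwt>"
+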
@router.post("/logout")
@limiter.limit("30/minute")
-async def logout(request: Request, currentUser: User = Depends(getCurrentUser)) -> JSONResponse:
+async def logout(request: Request, response: Response, currentUser: User = Depends(getCurrentUser)) -> JSONResponse:
"""Logout from local authentication"""
try:
# Get user interface with current user context
appInterface = getInterface(currentUser)
- # Read bearer token from Authorization header to obtain session id / jti
- auth_header = request.headers.get("Authorization")
- if not auth_header or not auth_header.lower().startswith("bearer "):
- raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Missing Authorization header")
- raw_token = auth_header.split(" ", 1)[1].strip()
+
+ # Get token from cookie or Authorization header
+ token = request.cookies.get('auth_token')
+ if not token:
+ auth_header = request.headers.get("Authorization")
+ if auth_header and auth_header.lower().startswith("bearer "):
+ token = auth_header.split(" ", 1)[1].strip()
+
+ if not token:
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No token found")
+
try:
- payload = jwt.decode(raw_token, SECRET_KEY, algorithms=[ALGORITHM])
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
session_id = payload.get("sid") or payload.get("sessionId")
jti = payload.get("jti")
except Exception as e:
@@ -293,8 +360,12 @@ async def logout(request: Request, currentUser: User = Depends(getCurrentUser))
# Don't fail if audit logging fails
pass
+ # Clear httpOnly cookies
+ response.delete_cookie(key="auth_token", httponly=True, samesite="strict")
+ response.delete_cookie(key="refresh_token", httponly=True, samesite="strict")
+
return JSONResponse({
- "message": "Successfully logged out",
+ "message": "Successfully logged out - cookies cleared",
"revokedTokens": revoked
})
diff --git a/modules/security/auth.py b/modules/security/auth.py
index 4ada086c..2ccaaa74 100644
--- a/modules/security/auth.py
+++ b/modules/security/auth.py
@@ -6,8 +6,8 @@ Handles JWT-based authentication, token generation, and user context.
from datetime import datetime, timedelta, timezone
import uuid
from typing import Optional, Dict, Any, Tuple
-from fastapi import Depends, HTTPException, status, Request
-from fastapi.security import OAuth2PasswordBearer
+from fastapi import Depends, HTTPException, status, Request, Response
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jose import JWTError, jwt
import logging
from slowapi import Limiter
@@ -24,8 +24,29 @@ ALGORITHM = APP_CONFIG.get("Auth_ALGORITHM")
ACCESS_TOKEN_EXPIRE_MINUTES = int(APP_CONFIG.get("APP_TOKEN_EXPIRY"))
REFRESH_TOKEN_EXPIRE_DAYS = int(APP_CONFIG.get("APP_REFRESH_TOKEN_EXPIRY", "7"))
-# OAuth2 Setup
-oauth2Scheme = OAuth2PasswordBearer(tokenUrl="token")
+# Cookie-based Authentication Setup
+class CookieAuth(HTTPBearer):
+ """Cookie-based authentication that checks httpOnly cookies first, then Authorization header"""
+ def __init__(self, auto_error: bool = True):
+ super().__init__(auto_error=auto_error)
+
+ async def __call__(self, request: Request) -> Optional[str]:
+ # 1. Check httpOnly cookie first (preferred method)
+ token = request.cookies.get('auth_token')
+ if token:
+ return token
+
+ # 2. Fallback to Authorization header for API calls
+ authorization = request.headers.get("Authorization")
+ if authorization and authorization.startswith("Bearer "):
+ return authorization.split(" ")[1]
+
+ if self.auto_error:
+ raise HTTPException(status_code=401, detail="Not authenticated")
+ return None
+
+# Initialize cookie-based auth
+cookieAuth = CookieAuth(auto_error=False)
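+
+# Illustrative direct use of the scheme (a minimal sketch; _getUserBase below
+# is the real consumer):
+#
+#     @router.get("/whoami")
+#     async def whoami(token: str = Depends(cookieAuth)):
+#         payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+#         return {"sub": payload.get("sub")}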
# Rate Limiter
limiter = Limiter(key_func=get_remote_address)
@@ -59,7 +80,82 @@ def createAccessToken(data: dict, expiresDelta: Optional[timedelta] = None) -> T
return encodedJwt, expire
-def _getUserBase(token: str = Depends(oauth2Scheme)) -> User:
+def createAccessTokenWithCookie(data: dict, response: Response, expiresDelta: Optional[timedelta] = None) -> str:
+ """
+ Creates a JWT Access Token and sets it as an httpOnly cookie.
+
+ Args:
+ data: Data to encode (usually user ID or username)
+ response: FastAPI Response object to set cookie
+ expiresDelta: Validity duration of the token (optional)
+
+ Returns:
+ JWT Token as string
+ """
+ access_token, expires_at = createAccessToken(data, expiresDelta)
+
+ # Set httpOnly cookie
+ response.set_cookie(
+ key="auth_token",
+ value=access_token,
+ httponly=True,
+ secure=True, # HTTPS only in production
+ samesite="strict",
+ max_age=int(expiresDelta.total_seconds()) if expiresDelta else ACCESS_TOKEN_EXPIRE_MINUTES * 60
+ )
+
+ return access_token
+
+def createRefreshToken(data: dict) -> Tuple[str, datetime]:
+ """
+ Creates a JWT Refresh Token with longer expiration.
+
+ Args:
+ data: Data to encode (usually user ID or username)
+
+ Returns:
+ Tuple of (JWT Refresh Token as string, expiration datetime)
+ """
+ toEncode = data.copy()
+ # Ensure a token id (jti) exists for revocation tracking
+ if "jti" not in toEncode or not toEncode.get("jti"):
+ toEncode["jti"] = str(uuid.uuid4())
+
+ # Add refresh token type
+ toEncode["type"] = "refresh"
+
+ expire = get_utc_now() + timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS)
+ toEncode.update({"exp": expire})
+ encodedJwt = jwt.encode(toEncode, SECRET_KEY, algorithm=ALGORITHM)
+
+ return encodedJwt, expire
+
+def setRefreshTokenCookie(data: dict, response: Response) -> str:
+ """
+ Creates a JWT Refresh Token and sets it as an httpOnly cookie.
+
+ Args:
+ data: Data to encode (usually user ID or username)
+ response: FastAPI Response object to set cookie
+
+ Returns:
+ JWT Refresh Token as string
+ """
+ refresh_token, expires_at = createRefreshToken(data)
+
+ # Set httpOnly cookie for refresh token
+ response.set_cookie(
+ key="refresh_token",
+ value=refresh_token,
+ httponly=True,
+ secure=True, # HTTPS only in production
+ samesite="strict",
+ max_age=REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60 # Days to seconds
+ )
+
+ return refresh_token
+
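+# Illustrative claim check (mirrors the /refresh route): a refresh token is
+# distinguished from an access token by its "type" claim:
+#
+#     payload = jwt.decode(refresh_token, SECRET_KEY, algorithms=[ALGORITHM])
+#     assert payload.get("type") == "refresh"
+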
+def _getUserBase(token: str = Depends(cookieAuth)) -> User:
"""
Extracts and validates the current user from the JWT token.
@@ -138,7 +234,14 @@ def _getUserBase(token: str = Depends(oauth2Scheme)) -> User:
db_tokens = appInterface.db.getRecordset(
Token, recordFilter={"id": tokenId}
)
- except Exception:
+    except Exception as e:
+        # Detect a missing token table (Postgres reports: relation "..." does not exist)
+        if "relation" in str(e).lower() and "does not exist" in str(e).lower():
+            logger.error("Token table does not exist - database may have been reset")
+            raise HTTPException(
+                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+                detail="Authentication service temporarily unavailable. Please contact administrator."
+            )
+        db_tokens = []
if db_tokens:
diff --git a/modules/security/csrf.py b/modules/security/csrf.py
new file mode 100644
index 00000000..030ab665
--- /dev/null
+++ b/modules/security/csrf.py
@@ -0,0 +1,96 @@
+"""
+CSRF Protection Middleware for PowerOn Gateway
+
+This module provides CSRF protection for state-changing operations.
+"""
+
+import logging
+import re
+from fastapi import Request, HTTPException, status
+from starlette.middleware.base import BaseHTTPMiddleware
+from typing import Set
+
+logger = logging.getLogger(__name__)
+
+class CSRFMiddleware(BaseHTTPMiddleware):
+ """
+ CSRF protection middleware that validates CSRF tokens for state-changing operations.
+ """
+
+ def __init__(self, app, exempt_paths: Set[str] = None):
+ super().__init__(app)
+ # Paths that are exempt from CSRF protection
+ self.exempt_paths = exempt_paths or {
+ "/api/local/login",
+ "/api/local/register",
+ "/api/msft/login",
+ "/api/google/login",
+ "/api/msft/callback",
+ "/api/google/callback"
+ }
+
+ # State-changing HTTP methods that require CSRF protection
+ self.protected_methods = {"POST", "PUT", "DELETE", "PATCH"}
+
+ async def dispatch(self, request: Request, call_next):
+ """
+ Check CSRF token for state-changing operations.
+ """
+ # Skip CSRF check for exempt paths
+ if request.url.path in self.exempt_paths:
+ return await call_next(request)
+
+ # Skip CSRF check for non-state-changing methods
+ if request.method not in self.protected_methods:
+ return await call_next(request)
+
+ # Skip CSRF check for OPTIONS requests (CORS preflight)
+ if request.method == "OPTIONS":
+ return await call_next(request)
+
+ # Get CSRF token from header
+ csrf_token = request.headers.get("X-CSRF-Token")
+ if not csrf_token:
+ logger.warning(f"CSRF token missing for {request.method} {request.url.path}")
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="CSRF token missing"
+ )
+
+ # Validate CSRF token format (basic validation)
+ if not self._is_valid_csrf_token(csrf_token):
+ logger.warning(f"Invalid CSRF token format for {request.method} {request.url.path}")
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Invalid CSRF token format"
+ )
+
+ # Additional CSRF validation could be added here:
+ # - Check token against session
+ # - Validate token expiration
+ # - Verify token origin
+
+ return await call_next(request)
+
+ def _is_valid_csrf_token(self, token: str) -> bool:
+ """
+ Basic validation of CSRF token format.
+
+ Args:
+ token: The CSRF token to validate
+
+ Returns:
+ bool: True if token format is valid
+ """
+ if not token or not isinstance(token, str):
+ return False
+
+ # Basic format validation (hex string, reasonable length)
+ if len(token) < 16 or len(token) > 64:
+ return False
+
+        # Check that the token is strictly hexadecimal. A bare int(token, 16)
+        # would also accept a sign, surrounding whitespace, and underscore
+        # separators, so match the character set explicitly.
+        return re.fullmatch(r"[0-9a-fA-F]+", token) is not None
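+
+# Illustrative client-side token that passes the format check above (minted
+# with the standard library; binding it to a session is left to the caller):
+#
+#     import secrets
+#     csrf_token = secrets.token_hex(16)   # 32 hex characters, within 16-64
+#     # sent on state-changing requests as the X-CSRF-Token header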
diff --git a/modules/security/tokenRefreshMiddleware.py b/modules/security/tokenRefreshMiddleware.py
new file mode 100644
index 00000000..89f21858
--- /dev/null
+++ b/modules/security/tokenRefreshMiddleware.py
@@ -0,0 +1,191 @@
+"""
+Token Refresh Middleware for PowerOn Gateway
+
+This middleware automatically refreshes expired OAuth tokens
+when API endpoints are accessed, providing a seamless user experience.
+"""
+
+import logging
+import asyncio
+from typing import Callable, Optional
+from fastapi import Request, Response
+from starlette.middleware.base import BaseHTTPMiddleware
+from modules.security.tokenRefreshService import token_refresh_service
+from modules.shared.timezoneUtils import get_utc_timestamp
+
+logger = logging.getLogger(__name__)
+
+class TokenRefreshMiddleware(BaseHTTPMiddleware):
+ """
+ Middleware that automatically refreshes expired OAuth tokens
+ when API endpoints are accessed.
+ """
+
+ def __init__(self, app, enabled: bool = True):
+ super().__init__(app)
+ self.enabled = enabled
+ self.refresh_endpoints = {
+ '/api/connections',
+ '/api/files',
+ '/api/chat',
+ '/api/msft',
+ '/api/google'
+ }
+
+ async def dispatch(self, request: Request, call_next: Callable) -> Response:
+ """
+ Process request and refresh tokens if needed
+ """
+ if not self.enabled:
+ return await call_next(request)
+
+ # Check if this is an endpoint that might need token refresh
+ if not self._should_check_tokens(request):
+ return await call_next(request)
+
+ # Extract user ID from request (if available)
+ user_id = self._extract_user_id(request)
+ if not user_id:
+ return await call_next(request)
+
+ try:
+            # Kick off a silent token refresh in the background; don't await it,
+            # so the request isn't slowed down. Note: create_task keeps no strong
+            # reference here, so the task may be garbage-collected before it runs.
+            asyncio.create_task(self._silent_refresh_tokens(user_id))
+
+ except Exception as e:
+ logger.warning(f"Error scheduling token refresh: {str(e)}")
+ # Continue with request even if refresh scheduling fails
+
+ # Process the original request
+ response = await call_next(request)
+ return response
+
+ def _should_check_tokens(self, request: Request) -> bool:
+ """
+ Check if this request should trigger token refresh
+ """
+ path = request.url.path
+
+ # Only check specific API endpoints
+ for endpoint in self.refresh_endpoints:
+ if path.startswith(endpoint):
+ return True
+
+ return False
+
+    def _extract_user_id(self, request: Request) -> Optional[str]:
+        """
+        Extract the user ID from the request context
+        """
+        try:
+            # Get the user from request state (set by an upstream auth component)
+            if hasattr(request.state, 'user_id'):
+                return request.state.user_id
+
+            # No fallback implemented: decoding the JWT from cookies or headers
+            # here would duplicate the auth layer, so the refresh is skipped.
+            return None
+
+        except Exception as e:
+            logger.debug(f"Could not extract user ID: {str(e)}")
+            return None
+
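+    # Illustrative sketch of an upstream component that populates
+    # request.state.user_id (assumes SECRET_KEY and ALGORITHM as defined in
+    # modules.security.auth; not part of this patch):
+    #
+    #     @app.middleware("http")
+    #     async def attach_user_id(request, call_next):
+    #         token = request.cookies.get("auth_token")
+    #         if token:
+    #             try:
+    #                 payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+    #                 request.state.user_id = payload.get("userId")
+    #             except Exception:
+    #                 pass
+    #         return await call_next(request)
+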
+ async def _silent_refresh_tokens(self, user_id: str) -> None:
+ """
+ Perform silent token refresh for the user
+ """
+ try:
+ logger.debug(f"Starting silent token refresh for user {user_id}")
+
+ # Refresh expired tokens
+ result = await token_refresh_service.refresh_expired_tokens(user_id)
+
+ if result.get("refreshed", 0) > 0:
+ logger.info(f"Silently refreshed {result['refreshed']} tokens for user {user_id}")
+
+ except Exception as e:
+ logger.error(f"Error in silent token refresh for user {user_id}: {str(e)}")
+
+class ProactiveTokenRefreshMiddleware(BaseHTTPMiddleware):
+ """
+ Middleware that proactively refreshes tokens before they expire
+ """
+
+ def __init__(self, app, enabled: bool = True, check_interval_minutes: int = 5):
+ super().__init__(app)
+ self.enabled = enabled
+ self.check_interval_minutes = check_interval_minutes
+ self.last_check = {}
+
+ async def dispatch(self, request: Request, call_next: Callable) -> Response:
+ """
+ Process request and check for proactive refresh needs
+ """
+ if not self.enabled:
+ return await call_next(request)
+
+ # Extract user ID from request
+ user_id = self._extract_user_id(request)
+ if not user_id:
+ return await call_next(request)
+
+ # Check if we need to do proactive refresh
+ if self._should_check_proactive_refresh(user_id):
+ try:
+ # Perform proactive refresh in background
+ asyncio.create_task(self._proactive_refresh_tokens(user_id))
+ self.last_check[user_id] = get_utc_timestamp()
+
+ except Exception as e:
+ logger.warning(f"Error scheduling proactive refresh: {str(e)}")
+
+ # Process the original request
+ response = await call_next(request)
+ return response
+
+    def _extract_user_id(self, request: Request) -> Optional[str]:
+ """
+ Extract user ID from request context
+ """
+ try:
+ if hasattr(request.state, 'user_id'):
+ return request.state.user_id
+ return None
+ except Exception:
+ return None
+
+    def _should_check_proactive_refresh(self, user_id: str) -> bool:
+        """
+        Check whether enough time has passed to run a proactive refresh for this user
+        """
+        try:
+            current_time = get_utc_timestamp()
+            last_check = self.last_check.get(user_id, 0)
+
+            # Only check once per interval (default: every 5 minutes)
+            return (current_time - last_check) > (self.check_interval_minutes * 60)
+
+        except Exception:
+            return False
+
+ async def _proactive_refresh_tokens(self, user_id: str) -> None:
+ """
+ Perform proactive token refresh for the user
+ """
+ try:
+ logger.debug(f"Starting proactive token refresh for user {user_id}")
+
+ result = await token_refresh_service.proactive_refresh(user_id)
+
+ if result.get("refreshed", 0) > 0:
+ logger.info(f"Proactively refreshed {result['refreshed']} tokens for user {user_id}")
+
+ except Exception as e:
+ logger.error(f"Error in proactive token refresh for user {user_id}: {str(e)}")
diff --git a/modules/security/tokenRefreshService.py b/modules/security/tokenRefreshService.py
new file mode 100644
index 00000000..649960bc
--- /dev/null
+++ b/modules/security/tokenRefreshService.py
@@ -0,0 +1,291 @@
+"""
+Token Refresh Service for PowerOn Gateway
+
+This service handles automatic token refresh for OAuth connections
+when they are accessed via API calls. It runs silently in the background
+to ensure users don't experience token expiration issues.
+"""
+
+import logging
+from typing import Dict, Any
+from modules.interfaces.interfaceAppModel import UserConnection, AuthAuthority
+from modules.shared.timezoneUtils import get_utc_timestamp
+from modules.shared.auditLogger import audit_logger
+
+logger = logging.getLogger(__name__)
+
+class TokenRefreshService:
+ """Service for automatic token refresh operations"""
+
+ def __init__(self):
+ self.rate_limit_map = {} # Track refresh attempts per connection
+ self.max_attempts_per_hour = 3
+ self.refresh_window_minutes = 60
+
+ def _is_rate_limited(self, connection_id: str) -> bool:
+ """Check if connection is rate limited for refresh attempts"""
+ now = get_utc_timestamp()
+ if connection_id not in self.rate_limit_map:
+ return False
+
+ # Remove attempts older than 1 hour
+ recent_attempts = [
+ attempt_time for attempt_time in self.rate_limit_map[connection_id]
+ if now - attempt_time < (self.refresh_window_minutes * 60)
+ ]
+ self.rate_limit_map[connection_id] = recent_attempts
+
+ return len(recent_attempts) >= self.max_attempts_per_hour
+
+ def _record_refresh_attempt(self, connection_id: str) -> None:
+ """Record a refresh attempt for rate limiting"""
+ now = get_utc_timestamp()
+ if connection_id not in self.rate_limit_map:
+ self.rate_limit_map[connection_id] = []
+ self.rate_limit_map[connection_id].append(now)
+
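+    # Illustrative behavior of the sliding window above (assuming
+    # get_utc_timestamp returns epoch seconds): with max_attempts_per_hour = 3,
+    # a fourth refresh inside the 60-minute window is skipped.
+    #
+    #     svc = TokenRefreshService()
+    #     for _ in range(3):
+    #         svc._record_refresh_attempt("conn-1")
+    #     assert svc._is_rate_limited("conn-1") is True
+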
+    async def _refresh_oauth_token(self, interface, connection: UserConnection, provider: str) -> bool:
+        """Refresh an OAuth token (shared logic for Google and Microsoft)"""
+        try:
+            logger.debug(f"Refreshing {provider} token for connection {connection.id}")
+
+            # Get current token
+            current_token = interface.getConnectionToken(connection.id, auto_refresh=False)
+            if not current_token:
+                logger.warning(f"No {provider} token found for connection {connection.id}")
+                return False
+
+            # Delegate the provider-specific refresh to the token manager
+            from modules.security.tokenManager import TokenManager
+            token_manager = TokenManager()
+
+            refreshed_token = token_manager.refresh_token(current_token)
+            if not refreshed_token:
+                logger.warning(f"Failed to refresh {provider} token for connection {connection.id}")
+                return False
+
+            # Save the refreshed token
+            interface.saveConnectionToken(refreshed_token)
+
+            # Update connection status
+            interface.db.recordModify(UserConnection, connection.id, {
+                "lastChecked": get_utc_timestamp(),
+                "expiresAt": refreshed_token.expiresAt
+            })
+
+            logger.info(f"Successfully refreshed {provider} token for connection {connection.id}")
+
+            # Log audit event
+            try:
+                audit_logger.log_security_event(
+                    user_id=str(connection.userId),
+                    mandate_id="system",
+                    action="token_refresh",
+                    details=f"{provider} token refreshed for connection {connection.id}"
+                )
+            except Exception:
+                pass
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Error refreshing {provider} token for connection {connection.id}: {str(e)}")
+            return False
+
+    async def _refresh_google_token(self, interface, connection: UserConnection) -> bool:
+        """Refresh Google OAuth token"""
+        return await self._refresh_oauth_token(interface, connection, "Google")
+
+    async def _refresh_microsoft_token(self, interface, connection: UserConnection) -> bool:
+        """Refresh Microsoft OAuth token"""
+        return await self._refresh_oauth_token(interface, connection, "Microsoft")
+
+ async def refresh_expired_tokens(self, user_id: str) -> Dict[str, Any]:
+ """
+ Refresh expired OAuth tokens for a user
+
+ Args:
+ user_id: User ID to refresh tokens for
+
+ Returns:
+ Dict with refresh results
+ """
+ try:
+ logger.debug(f"Starting silent token refresh for user {user_id}")
+
+ # Get user interface
+ from modules.interfaces.interfaceAppObjects import getRootInterface
+ root_interface = getRootInterface()
+
+ # Get user connections
+ connections = root_interface.getUserConnections(user_id)
+ if not connections:
+ logger.debug(f"No connections found for user {user_id}")
+ return {"refreshed": 0, "failed": 0, "rate_limited": 0}
+
+ refreshed_count = 0
+ failed_count = 0
+ rate_limited_count = 0
+
+ # Process each connection
+ for connection in connections:
+ # Only refresh expired OAuth connections
+ if (connection.tokenStatus == 'expired' and
+ connection.authority in [AuthAuthority.GOOGLE, AuthAuthority.MSFT]):
+
+ # Check rate limiting
+ if self._is_rate_limited(connection.id):
+ logger.warning(f"Rate limited for connection {connection.id}")
+ rate_limited_count += 1
+ continue
+
+ # Record attempt
+ self._record_refresh_attempt(connection.id)
+
+ # Refresh based on authority
+ success = False
+ if connection.authority == AuthAuthority.GOOGLE:
+ success = await self._refresh_google_token(root_interface, connection)
+ elif connection.authority == AuthAuthority.MSFT:
+ success = await self._refresh_microsoft_token(root_interface, connection)
+
+ if success:
+ refreshed_count += 1
+ else:
+ failed_count += 1
+
+ result = {
+ "refreshed": refreshed_count,
+ "failed": failed_count,
+ "rate_limited": rate_limited_count
+ }
+
+ logger.info(f"Silent token refresh completed for user {user_id}: {result}")
+ return result
+
+ except Exception as e:
+ logger.error(f"Error during silent token refresh for user {user_id}: {str(e)}")
+ return {"refreshed": 0, "failed": 0, "rate_limited": 0, "error": str(e)}
+
+ async def proactive_refresh(self, user_id: str) -> Dict[str, Any]:
+ """
+ Proactively refresh tokens that expire within 5 minutes
+
+ Args:
+ user_id: User ID to check tokens for
+
+ Returns:
+ Dict with refresh results
+ """
+ try:
+ logger.debug(f"Starting proactive token refresh for user {user_id}")
+
+ # Get user interface
+ from modules.interfaces.interfaceAppObjects import getRootInterface
+ root_interface = getRootInterface()
+
+ # Get user connections
+ connections = root_interface.getUserConnections(user_id)
+ if not connections:
+ return {"refreshed": 0, "failed": 0, "rate_limited": 0}
+
+ refreshed_count = 0
+ failed_count = 0
+ rate_limited_count = 0
+ current_time = get_utc_timestamp()
+ five_minutes = 5 * 60 # 5 minutes in seconds
+
+ # Process each connection
+ for connection in connections:
+ # Only refresh active tokens that expire soon
+ if (connection.tokenStatus == 'active' and
+ connection.tokenExpiresAt and
+ connection.authority in [AuthAuthority.GOOGLE, AuthAuthority.MSFT]):
+
+ # Check if token expires within 5 minutes
+ time_until_expiry = connection.tokenExpiresAt - current_time
+ if 0 < time_until_expiry <= five_minutes:
+
+ # Check rate limiting
+ if self._is_rate_limited(connection.id):
+ logger.warning(f"Rate limited for proactive refresh of connection {connection.id}")
+ rate_limited_count += 1
+ continue
+
+ # Record attempt
+ self._record_refresh_attempt(connection.id)
+
+ # Refresh based on authority
+ success = False
+ if connection.authority == AuthAuthority.GOOGLE:
+ success = await self._refresh_google_token(root_interface, connection)
+ elif connection.authority == AuthAuthority.MSFT:
+ success = await self._refresh_microsoft_token(root_interface, connection)
+
+ if success:
+ refreshed_count += 1
+ logger.info(f"Proactively refreshed {connection.authority} token for connection {connection.id}")
+ else:
+ failed_count += 1
+
+ result = {
+ "refreshed": refreshed_count,
+ "failed": failed_count,
+ "rate_limited": rate_limited_count
+ }
+
+ if refreshed_count > 0:
+ logger.info(f"Proactive token refresh completed for user {user_id}: {result}")
+
+ return result
+
+ except Exception as e:
+ logger.error(f"Error during proactive token refresh for user {user_id}: {str(e)}")
+ return {"refreshed": 0, "failed": 0, "rate_limited": 0, "error": str(e)}
+
+# Global service instance
+token_refresh_service = TokenRefreshService()