From b405cebdec1ce0d006e5598adf14b086c78d87ca Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Wed, 29 Apr 2026 00:35:21 +0200
Subject: [PATCH] kdrive fix
---
env_dev.20260428_213450.backup | 107 +++
env_dev.env | 2 +
env_int.20260428_213451.backup | 100 +++
env_int.env | 6 +-
env_prod.20260428_213451.backup | 101 +++
env_prod.env | 2 +
env_prod_forgejo.20260428_213451.backup | 101 +++
env_prod_forgejo.env | 4 +-
modules/aicore/aicorePluginOpenai.py | 42 +-
modules/auth/oauthProviderConfig.py | 22 +-
modules/auth/tokenManager.py | 67 +-
modules/auth/tokenRefreshService.py | 47 +-
.../providerGoogle/connectorGoogle.py | 472 +++++++++-
.../providerInfomaniak/connectorInfomaniak.py | 824 ++++++++++++++----
.../connectors/providerMsft/connectorMsft.py | 421 +++++++++
.../routeFeatureGraphicalEditor.py | 6 +
.../workspace/routeFeatureWorkspace.py | 6 +
modules/interfaces/interfaceDbApp.py | 5 +-
modules/routes/routeDataConnections.py | 40 +-
modules/routes/routeSecurityGoogle.py | 13 +-
modules/routes/routeSecurityInfomaniak.py | 468 +++++-----
modules/routes/routeSecurityMsft.py | 10 +-
tests/unit/aicore/__init__.py | 0
.../test_aicorePluginOpenai_temperature.py | 66 ++
24 files changed, 2367 insertions(+), 565 deletions(-)
create mode 100644 env_dev.20260428_213450.backup
create mode 100644 env_int.20260428_213451.backup
create mode 100644 env_prod.20260428_213451.backup
create mode 100644 env_prod_forgejo.20260428_213451.backup
create mode 100644 tests/unit/aicore/__init__.py
create mode 100644 tests/unit/aicore/test_aicorePluginOpenai_temperature.py
diff --git a/env_dev.20260428_213450.backup b/env_dev.20260428_213450.backup
new file mode 100644
index 00000000..b6cffdf0
--- /dev/null
+++ b/env_dev.20260428_213450.backup
@@ -0,0 +1,107 @@
+# Development Environment Configuration
+
+# System Configuration
+APP_ENV_TYPE = dev
+APP_ENV_LABEL = Development Instance Patrick
+APP_API_URL = http://localhost:8000
+APP_KEY_SYSVAR = D:/Athi/Local/Web/poweron/local/notes/key.txt
+APP_INIT_PASS_ADMIN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEeFFtRGtQeVUtcjlrU3dab1ZxUm9WSks0MlJVYUtERFlqUElHemZrOGNENk1tcmJNX3Vxc01UMDhlNU40VzZZRVBpUGNmT3podzZrOGhOeEJIUEt4eVlSWG5UYXA3d09DVXlLT21Kb1JYSUU9
+APP_INIT_PASS_EVENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpERzZjNm56WGVBdjJTeG5Udjd6OGQwUVotYXUzQjJ1YVNyVXVBa3NZVml3ODU0MVNkZjhWWmJwNUFkc19BcHlHMTU1Q3BRcHU0cDBoZkFlR2l6UEZQU3d2U3MtMDh5UDZteGFoQ0EyMUE1ckE9
+
+# PostgreSQL DB Host
+DB_HOST=localhost
+DB_USER=poweron_dev
+DB_PASSWORD_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEcUIxNEFfQ2xnS0RrSC1KNnUxTlVvTGZoMHgzaEI4Z3NlVzVROTVLak5Ubi1vaEZubFZaMTFKMGd6MXAxekN2d2NvMy1hRjg2UVhybktlcFA5anZ1WjFlQmZhcXdwaGhWdzRDc3ExeUhzWTg9
+DB_PORT=5432
+
+# Security Configuration
+APP_JWT_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpERjlrSktmZHVuQnJ1VVJDdndLaUcxZGJsT2ZlUFRlcFdOZ001RnlzM2FhLWhRV2tjWWFhaWQwQ3hkcUFvbThMcndxSjFpYTdfRV9OZGhTcksxbXFTZWg5MDZvOHpCVXBHcDJYaHlJM0tyNWRZckZsVHpQcmxTZHJoZUs1M3lfU2ljRnJaTmNSQ0w0X085OXI0QW80M2xfQnJqZmZ6VEh3TUltX0xzeE42SGtZPQ==
+APP_TOKEN_EXPIRY=300
+
+# CORS Configuration
+APP_ALLOWED_ORIGINS=http://localhost:8080,http://localhost:5176,https://playground.poweron-center.net
+
+# Logging configuration
+APP_LOGGING_LOG_LEVEL = DEBUG
+APP_LOGGING_LOG_DIR = D:/Athi/Local/Web/poweron/local/logs
+APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
+APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
+APP_LOGGING_CONSOLE_ENABLED = True
+APP_LOGGING_FILE_ENABLED = True
+APP_LOGGING_ROTATION_SIZE = 10485760
+APP_LOGGING_BACKUP_COUNT = 5
+
+# OAuth: Auth app (login/JWT) vs Data app (Microsoft Graph / Google APIs). Same IDs until you split apps in Azure / GCP.
+Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_AUTH_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm83T29rV1pQelMtc1p1MXR4NTFpa19CTEhHQ0xfNmdPUmZqcWp5UHBMS0hYTGl4c1pPdmhTNTJVWUl5WnlnUUZhV0VTRzVCb0d5YjR1NnZPZk5CZ0dGazNGdUJVbjkxeVdrYlNiVjJUYzF2aVFtQnVxTHFqTTJqZlF0RTFGNmE1OGN1TEk=
+Service_MSFT_AUTH_REDIRECT_URI = http://localhost:8000/api/msft/auth/login/callback
+Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_DATA_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm83T29rV1pQelMtc1p1MXR4NTFpa19CTEhHQ0xfNmdPUmZqcWp5UHBMS0hYTGl4c1pPdmhTNTJVWUl5WnlnUUZhV0VTRzVCb0d5YjR1NnZPZk5CZ0dGazNGdUJVbjkxeVdrYlNiVjJUYzF2aVFtQnVxTHFqTTJqZlF0RTFGNmE1OGN1TEk=
+Service_MSFT_DATA_REDIRECT_URI = http://localhost:8000/api/msft/auth/connect/callback
+
+Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_AUTH_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETDJhbGVQMHlFQzNPVFI1ZzBMa3pNMGlQUHhaQm10eVl1bFlSeTBybzlTOWE2MURXQ0hkRlo0NlNGbHQxWEl1OVkxQnVKYlhhOXR1cUF4T3k0WDdscktkY1oyYllRTmdDTWpfbUdwWGtSd1JvNlYxeTBJdEtaaS1vYnItcW0yaFM=
+Service_GOOGLE_AUTH_REDIRECT_URI = http://localhost:8000/api/google/auth/login/callback
+Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_DATA_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETDJhbGVQMHlFQzNPVFI1ZzBMa3pNMGlQUHhaQm10eVl1bFlSeTBybzlTOWE2MURXQ0hkRlo0NlNGbHQxWEl1OVkxQnVKYlhhOXR1cUF4T3k0WDdscktkY1oyYllRTmdDTWpfbUdwWGtSd1JvNlYxeTBJdEtaaS1vYnItcW0yaFM=
+Service_GOOGLE_DATA_REDIRECT_URI = http://localhost:8000/api/google/auth/connect/callback
+
+# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
+Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
+Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
+Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
+
+# Infomaniak OAuth -- Data App (kDrive + Mail)
+Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
+Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
+Service_INFOMANIAK_OAUTH_REDIRECT_URI = http://localhost:8000/api/infomaniak/auth/connect/callback
+
+# Stripe Billing (both end with _SECRET for encryption script)
+STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
+STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
+STRIPE_API_VERSION = 2026-01-28.clover
+STRIPE_AUTOMATIC_TAX_ENABLED = false
+STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
+
+# AI configuration
+Connector_AiOpenai_API_SECRET = DEV_ENC:Z0FBQUFBQnBaSnM4TWFRRmxVQmNQblVIYmc1Y0Q3aW9zZUtDWlNWdGZjbFpncGp2NHN2QjkxMWxibUJnZDBId252MWk5TXN3Yk14ajFIdi1CTkx2ZWx2QzF5OFR6LUx5azQ3dnNLaXJBOHNxc0tlWmtZcTFVelF4eXBSM2JkbHd2eTM0VHNXdHNtVUprZWtPVzctNlJsZHNmM20tU1N6Q1Q2cHFYSi1tNlhZNDNabTVuaEVGWmIydEhadTcyMlBURmw2aUJxOF9GTzR0dTZiNGZfOFlHaVpPZ1A1LXhhOEFtN1J5TEVNNWtMcGpyNkMzSl8xRnZsaTF1WTZrOUZmb0cxVURjSGFLS2dIYTQyZEJtTm90bEYxVWxNNXVPdTVjaVhYbXhxT3JsVDM5VjZMVFZKSE1tZnM9
+Connector_AiAnthropic_API_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpENmFBWG16STFQUVZxNzZZRzRLYTA4X3lRanF1VkF4cU45OExNMzlsQmdISGFxTUxud1dXODBKcFhMVG9KNjdWVnlTTFFROVc3NDlsdlNHLUJXeG41NDBHaXhHR0VHVWl5UW9RNkVWbmlhakRKVW5pM0R4VHk0LUw0TV9LdkljNHdBLXJua21NQkl2b3l4UkVkMGN1YjBrMmJEeWtMay1jbmxrYWJNbUV0aktCXzU1djR2d2RSQXZORTNwcG92ZUVvVGMtQzQzTTVncEZTRGRtZUFIZWQ0dz09
+Connector_AiPerplexity_API_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5ZmdDZ3hrSElrMnQzNFAtel9wX191VjVzN2g1LWZoa0V1YklubEdmMEJDdEZiR1RWeVZrM3V3enBHX3p6WUtTS0kwYkFyVEF0Nm8zX05CelVQcFJUc0lwVW5iNFczc1p1WWJ2WFBmd0lpLUxxWndEeUh0b2hGUHVpN19vb19nMTBnV1A1VmNpWERVX05lQ29VS20wTjZ3PT0=
+Connector_AiTavily_API_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEQTdnUHMwd2pIaXNtMmtCTFREd0pyQXRKb1F5eGtHSnkyOGZiUnlBOFc0b3Vzcndrc3ViRm1nMDJIOEZKYWxqdWNkZGh5N0Z4R0JlQmxXSG5pVnJUR2VYckZhMWNMZ1FNeXJ3enJLVlpiblhOZTNleUg3ZzZyUzRZanFSeDlVMkI=
+Connector_AiPrivateLlm_API_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGRHM5eFdUVmVZU1R1cHBwN1RlMUx4T0NlLTJLUFFVX3J2OElDWFpuZmJHVmp4Z3BNNWMwZUVVZUd2TFhRSjVmVkVlcFlVRWtybXh0ZHloZ01ZcnVvX195YjdlWVdEcjZSWFFTTlNBWUlaTlNoLWhqVFBIb0thVlBiaWhjYjFQOFY=
+Connector_AiMistral_API_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGeEQxYUIxOHhia0JlQWpWQ2dWQWZzY3l6SWwyUnJoR1hRQWloX2lxb2lGNkc4UnA4U2tWNjJaYzB1d1hvNG9fWUp1N3V4OW9FMGhaWVhjSlVwWEc1X2loVDBSZDEtdHdfcTA5QkcxQTR4OHc4RkRzclJrU2d1RFZpNDJkRDRURlE=
+
+Service_MSFT_TENANT_ID = common
+
+# Google Cloud Speech Services configuration
+Connector_GoogleSpeech_API_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETk5FWWM3Q0JKMzhIYTlyMkhuNjA4NlF4dk82U2NScHhTVGY3UG83NkhfX3RrcWVtWWcyLXRjU1dTT21zWEl6YWRMMUFndXpsUnJOeHh3QThsNDZKRXROTzdXRUdsT0JZajZJNVlfb0gtMXkwWm9DOERPVnpjU0pyUEZfOGJsUnprT3ltMVVhalUyUm9hMUFtZEtHUnJqOGZ4dEZjZm5SWVVTckVCWnY1UkdVSHVmUlgwbnAyc0xDQW84R3ViSko5OHVCVWZRUVNiaG1pVFB6X3EwS0FPd2dUYjhiSmRjcXh2WEZiXzI4SFZqT21tbDduUWRyVWdFZXpmcVM5ZDR0VWtzZnF5UER6cGwwS2JlLV9CSTZ0Z0IyQ1h0YW9TcmhRTXZEckp4bWhmTkt6UTNYMk4zVkpnbUJmaDIxZnoyR2dWTEYwTUFEV0w2eUdUUGpoZk9XRkt4RVF1Z1NPdUpBeTcyWV9PY1Ffd2s0ZEdVekxGekhoeEl4TmNqaXYtbUJuSVdycFducERWdWtZajZnX011Q2w4eE9VMTBqQ1ZxRmdScWhXY1E3WWhzX1JZcHhxam9FbDVPN3Q1MWtrMUZuTUg3LVFQVHp1T1hpQWNDMzEzekVJWk9ybl91YUVjSkFob1VaMi1ONEtuMnRSOEg1S3QybUMwbVZDejItajBLTjM2Zy1hNzZQMW5LLVVDVGdFWm5BZUxNeEFnUkZzU3dxV0lCUlc0LWo4b05GczVpOGZSV2ZxbFBwUml6OU5tYjdnTks3Y3hrVEZVTHlmc1NPdFh4WE5pWldEZklOQUxBbjBpMTlkX3FFQVJ6c2NSZGdzTThycE92VW82enZKamhiRGFnU25aZGlHZHhZd2lUUmhuTVptNjhoWVlJQkxIOEkzbzJNMjZCZFJyM25tdXBnQ2ZWaHV3b2p6UWJpdk9xUEhBc1dyTlNmeF9wbm5yYUhHV01UZnVXWDFlNzBkdXlWUWhvcmJpSmljbmE3LUpUZEg4VzRwZ2JVSjdYUm1sODViQXVxUzdGTmZFbVpiN2V1YW5XV3U4b2VRWmxldGVGVHZsSldoekhVLU9wZ2V0cGZIYkNqM2pXVGctQVAyUm4xTHhpd1VVLXFhcnVEV21Rby1hbTlqTl84TjVveHdYTExUVkhHQ0ltaTB2WXJnY1NQVE5PbWg3ejgySElYc1JSTlQ3NDlFUWR6STZVUjVqaXFRN200NF9LY1ljQ0R2UldlWUtKY1NQVnJ4QXRyYTBGSWVuenhyM0Z0cWtndTd1eG8xRzY5a2dNZ1hkQm5MV3BHVzA2N1QwUkd6WlRGYTZQOUhnVWQ2S0Y5U0s1dXFNVXh5Q2pLWVUxSUQ2MlR1ak52NmRIZ2hlYTk1SGZGWS1RV3hWVU9rR3d1Rk9MLS11REZXbzhqMHpsSm1HYW1jMUNLT29YOHZsRWNaLTVvOFpmT3l3MHVwaERTT0dNLWFjcGRYZ25qT2szTkVFUnRFR3JWYS1aNXFIRnMyalozTlQzNFF2NXJLVHVPVF9zdTF6ZjlkbzJ4RFc2ZENmNFFxZDZzTzhfMUl0bW96V0lPZkh1dXFYZlEteFBlSG84Si1FNS1TTi1OMkFnX2pOYW8xY3MxMVJnVC02MDUyaXZfMEVHWDQtVlRpcENmV0h3V0dCWEFRS2prQXdNRlQ5dnRFVHU0Q1dNTmh0SlBCaU55bFMydWM1TTFFLW96ODBnV3dNZHFZTWZhRURYSHlrdzF3RlRuWDBoQUhSOUJWemtRM3pxcDJFbGJoaTJ3ZktRTlJxbXltaHBoZXVJVDlxS3cxNWo2c0ZBV0NzaUstRWdsMW1xLXFkanZGYUFiU0tSLXFQa0tkcDFoMV9kak41ZjQ0R214UmtOR1ZBanRuemY3Mmw1SkZ5aDZodGIzT3N2aV85MW9kcld6c0g0ZDgtTWo
3b3Y3VjJCRnR2U2tMVm9rUXNVRnVHbzZXVTZ6RmI2RkNmajBfMWVnODVFbnpkT0oyci15czJHU0p1cUowTGZJMzVnd3hIRjQyTVhKOGRkcFRKdVpyQ3Yzd01Jb1lSajFmV0paeEV0cjk1SmpmdWpDVFJMUmMtUFctOGhaTmlKQXNRVlVUNlhJemxudHZCR056SVlBb3NOTEYxRTRLaFlVd2d3TWtxVlB6ZEtQLTkxOGMyY3N0a2pYRFUweDBNaGhja2xSSklPOUZla1dKTWRNbG8tUGdSNEV5cW90OWlOZFlIUExBd3U2b2hyS1owbXVMM3p0Qm41cUtzWUxYNzB1N3JpUTNBSGdsT0NuamNTb1lIbXR4MG1sakNPVkxBUXRLVE1xX0YxWDhOcERIY1lTQVFqS01CaXZKNllFaXlIR0JsM1pKMmV1OUo3TGI1WkRaVnYxUTl1LTM0SU1qN1V1b0RCT0x0VHNLTmNLZnk1S0MxYnBBcm03WnVua0xqaEhGUzhOU253ZkppRzdudXBSVlMxeFVOSWxtZ1o2RVBSQUhEUEFuQ1hxSVZMME4yWUtaU3VyRGo3RkUyRUNjT0pNcE1BdE1ZRzdXVl8ydUtXZjdMdHdEVW4teHUtTi1HSGliLUxud21TX0NtcGVkRFBHNkZ1WTlNczR4OUJfUVluc1BoV09oWS1scUdsNnB5d1U5M1huX3k4QzAyNldtb2hybktYN2xKZ1NTNWFsaWwzV3pCRVhkaGR5eTNlV1d6ZzFfaFZTT0E4UjRpQ3pKdEZxUlJ6UFZXM3laUndyWEk2NlBXLUpoajVhZzVwQXpWVzUtVjVNZFBwdWdQa3AxZC1KdGdqNnhibjN4dmFYb2cxcEVwc1g5R09zRUdINUZtOE5QRjVUU0dpZy1QVl9odnFtVDNuWFZLSURtMXlSMlhRNTBWSVFJbEdOOWpfVWV0SmdRWDdlUXZZWE8xRUxDN1I0aEN6MHYwNzM1cmpJS0ZpMnBYWkxfb3FsbEV1VnlqWGxqdVJ6SHlwSjAzRlMycTBaQ295NXNnZERpUnJQcjhrUUd3bkI4bDVzRmxQblhkaFJPTTdISnVUQmhET3BOMTM4bjVvUEc2VmZhb2lrR1FyTUl2RWNEeGg0U0dsNnV6eU5zOUxiNDY5SXBxR0hBS00wOTgyWTFnWkQyaEtLVUloT3ZxZGh0RWVGRmJzenFsaUtfZENQM0JzdkVVeTdXR3hUSmJST1NBMUI1NkVFWncwNW5JZVVLX1p1RXdqVnFfQWpvQ08yQjZhN1NkTkpTSnUxOVRXZXE0WFEtZWxhZW1NNXYtQ2sya0VGLURmS01lMkctNVY3c2ZhN0ZGRFgwWHlabTFkeS1hcUZ1dDZ3cnpPQ3hha2IzVE11M0pqbklmU0diczBqTFBNZC1QZGp6VzNTSnJVSjJoWkJUQjVORG4tYUJmMEJtSUNUdVpEaGt6OTM3TjFOdVhXUHItZjRtZ25nU3NhZC1sVTVXNTRDTmxZbnlfeHNsdkpuMXhUYnE1MnpVQ0ZOclRWM1M4eHdXTzRXbFRZZVQtTS1iRVdXVWZMSGotcWg3MUxUYTFnSEEtanBCRHlZRUNIdGdpUFhsYjdYUndCZnRITzhMZVJ1dHFoVlVNb0duVjlxd0U4OGRuQVV3MG90R0hiYW5MWkxWVklzbWFRNzBfSUNrdzc5bVdtTXg0dExEYnRCaDI3c1I4TWFwLXZKR0wxSjRZYjZIV3ZqZjNqTWhFT0RGSDVMc1A1UzY2bDBiMGFSUy1fNVRQRzRJWDVydUpqb1ZfSHNVbldVeUN2YlAxSW5WVDdxVzJ1WHpLeUdmb0xWMDNHN05oQzY3YnhvUUdhS2xaOHNidkVvbTZtSHFlblhOYmwyR3NQdVJDRUdxREhWdF9ZcXhwUWxHc2hyLW5vUGhIUVhJNUNhY0hFU0ptVnI0TFVhZDE1TFBBUEstSkRoZWJ5MHJhUmZrR1ZrRlFtRGpxS1p
OMmFMQjBsdjluY3FiYUU4eGJVVXlZVEpuNWdHVVhJMGtwaTdZR2NDbXd2eHpOQ09SeTV6N1BaVUpsR1pQVDBZcElJUUt6VnVpQmxSYnE4Y1BCWV9IRWdVV0p3enBGVHItdnBGN3NyNWFBWmkySnByWThsbDliSlExQmp3LVlBaDIyZXp6UnR6cU9rTzJmTDBlSVpON0tiWllMdm1oME1zTFl2S2ZYYllhQlY2VHNZRGtHUDY4U1lIVExLZTU4VzZxSTZrZHl1ZTBDc0g4SjI4WGYyZHV1bm9wQ3R2Z09ld1ZmUkN5alJGeHZKSHl1bWhQVXpNMzdjblpLcUhfSm02Qlh5S1FVN3lIcHl0NnlRPT0=
+
+# Feature SyncDelta JIRA configuration
+Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEbm0yRUJ6VUJKbUwyRW5kMnRaNW4wM2YxMkJUTXVXZUdmdVRCaUZIVHU2TTV2RWZLRmUtZkcwZE4yRUNlNDQ0aUJWYjNfdVg5YjV5c2JwMHhoUUYxZWdkeS11bXR0eGxRLWRVaVU3cUVQZWJlNDRtY1lWUDdqeDVFSlpXS0VFX21WajlRS3lHQjc0bS11akkybWV3QUFlR2hNWUNYLUdiRjZuN2dQODdDSExXWG1Dd2ZGclI2aUhlSWhETVZuY3hYdnhkb2c2LU1JTFBvWFpTNmZtMkNVOTZTejJwbDI2eGE0OS1xUlIwQnlCSmFxRFNCeVJNVzlOMDhTR1VUamx4RDRyV3p6Tk9qVHBrWWdySUM3TVRaYjd3N0JHMFhpdzFhZTNDLTFkRVQ2RVE4U19COXRhRWtNc0NVOHRqUS1CRDFpZ19xQmtFLU9YSDU3TXBZQXpVcld3PT0=
+
+# Teamsbot Browser Bot Service
+# For local testing: run the bot locally with `npm run dev` in service-teams-browser-bot
+# The bot will connect back to localhost:8000 via WebSocket
+TEAMSBOT_BROWSER_BOT_URL = http://localhost:4100
+
+# Debug Configuration
+APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
+APP_DEBUG_CHAT_WORKFLOW_DIR = D:/Athi/Local/Web/poweron/local/debug
+APP_DEBUG_ACCOUNTING_SYNC_ENABLED = True
+APP_DEBUG_ACCOUNTING_SYNC_DIR = D:/Athi/Local/Web/poweron/local/debug/sync
+
+# Mandate Pre-Processing Servers
+PREPROCESS_ALTHAUS_CHAT_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGbEphQ3ZUMlFMQ2EwSGpoSE9NNzRJNTJtaGk1N0RGakdIYnVVeVFHZmF5OXB3QTVWLVNaZk9wNkhfQkZWRnVwRGRxem9iRzJIWXdpX1NIN2FwSExfT3c9PQ==
+
+# Preprocessor API Configuration
+PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
+PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
+
+# Azure Communication Services Email Configuration
+MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
+MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
+
+# Zurich WFS Parcels (dynamic map layer). Default: Stadt Zürich OGD. Override for full canton if wfs.zh.ch resolves.
+# Connector_ZhWfsParcels_WFS_URL = https://wfs.zh.ch/av
+# Connector_ZhWfsParcels_TYPENAMES = av_li_liegenschaften_a
+
diff --git a/env_dev.env b/env_dev.env
index 60bc5511..5ae0d219 100644
--- a/env_dev.env
+++ b/env_dev.env
@@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kDrive + mail) via the UI.
+
# Stripe Billing (both end with _SECRET for encryption script)
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
diff --git a/env_int.20260428_213451.backup b/env_int.20260428_213451.backup
new file mode 100644
index 00000000..45236a09
--- /dev/null
+++ b/env_int.20260428_213451.backup
@@ -0,0 +1,100 @@
+# Integration Environment Configuration
+
+# System Configuration
+APP_ENV_TYPE = int
+APP_ENV_LABEL = Integration Instance
+APP_API_URL = https://gateway-int.poweron-center.net
+APP_KEY_SYSVAR = CONFIG_KEY
+APP_INIT_PASS_ADMIN_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjWm41MWZ4TUZGaVlrX3pWZWNwakJsY3Facm0wLVZDd1VKeTFoZEVZQnItcEdUUnVJS1NXeDBpM2xKbGRsYmxOSmRhc29PZjJSU2txQjdLbUVrTTE1NEJjUXBHbV9NOVJWZUR3QlJkQnJvTEU9
+APP_INIT_PASS_EVENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjdmtrakgxa0djekZVNGtTZV8wM2I5UUpCZllveVBMWXROYk5yS3BiV3JEelJSM09VYTRONHpnY3VtMGxDRk5JTEZSRFhtcDZ0RVRmZ1RicTFhb3c5dVZRQ1o4SmlkLVpPTW5MMTU2eTQ0Vkk9
+
+# PostgreSQL DB Host
+DB_HOST=gateway-int-server.postgres.database.azure.com
+DB_USER=heeshkdlby
+DB_PASSWORD_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjczYzOUtTa21MMGJVTUQ5UmFfdWc3YlhCbWZOeXFaNEE1QzdJV3BLVjhnalBkLVVCMm5BZzdxdlFXQXc2RHYzLWtPSFZkZE1iWG9rQ1NkVWlpRnF5TURVbnl1cm9iYXlSMGYxd1BGYVc0VDA9
+DB_PORT=5432
+
+# Security Configuration
+APP_JWT_KEY_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNUctb2RwU25iR3ZnanBOdHZhWUtIajZ1RnZzTEp4aDR0MktWRjNoeVBrY1Npd1R0VE9YVHp3M2w1cXRzbUxNaU82QUJvaDNFeVQyN05KblRWblBvbWtoT0VXbkNBbDQ5OHhwSUFnaDZGRG10Vmgtdm1YUkRsYUhFMzRVZURmSFlDTFIzVWg4MXNueDZyMGc5aVpFdWRxY3dkTExGM093ZTVUZVl5LUhGWnlRPQ==
+APP_TOKEN_EXPIRY=300
+
+# CORS Configuration
+APP_ALLOWED_ORIGINS=http://localhost:8080,https://playground.poweron-center.net,https://playground-int.poweron-center.net,http://localhost:5176,https://nyla.poweron-center.net, https://nyla-int.poweron-center.net
+
+# Logging configuration
+APP_LOGGING_LOG_LEVEL = DEBUG
+APP_LOGGING_LOG_DIR = /home/site/wwwroot/
+APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
+APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
+APP_LOGGING_CONSOLE_ENABLED = True
+APP_LOGGING_FILE_ENABLED = True
+APP_LOGGING_ROTATION_SIZE = 10485760
+APP_LOGGING_BACKUP_COUNT = 5
+
+# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
+Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_AUTH_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm83T29rMDZvcV9qTG5xb1FzUkdqS1llbzRxSEJXbmpONFFtcUtfZXdtZjQybmJSMjBjMEpnRVhiOGRuczZvVFBFdVVTQV80SG9PSnRQTEpLdVViNm5wc2E5aGRLWjZ4TGF1QjVkNmdRSzBpNWNkYXVublFYclVEdEM5TVBBZWVVMW5RVWk=
+Service_MSFT_AUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/msft/auth/login/callback
+Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm83T29rMDZvcV9qTG5xb1FzUkdqS1llbzRxSEJXbmpONFFtcUtfZXdtZjQybmJSMjBjMEpnRVhiOGRuczZvVFBFdVVTQV80SG9PSnRQTEpLdVViNm5wc2E5aGRLWjZ4TGF1QjVkNmdRSzBpNWNkYXVublFYclVEdEM5TVBBZWVVMW5RVWk=
+Service_MSFT_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/msft/auth/connect/callback
+
+Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_AUTH_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
+Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/login/callback
+Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
+Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/connect/callback
+
+# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
+Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
+Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
+Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/clickup/auth/connect/callback
+
+# Infomaniak OAuth -- Data App (kDrive + Mail)
+Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
+Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
+Service_INFOMANIAK_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/infomaniak/auth/connect/callback
+
+# Stripe Billing (both end with _SECRET for encryption script)
+STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
+STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
+STRIPE_API_VERSION = 2026-01-28.clover
+STRIPE_AUTOMATIC_TAX_ENABLED = false
+STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
+
+# AI configuration
+Connector_AiOpenai_API_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4MENkQ2xJVmE5WFZKUkh2SHJFby1YVXN3ZmVxRkptS3ZWRmlwdU93ZEJjSjlMV2NGbU5mS3NCdmFfcmFYTEJNZXFIQ3ozTWE4ZC1pemlQNk9wbjU1d3BPS0ZCTTZfOF8yWmVXMWx0TU1DamlJLVFhSTJXclZsY3hMVWlPcXVqQWtMdER4T252NHZUWEhUOTdIN1VGR3ltazEweXFqQ0lvb0hYWmxQQnpxb0JwcFNhRDNGWXdoRTVJWm9FalZpTUF5b1RqZlRaYnVKYkp0NWR5Vko1WWJ0Wmg2VWJzYXZ0Z3Q4UkpsTldDX2dsekhKMmM4YjRoa2RwemMwYVQwM2cyMFlvaU5mOTVTWGlROU8xY2ZVRXlxZzJqWkxURWlGZGI2STZNb0NpdEtWUnM9
+Connector_AiAnthropic_API_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjT1ZlRWVJdVZMT3ljSFJDcFdxRFBRVkZhS204NnN5RDBlQ0tpenhTM0FFVktuWW9mWHNwRWx2dHB0eDBSZ0JFQnZKWlp6c01pVGREWHd1eGpERnU0Q2xhaks1clQ1ZXVsdnd2ZzhpNXNQS1BhY3FjSkdkVEhHalNaRGR4emhpakZncnpDQUVxOHVXQzVUWmtQc0FsYmFwTF9TSG5FOUFtWk5Ick1NcHFvY2s1T1c2WXlRUFFJZnh6TWhuaVpMYmppcDR0QUx0a0R6RXlwbGRYb1R4dzJkUT09
+Connector_AiPerplexity_API_SECRET = INT_ENC:Z0FBQUFBQnB5dkd6UkhtU3lhYmZMSlo0bklQZ2s3UTFBSkprZTNwWkg5Q2lVa0wtenhxWXpva21xVDVMRjdKSmhpTmxWS05IUTRoRHdCbktSRVVjcVFnY1RfV0N2S2dyV0dTMlhxQlRFVm41RkFTWVQzQThuVkZwdlNuVC05QlVRVXB6Qjk3akNpYmY1MFR6R1ByMzlIMllRZlRRYVVRN2ZBPT0=
+Connector_AiTavily_API_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkdkJMTDY0akhXNzZDWHVYSEt1cDZoOWEzSktneHZEV2JndTNmWlNSMV9KbFNIZmQzeVlrNE5qUEIwcUlBSGM1a0hOZ3J6djIyOVhnZzI3M1dIUkdicl9FVXF3RGktMmlEYmhnaHJfWTdGUkktSXVUSGdQMC1vSEV6VE8zR2F1SVk=
+Connector_AiPrivateLlm_API_SECRET = INT_ENC:Z0FBQUFBQnBudkpGSjZ1NWh0aWc1R3Z4MHNaeS1HamtUbndhcUZFZDlqUDhjSmg5eHFfdlVkU0RsVkJ2UVRaMWs3aWhraG5jSlc0YkxNWHVmR2JoSW5ENFFCdkJBM0VienlKSnhzNnBKbTJOUTFKczRfWlQ3bWpmUkRTT1I1OGNUSTlQdExacGRpeXg=
+Connector_AiMistral_API_SECRET = INT_ENC:Z0FBQUFBQnBudkpGZTNtZ1E4TWIxSEU1OUlreUpxZkJIR0Vxcm9xRHRUbnBxbTQ1cXlkbnltWkJVdTdMYWZ4c3Fsam42TERWUTVhNzZFMU9xVjdyRGFCYml6bmZsZFd2YmJzemlrSWN6Q3o3X0NXX2xXNUQteTNONHdKYzJ5YVpLLWdhU2JhSTJQZnI=
+
+Service_MSFT_TENANT_ID = common
+
+# Google Cloud Speech Services configuration
+Connector_GoogleSpeech_API_KEY_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkNmVXZ1pWcHcydTF2MXF0ZGJoWHBydF85bTczTktiaEJ3Wk1vMW1mZVhDSG1yd0ZxR2ZuSGJTX0N3MWptWXFJTkNTWjh1SUVVTXI4UDVzcGdLMkU5SHJ2TUpkRlRoRWdnSldtYjNTQkh4UDJHY2xmdTdZQ1ZiMTZZcGZxS3RzaHdjV3dtVkZUcEpJcWx0b2xuQVR6ZmpoVFZPY1hNMTV2SnhDaC1IZEh4UUpLTy1ILXA4RG1zamJTbUJ4X0t2M2NkdzJPbEJxSmFpRzV3WC0wZThoVzlxcmpHZ3ZkLVlVY3REZk1vV19WQ05BOWN6cnJ4MWNYYnNiQ0FQSUVnUlpfM3BhMnlsVlZUOG5wM3pzM1lSN1UzWlZKUXRLczlHbjI1LTFvSUJ4SlVXMy1BNk43bE5Hb0RfTTVlWk9oZnFIaVg0SW5pbm9EcXRTTzU1RFlYY3dTcnpKWWNyNjN5T1BGZ0FmX253cEFncmhvZVRuM05KYzhkOEhFMFJsc2NBSEwzZVZ1R0JMOGxsekVwUE55alZaRXFrdzNWWVNGWXNmbnhKeWhQSFo2VXBTUlRPeHdvdVdncEFuOWgydEtsSUFneUN6cGVaTnBSdjNCdVJseGJFdmlMc203UFhLVlYyTENkaGg2dVN6Z2xwT1ZmTmN5bVZGUkM3ZWcyVkt2ckFUVVd3WFFwYnJjNVRobEh2SkVJbXRwUUpEOFJKQ1NUc0Q4NHNqUFhPSDh5cTV6MEcwSDEwRUJCQ2JiTTJlOE5nd3pMMkJaQ1dVYjMwZVVWWnlETmp2dkZ3aXEtQ29WNkxZTFkzYUkxdTlQUU1OTnhWWU12YU9MVnJQa1d2ZjRtUlhneTNubEMxTmp1eUNPOThSMlB3Y1F0T2tCdFNsNFlKalZPV25yR2QycVBUb096RmZ1V0FTaGsxLV9FWDBmenBIOXpMdGpLcUc0TWRoY2hlMFhYTzlET1ZRekw0ZHNwUVBQdVJBX2h6Q2ZzWVZJWTNybTJiekp3WmhmWF9SUFBXQzlqUjctcVlHWWVMZWVQallzR0JGTVF0WmtnWlg1aTM1bFprNVExZXY5dnNvWF93UjhwbkJ3RzNXaVJ2d2RRU3JJVlBvaVh4eTlBRUtqWkJia3dJQVVBV2Nqdm9FUTRUVW1TaHp2ZUwxT0N2ZndxQ2Nka1RYWXF0LWxIWFE0dTFQcVhncFFPM0hFdUUtYlFnemx3WkF4bjA1aDFULUdrZlVZbEJtRGRCdjJyVkdJSXozd0I0dF9zbWhOeHFqRDA4T1NVaWR5cjBwSVgwbllPU294NjZGTnM1bFhIdGpNQUxFOENWd3FCbGpSRFRmRXotQnU0N2lCVEU5RGF6Qi10S2U2NGdadDlrRjZtVE5oZkw5ZWFjXzhCTmxXQzNFTFgxRXVYY3J3YkxnbnlBSm9PY3h4MlM1NVFQbVNDRW5Ld1dvNWMxSmdoTXJuaE1pT2VFeXYwWXBHZ29MZDVlN2lwUUNIeGNCVVdQVi1rRXdJMWFncUlPTXR0MmZVQ1l0d09mZTdzWGFBWUJMUFd3b0RSOU8zeER2UWpNdzAxS0ZJWnB5S3FJdU9wUDJnTTNwMWw3VFVqVXQ3ZGZnU1RkUktkc0NhUHJ0SGFxZ0lVWDEzYjNtU2JfMGNWM1Y0dHlCTzNESEdENC1jUWF5MVppRzR1QlBNSUJySjFfRi1ENHEwcmJ4S3hQUFpXVHA0TG9DZWdoUlo5WnNSM1lCZm1KbEs2ak1yUUU4Wk9JcVJGUkJwc0NvUkMyTjhoTWxtZmVQeDREZVRKZkhYN2duLVNTeGZzdFdBVnhEandJSXB5QjM0azF0ckI3Tk1wSzFhNGVOUVRrNjU0cG9JQ29pN09xOFkwR1lMTlktaGp4TktxdTVtTnNEcldsV2pEZm5nQWpJc2hxY0hjQnVSWUR5VVdaUXB
HWUloTzFZUC1oNzJ4UjZ1dnpLcDJxWEZtQlNIMWkzZ0hXWXdKeC1iLXdZWVJhcU04VFlpMU5pd2ZIdTdCdkVWVFVBdmJuRk16bEFFQTh4alBrcTV2RzliT2hGdTVPOXlRMjFuZktiRTZIamQ1VFVqS0hRTXhxcU1mdkgyQ1NjQmZfcjl4c3NJd0RIeDVMZUFBbHJqdEJxWWl3aWdGUEQxR3ZnMkNGdVB4RUxkZi1xOVlFQXh1NjRfbkFEaEJ5TVZlUGFrWVhSTVRPeGxqNlJDTHNsRWRrei1pYjhnUmZrb3BvWkQ2QXBzYjFHNXZoWU1LSExhLWtlYlJTZlJmYUM5Y1Rhb1pkMVYyWTByM3NTS0VXMG1ybm1BTVN2QXRYaXZqX2dKSkZrajZSS2cyVlNOQnd5Y29zMlVyaWlNbTJEb3FuUFFtbWNTNVpZTktUenFZSl91cVFXZjRkQUZyYmtPczU2S1RKQ19ONGFOTHlwX2hOOEE1UHZEVjhnT0xxRjMxTEE4SHhRbmlmTkZwVXJBdlJDbU5oZS05SzI4QVhEWDZaN2ZiSlFwUGRXSnB5TE9MZV9ia3pYcmZVa1dicG5FMHRXUFZXMWJQVDAwOEdDQzJmZEl0ZDhUOEFpZXZWWXl5Q2xwSmFienNCMldlb2NKb2ZRYV9KbUdHRzNUcjU1VUFhMzk1a2J6dDVuNTl6NTdpM0hGa3k0UWVtbF9pdDVsQVp2cndDLUU5dnNYOF9CLS0ySXhBSFdCSnpqV010bllBb3U0cEZZYVF5R2tSNFM5NlRhdS1fb1NqbDBKMkw0V2N0VEZhNExtQlR3ckZ3cVlCeHVXdXJ6X0s4cEtsaG5rVUxCN2RRbHQxTmcyVFBqYUxyOHJzeFBXVUJaRHpXbUoxdHZzMFBzQk1UTUFvX1pGNFNMNDFvZWdTdEUtMUNKMXNIeVlvQk1CeEdpZVdmN0tsSDVZZHJXSGt5c2o2MHdwSTZIMVBhRzM1eU43Q2FtcVNidExxczNJeUx5U2RuUG5EeHpCTlg2SV9WNk1ET3BRNXFuc0pNWlVvZUYtY21oRGtJSmwxQ09QbHBUV3BuS3B5NE9RVkhfellqZjJUQ0diSV94QlhQWmdaaC1TRWxsMUVWSXB0aE1McFZDZDNwQUVKZ2t5cXRTXzlRZVJwN0pZSnJSV21XMlh0TzFRVEl0c2I4QjBxOGRCYkNxek04a011X1lrb2poQ3h2LUhKTGJiUlhneHp5QWFBcE5nMElkNTVzM3JGOWtUQ19wNVBTaVVHUHFDNFJnNXJaWDNBSkMwbi1WbTdtSnFySkhNQl9ZQjZrR2xDcXhTRExhMmNHcGlyWjR3ZU9SSjRZd1l4ZjVPeHNiYk53SW5SYnZPTzNkd1lnZmFseV9tQ3BxM3lNYVBHT0J0elJnMTByZ3VHemxta0tVQzZZRllmQ2VLZ1ZCNDhUUTc3LWNCZXBMekFwWW1fQkQ1NktzNGFMYUdYTU0xbXprY1FONUNlUHNMY3h2NFJMMmhNa3VNdzF4TVFWQk9odnJUMjFJMVd3Z2N6Sms5aEM2SWlWZFViZ0JWTEpUWWM5NmIzOS1oQmRqdkt1NUUycFlVcUxERUZGbnZqTUxIYnJmMDBHZDEzbnJsWEEzSUo3UmNPUDg1dnRUU1FzcWtjTWZwUG9zM0JTY3RqMDdST2UxcXFTM0d0bGkwdFhnMk5LaUlxNWx3V1pLaVlLUFJXZzBzVl9Ia1V1OHdYUEFWOU50UndycGtCdzM0Q0NQamp2VTNqbFBLaGhsbUk5dUI5MjU5OHVySk1oY0drUWtXUloyVVRvOWJmbUVYRzFVeWNQczh2NXJCeVppRlZiWDNJaDhOSmRmX2lURTNVS3NXQXFZT1QtUmdvMWJoVWYxU3lqUUJhbzEyX3I3TXhwbm9wc1FoQ1ZUTlNBRjMyQTBTY2tzbHZ3RFUtTjVxQ0o1QXRTVks2WENwMGZCRGstNU1jN3FhUFJCQThyaFh
hMVRsbnlSRXNGRmt3Yk01X21ldmV3bTItWm1JaGpZQWZROEFtT1d1UUtPQlhYVVFqT2NxLUxQenJHX3JfMEdscDRiMXcyZ1ZmU3NFMzVoelZJaDlvT0ZoRGQ2bmtlM0M5ZHlCd2ZMbnRZRkZUWHVBUEx4czNfTmtMckh5eXZrZFBzOEItOGRYOEhsMzBhZ0xlOWFjZzgteVBsdnpPT1pYdUxnbFNXYnhKaVB6QUxVdUJCOFpvU2x2c1FHZV94MDBOVWJhYkxISkswc0U5UmdPWFJLXzZNYklHTjN1QzRKaldKdEVHb0pOU284N3c2LXZGMGVleEZ5NGZ6OGV1dm1tM0J0aTQ3VFlNOEJrdEh3PT0=
+
+# Feature SyncDelta JIRA configuration
+Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkTUNsWm4wX0p6eXFDZmJ4dFdHNEs1MV9MUzdrb3RzeC1jVWVYZ0REWHRyZkFiaGZLcUQtTXFBZzZkNzRmQ0gxbEhGbUNlVVFfR1JEQTc0aldkZkgyWnBOcjdlUlZxR0tDTEdKRExULXAyUEtsVmNTMkRKU1BJNnFiM0hlMXo4YndMcHlRMExtZDQ3Zm9vNFhMcEZCcHpBPT0=
+
+# Teamsbot Browser Bot Service
+TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
+
+# Debug Configuration
+APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
+APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
+APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
+APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
+
+# Mandate Pre-Processing Servers
+PREPROCESS_ALTHAUS_CHAT_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4UkNBelhvckxCQUVjZm94N3BZUDcxaEMyckE2dm1lRVhqODhrWU1SUjNXZ3dQZlVJOWhveXFkZXpobW5xT0NneGZ2SkNUblFmYXd0WTBYNTl3UmRnSWc9PQ==
+
+# Preprocessor API Configuration
+PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
+PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
+
+# Azure Communication Services Email Configuration
+MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
+MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
diff --git a/env_int.env b/env_int.env
index ec880940..f6e8d8fa 100644
--- a/env_int.env
+++ b/env_int.env
@@ -49,11 +49,13 @@ Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/go
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
-Service_CLICKUP_OAUTH_REDIRECT_URI = http://gateway-int.poweron-center.net/api/clickup/auth/connect/callback
+Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/clickup/auth/connect/callback
+
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kDrive + mail) via the UI.
# Stripe Billing (both end with _SECRET for encryption script)
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
-STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
+STRIPE_WEBHOOK_SECRET = INT_ENC:Z0FBQUFBQnA4UXZiUUVqTl9lREVRWTh1aHFDcFpwcXRkOUx4MS1ham9Ddkl6T0xzMnJuM1hhUHdGNG5CenY1MUg4RlJBOGFQTWl5cVd5MjJ2REItcHYyRmdLX3ZlT2p5Z3BRVkMtQnRoTVkteXlfaU92MVBtOEI0Ni1kbGlfa0NiRmFRRXNHLVE2NHI=
STRIPE_API_VERSION = 2026-01-28.clover
STRIPE_AUTOMATIC_TAX_ENABLED = false
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
diff --git a/env_prod.20260428_213451.backup b/env_prod.20260428_213451.backup
new file mode 100644
index 00000000..d7307743
--- /dev/null
+++ b/env_prod.20260428_213451.backup
@@ -0,0 +1,101 @@
+# Production Environment Configuration
+
+# System Configuration
+APP_ENV_TYPE = prod
+APP_ENV_LABEL = Production Instance
+APP_KEY_SYSVAR = CONFIG_KEY
+APP_INIT_PASS_ADMIN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3UnJRV0sySFlDblpXUlREclREaW1WbUt6bGtQYkdrNkZDOXNOLXFua1hqeFF2RHJnRXJ5VlVGV3hOZm41QjZOMlNTb0duYXNxZi05dXVTc2xDVkx0SVBFLUhncVo5T0VUZHE0UTZLWWw3ck09
+APP_INIT_PASS_EVENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3QVpIY19DQVZSSzJmc2F0VEZvQlU1cHBhTEgxdHdnR3g4eW01aTEzYTUxc1gxTDR1RVVpSHRXYjV6N1BLZUdCUGlfOW1qdy0xSHFVRkNBcGZvaGlSSkZycXRuUllaWnpyVGRoeFg1dGEyNUk9
+APP_API_URL = https://gateway-prod.poweron-center.net
+
+# PostgreSQL DB Host
+DB_HOST=gateway-prod-server.postgres.database.azure.com
+DB_USER=gzxxmcrdhn
+DB_PASSWORD_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3Y1JScGxjZG9TdUkwaHRzSHZhRHpNcDV3N1U2TnIwZ21PRG5TWFFfR1k0N3BiRk5WelVadjlnXzVSTDZ6NXFQNFpqbnJ1R3dNVkJocm1zVEgtSk0xaDRiR19zNDBEbVIzSk51ekNlQ0Z3b0U9
+DB_PORT=5432
+
+# Security Configuration
+APP_JWT_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3elhfV0Rnd2pQRjlMdkVwX1FnSmRhSzNZUlV5SVpaWXBNX1hpa2xPZGdMSWpnN2ZINHQxeGZnNHJweU5pZjlyYlY5Qm9zOUZEbl9wUEgtZHZXd1NhR19JSG9kbFU4MnFGQnllbFhRQVphRGQyNHlFVWR5VHQyUUpqN0stUmRuY2QyTi1oalczRHpLTEJqWURjZWs4YjZvT2U5YnFqcXEwdEpxV05fX05QMmtrPQ==
+APP_TOKEN_EXPIRY=300
+
+# CORS Configuration
+APP_ALLOWED_ORIGINS=http://localhost:8080,https://playground.poweron-center.net,https://playground-int.poweron-center.net,http://localhost:5176,https://nyla.poweron-center.net,https://nyla-int.poweron-center.net
+
+# Logging configuration
+APP_LOGGING_LOG_LEVEL = DEBUG
+APP_LOGGING_LOG_DIR = /home/site/wwwroot/
+APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
+APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
+APP_LOGGING_CONSOLE_ENABLED = True
+APP_LOGGING_FILE_ENABLED = True
+APP_LOGGING_ROTATION_SIZE = 10485760
+APP_LOGGING_BACKUP_COUNT = 5
+
+# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
+Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
+Service_MSFT_AUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/msft/auth/login/callback
+Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
+Service_MSFT_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/msft/auth/connect/callback
+
+Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
+Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/login/callback
+Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
+Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/connect/callback
+
+# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
+Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
+Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
+Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
+
+# Infomaniak OAuth -- Data App (kDrive + Mail)
+Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
+Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
+Service_INFOMANIAK_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/infomaniak/auth/connect/callback
+
+# Stripe Billing (both end with _SECRET for encryption script)
+STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
+STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
+STRIPE_API_VERSION = 2026-01-28.clover
+STRIPE_AUTOMATIC_TAX_ENABLED = false
+STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQZG8WqlVsabrfFEu49pah
+
+
+# AI configuration
+Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
+Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3TnhYdlhSLW5RbXJyMHFXX0V0bHhuTDlTaFJsRDl2dTdIUTFtVFAwTE8tY3hLbzNSMnVTLXd3RUZualN3MGNzc1kwOTIxVUN2WW1rYi1TendFRVVBSVNqRFVjckEzNExyTGNaUkJLMmozazUwemI1cnhrcEtZVXJrWkdaVFFramp3MWZ6RmY2aGlRMXVEYjM2M3ZlbmxMdnNCRDM1QWR0Wmd6MWVnS1I1c01nV3hRLXg3d2NTZXVfTi1Wdm16UnRyNGsyRTZ0bG9TQ1g1OFB5Z002bmQ3QT09
+Connector_AiPerplexity_API_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6NG5CTm9QOFZRV1BIVC0tV2RKTGtCQWFOUXlpRnhEdjN1U2x3VUdDamtIZV9CQzQ5ZmRmcUh3ZUVUa0NxbGhlenVVdWtaYjdpcnhvUlNFLXZfOWh2dWFZai0xUGU5cWpuYmpnRVRWakh0RVNUUTFyX0w5V0NXVWFrQlZuOTd5TkI0eVRoQ0ZBSm9HYUlYamoyY1FCMmlBPT0=
+Connector_AiTavily_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3NmItcDh6V0JpcE5Jc0NlUWZqcmllRHB5eDlNZmVnUlNVenhNTm5xWExzbjJqdE1GZ0hTSUYtb2dvdWNhTnlQNmVWQ2NGVDgwZ0MwMWZBMlNKWEhzdlF3TlZzTXhCZWM4Z1Uwb18tSTRoU1JBVTVkSkJHOTJwX291b3dPaVphVFg=
+Connector_AiPrivateLlm_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGanZ6U3pzZWkwXzVPWGtIQ040XzFrTXc5QWRnazdEeEktaUJ0akJmNnEzbWUzNHczLTJfc2dIdzBDY0FTaXZYcDhxNFdNbTNtbEJTb2VRZ0ZYd05hdlNLR1h6SUFzVml2Z1FLY1BjTl90UWozUGxtak1URnhhZmNDRWFTb0dKVUo=
+Connector_AiMistral_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGc2tQc2lvMk1YZk01Q1dob1U5cnR0dG03WWE3WkpoOWo0SEpvLU9Rc2lCNDExdy1wZExaN3lpT2FEQkxnaHRmWmZUUUZUUUJmblZreGlpaFpOdnFhbzlEd1RsVVJtX216cmhxTm5BcTN2eUZ2T054cDE5bmlEamJ3NGR6MVpFQnA=
+
+Service_MSFT_TENANT_ID = common
+
+# Google Cloud Speech Services configuration
+Connector_GoogleSpeech_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4NFQxaF9uN3h1cVB6dnZid1c1R1VfNDlSQ1NHMEVDZWtKanpMQ29CLXc1MXBqRm1hQ0YtWVhaejBMY1ZTOEFEVlpWQ3hrYkFza1E2RDNsYkdMMndNR0VGNTMwVDRGdURJY3hyaVFxVjEtSEYwNHJzeWM3WmlpZW9jU2E3NTgycEV2allqQ3dJRTNyRFAzaDJ6dklKeXpNRkJhYjFzUkptN2dpbkNpMklrcGxuZl9vTkt3T0JvNm1YTXd5UlkwZWptUXdWVFpnV2J4X3J2WUhIUlFkSElFVnlqMnlJRnNHTnlpMWs2R1dZc2ROWjNYZG85cndmd1E5cUZnVmZRYnVjTG43dXFmSWd2bGFfVWFWSmtpWkpndWNlSUNwcnFNU2NqZXFaV0xsY3l3SElLRkVHcHZGZERKV1ltcGhTS0dhTko1VTJLYzNoZjRkSGVEX3dTMWVVTmdDczV5cE1JQUdSbUJGUm11eFhTVjJHbkt0SzB4UG1Dc2xmbnp1Y041Y2RTeWRuWGdmQy1sTGx0MGtnM2VJQ3EyLXViRlNhTU9ybzZkR1N1bXE5SXhlZENWRFpWSGlYOWx4SUQ3UlR0ZEVxQkxNakRUVFRiUmFnbklOalphLUZkRFVVaXBRUk5NZW5PaUZydTFmQkNPSTdTVTNZd0plWXllNVFJdmN4MVcyTGlwMGFtVjBzOGRxR1FjbzhfYW5zdTB0ZEZBTTJhakltazh1dktNMUZsOUItdFdTb1pIaUxySllXNkdlY20zUS0wTnpFNTB2SU5acG1VcXhyaHBmME8takw3RDh5T043T2VGOV92TzNya2pWSlpYVjZDdXlZcjM3a0hPTlhkaW9oQmxqQlpGRFYyTTY4WmZmT3k4Tk1tdXRuSGdTUVpNT2NKenhXb05PdXBfSEdhMTNxNjdpNXlKUUI2YUgydFFPX1VvXzVJb0UxWTU2YVNiNDQ0QndZanhMMHR1cGdHWGhvcEg1QXEtSXZJdTdZUE12ZEVVWkF4QmtsQS1GYnY3SFIxSHlsOGVfcEpGS1A4QUVEQWNEOFZYYlljQ3ByTU03YU16Y0UzUnJQZEprSWNjT1ZXVEtDWi03Y3ZzRVdYUTlabXJISEo5THRHVXVuM0xqbzA4bGVlZVpOMk1QMmptb21tV0pTMlVoOXdWVU95UW1iQmttc2w1RG9mMWwxXzg1T2IxYUVmTUJEZkpUdTFDTzZ3RlBFeUFiX01iRTZNWkNaSG45TkFOM2pzbUJRZ2N0VFpoejJUTG1RODY3TzZpSzVkYUQzaEpfY2pSTkRzU0VpanlkdXVQQmJ2WU5peno4QWNLTDVxZTlhSHI3NnNiM0k0Y3JkQ0xaOU05bGtsQl8zQklvaktWSDZ4aVp2MHlYelJuUDJyTU9CZC1OZjJxNFc1dDcwSUlxaVh1LTMyWWFwU0IwUU9kOUFpMWpnOERtLTh1VmJiNGVwcXBMbU5fMjVZc0hFbmxQT2puSFd1ZGpyTkphLU5sVlBZWWxrWEZrWGJQWmVkN19tZFZfZ1l1V3pSWlA0V0ZxM2lrWnl2NU9WeTdCbDROSmhfeENKTFhMVXk1d195S2JMUFJoRXZjcVo4V2g0MTNKRnZhUE1wRkNPM3FZOGdVazJPeW5PSGpuZnFGTTdJMkRnam5rUlV6NFlqODlIelRYaEN5VjdJNnVwbllNODNCTFRHMWlXbmM1VlRxbXB3Wm9LRjVrQUpjYzRNMThUMWwwSVhBMUlyamtPZnE4R0o4bEdHay1zMjR5RDJkZ1lYRHZaNHVHU2otR3ZpN25LZlEySEU0UmdTNzJGVHNWQXMyb0dVMV9WUE13ODhZWUFaakxGOWZieGNXZkNYRnV5djEyWTZLcmdrajRBLU1rS1Z0VVRkOWlDMU9fMGVmYXFhZXJGMUhpNkdmb2hkbzZ1OWV6VlNmVz
NISjVYTFh6SjJNdWR5MWZidE8yVEo2dnRrZXhMRXBPczUwTG13OGhNUVpIQm0zQmRKRnJ0Nl8wNW1Ob0dHRDVpU0NWREV3TkY2SjktdVBkMFU1ZXBmSFpHQ3FHNTRZdTJvaExpZVEtLTU4YTVyeFBpNDdEajZtWUc4c1dBeUJqQ3NIY1NLS0FIMUxGZzZxNFNkOG9ORGNHWWJCVnZuNnJVTEtoQi1mRTZyUl81ZWJJMi1KOGdERzBhNVRZeHRYUUlqY2JvMFlaNHhWMU9pWFFiZjdaLUhkaG15TTBPZVlkS2R5UVdENTI4QVFiY1RJV0ZNZnlpVWxfZmlnN1BXbGdrbjFGUkhzYl9qeHBxVVJacUE4bjZETENHVFpSamh0NVpOM2hMYTZjYzBuS3J0a3hhZGxSM1V5UHd2OTU3ZHY0Yy1xWDBkWUk0Ymp0MWVrS3YzSktKODhQZnY3QTZ1Wm1VZkZJbS1jamdreks1ZlhpQjFOUDFiOHJ2Nm9NcmdTdU5LQXV2RkZWZEFNZnVKUjVwcVY3dDdhQnpmRVJ6SmlvVXpDM0ZiYXh5bGE2X04tTE9qZ3BiTnN3TF9ZaFRxSUpjNjB1dXZBcy1TZHRHTjFjSUR3WUl4cE9VNzB5Rkk4U3Z1SVZYTl9sYXlZVk83UnFrMlVmcnBpam9lRUlCY19DdVJwOXl2TVVDV1pMRFZTZk9MY3Z1eXA0MnhGazc5YllQaWtOeTc4NjlOa2lGY05RRzY1cG9nbGpYelc4c3FicWxWRkg0YzRSamFlQ19zOU14YWJreU9pNDREZVJ3a0REMUxGTzF1XzI1bEF3VXVZRjlBeWFiLXJsOXgza3VZem1WckhWSnVNbDBNcldadU8xQ3RwOTl5NGgtVlR0QklCLWl5WkE4V1FlQTBCOVU1RE9sQlRrYUNZOGdfUmEwbEZvUTFGUEFWVmQ4V1FhOU9VNjZqemRpZm1sUDhZQTJ0YVBRbWZldkF5THV4QXpfdUtNZ0tlcGdSRFM3c0lDOTNQbnBxdmxYYWNpTmI3MW9BMlZIdTQ5RldudHpNQWQ5NDNPLVVTLXVVNzdHZXh4UXpZa3dVa2J4dTFDV1RkYjRnWXU2M3lJekRYWGNMcWU5OVh6U2xZWDh6MmpqcnpiOHlnMjA5S3RFQm1NZjNSM21adkVnTUpSYVhkTzNkNnJCTmljY0x1cl9kMkx3UHhySjZEdHREanZERzNEUTFlTkR0NWlBczAtdmFGTjdZNVpTMlkxV2czYW5RN2lqemg4eUViZDV6RjdKNXdFcUlvcVhoNkJ6eVJkR1pua1hnNzQwOEs2TXJYSlpGcW9qRDU2QjBOWFFtdXBJRkRKbmdZUF9ZSmRPVEtvUjVhLTV1NjdXQjRhS0duaEtJb2FrQnNjUTRvdFMxdkdTNk1NYlFHUFhhYTJ1eUN3WHN4UlJ4UjdrZjY0SzFGYWVFN1k0cGJnc1RjNmFUenR4NHljbVhablZSWHZmUVN3cXRHNjhsX1BSZWEzdTJUZFA0S2pTaU9YMnZIQ1ZPcGhWMFJqZkVEMWRMR1h3SnU0Z2FzZ3VGM3puNzdhVjhaQXNIWHFsbjB0TDVYSFdSNV9rdWhUUUhSZHBGYkJIVDB5SDdlMC13QTVnS0g5Qkg5RGNxSGJlelVndUhPcEQ0QkRKMTJTZUM1OXJhVm0zYjU0OVY2dk9MQVBheklIQXpVNW9Yc0ROVjEzaFZTWmVxYlBWMlNlSzladzJ6TmNuMG5FVVZkN1VZN1pfS2ZHa0lQcE80S24wSnQtVlJVV09OVWJ3M09YMkZpV2ktVF9ENHhKU2dfYUQ2aUVyamk0VHJHQmVfVHU4clpUTFoteW5aSWRPV1M0RDRMTms4NGRoYmJfVE82aUl2X3VieVJOdDhBQmRwdzdnRTVBNzZwaW93dUlZb3ZRYUtOeG9ULWxvNVp5a0haSjdkcUhRb3d6UGIxRUpCVkVYX2d6TkRqQVozUWxkNGFoc1FXYV
d2YWNkME9Qclo0bjYxMFRWTy1nbnI5NTBJNzRMMDluUXRKYTFqQUN4d0d5aHVlamN3Tkk3NWJXeXR0TW9BeUg5Vnp4Q2RnZUY3b3AtMDlrNmlrSGR0eGRtbUdUd2lFRWg4MklEeWJHN2wwZEpVSXMxNDNOWjRFS0tPdWxhMmFCckhfRENIY184aEFDZXNrRDl2dHQtQW12UnRuQXJjaDJoTUpiYkNWQUtfRG9GMUZoNWM4UnBYZ29RWWs2NHcyUm5kdTF3Vk1GeFpiRUJLaVZ2UGFjbi1jV3lMV0N2ZDl4VERPN295X01NNG56ZjZkRzZoYUtmY1E5NlVXemx2SnVfb19iSXg0R2M3Mjd1a2JRPT0=
+
+# Feature SyncDelta JIRA configuration
+Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4d3Z4d2x6N1FhUktMU0RKbkxfY2pTQkRzXzJ6UXVEbDNCaFM3UHMtQVFGYzNmYWs4N0lMM1R2SFJuZTVFVmx6MGVEbXc5U3NOTnY1TWN0ZDNaamlHQWloalM3VldmREJNSHQ1TlVkSVFJMTVhQWVGSVRMTGw4UTBqNGlQZFVuaHp4WUlKemR5UnBXZlh0REJFLXJ4ejR3PT0=
+
+# Teamsbot Browser Bot Service
+TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
+
+# Debug Configuration
+APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
+APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
+APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
+APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
+
+# Mandate Pre-Processing Servers
+PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
+
+# Preprocessor API Configuration
+PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
+PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
+
+# Azure Communication Services Email Configuration
+MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
+MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
diff --git a/env_prod.env b/env_prod.env
index 645cc5b7..09ec8c34 100644
--- a/env_prod.env
+++ b/env_prod.env
@@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kDrive + Mail) via the UI.
+
# Stripe Billing (both end with _SECRET for encryption script)
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
diff --git a/env_prod_forgejo.20260428_213451.backup b/env_prod_forgejo.20260428_213451.backup
new file mode 100644
index 00000000..f6193f2c
--- /dev/null
+++ b/env_prod_forgejo.20260428_213451.backup
@@ -0,0 +1,101 @@
+# Production Environment Configuration
+
+# System Configuration
+APP_ENV_TYPE = prod
+APP_ENV_LABEL = Production Instance Forgejo
+APP_KEY_SYSVAR = /srv/gateway/shared/secrets/master_key.txt
+APP_INIT_PASS_ADMIN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3UnJRV0sySFlDblpXUlREclREaW1WbUt6bGtQYkdrNkZDOXNOLXFua1hqeFF2RHJnRXJ5VlVGV3hOZm41QjZOMlNTb0duYXNxZi05dXVTc2xDVkx0SVBFLUhncVo5T0VUZHE0UTZLWWw3ck09
+APP_INIT_PASS_EVENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3QVpIY19DQVZSSzJmc2F0VEZvQlU1cHBhTEgxdHdnR3g4eW01aTEzYTUxc1gxTDR1RVVpSHRXYjV6N1BLZUdCUGlfOW1qdy0xSHFVRkNBcGZvaGlSSkZycXRuUllaWnpyVGRoeFg1dGEyNUk9
+APP_API_URL = https://api.poweron.swiss
+
+# PostgreSQL DB Host
+DB_HOST=10.20.0.21
+DB_USER=poweron_dev
+DB_PASSWORD_SECRET = mypassword
+DB_PORT=5432
+
+# Security Configuration
+APP_JWT_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3elhfV0Rnd2pQRjlMdkVwX1FnSmRhSzNZUlV5SVpaWXBNX1hpa2xPZGdMSWpnN2ZINHQxeGZnNHJweU5pZjlyYlY5Qm9zOUZEbl9wUEgtZHZXd1NhR19JSG9kbFU4MnFGQnllbFhRQVphRGQyNHlFVWR5VHQyUUpqN0stUmRuY2QyTi1oalczRHpLTEJqWURjZWs4YjZvT2U5YnFqcXEwdEpxV05fX05QMmtrPQ==
+APP_TOKEN_EXPIRY=300
+
+# CORS Configuration
+APP_ALLOWED_ORIGINS=https://porta.poweron.swiss
+
+# Logging configuration
+APP_LOGGING_LOG_LEVEL = DEBUG
+APP_LOGGING_LOG_DIR = srv/gateway/shared/logs
+APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
+APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
+APP_LOGGING_CONSOLE_ENABLED = True
+APP_LOGGING_FILE_ENABLED = True
+APP_LOGGING_ROTATION_SIZE = 10485760
+APP_LOGGING_BACKUP_COUNT = 5
+
+# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
+Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
+Service_MSFT_AUTH_REDIRECT_URI=https://api.poweron.swiss/api/msft/auth/login/callback
+Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
+Service_MSFT_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
+Service_MSFT_DATA_REDIRECT_URI = https://api.poweron.swiss/api/msft/auth/connect/callback
+
+Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
+Service_GOOGLE_AUTH_REDIRECT_URI =
+Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
+Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
+Service_GOOGLE_DATA_REDIRECT_URI =
+
+# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
+Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
+Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
+Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
+
+# Infomaniak OAuth -- Data App (kDrive + Mail)
+Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
+Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
+Service_INFOMANIAK_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/infomaniak/auth/connect/callback
+
+# Stripe Billing (both end with _SECRET for encryption script)
+STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
+STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
+STRIPE_API_VERSION = 2026-01-28.clover
+STRIPE_AUTOMATIC_TAX_ENABLED = false
+STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQZG8WqlVsabrfFEu49pah
+
+
+# AI configuration
+Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
+Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3TnhYdlhSLW5RbXJyMHFXX0V0bHhuTDlTaFJsRDl2dTdIUTFtVFAwTE8tY3hLbzNSMnVTLXd3RUZualN3MGNzc1kwOTIxVUN2WW1rYi1TendFRVVBSVNqRFVjckEzNExyTGNaUkJLMmozazUwemI1cnhrcEtZVXJrWkdaVFFramp3MWZ6RmY2aGlRMXVEYjM2M3ZlbmxMdnNCRDM1QWR0Wmd6MWVnS1I1c01nV3hRLXg3d2NTZXVfTi1Wdm16UnRyNGsyRTZ0bG9TQ1g1OFB5Z002bmQ3QT09
+Connector_AiPerplexity_API_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6NG5CTm9QOFZRV1BIVC0tV2RKTGtCQWFOUXlpRnhEdjN1U2x3VUdDamtIZV9CQzQ5ZmRmcUh3ZUVUa0NxbGhlenVVdWtaYjdpcnhvUlNFLXZfOWh2dWFZai0xUGU5cWpuYmpnRVRWakh0RVNUUTFyX0w5V0NXVWFrQlZuOTd5TkI0eVRoQ0ZBSm9HYUlYamoyY1FCMmlBPT0=
+Connector_AiTavily_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3NmItcDh6V0JpcE5Jc0NlUWZqcmllRHB5eDlNZmVnUlNVenhNTm5xWExzbjJqdE1GZ0hTSUYtb2dvdWNhTnlQNmVWQ2NGVDgwZ0MwMWZBMlNKWEhzdlF3TlZzTXhCZWM4Z1Uwb18tSTRoU1JBVTVkSkJHOTJwX291b3dPaVphVFg=
+Connector_AiPrivateLlm_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGanZ6U3pzZWkwXzVPWGtIQ040XzFrTXc5QWRnazdEeEktaUJ0akJmNnEzbWUzNHczLTJfc2dIdzBDY0FTaXZYcDhxNFdNbTNtbEJTb2VRZ0ZYd05hdlNLR1h6SUFzVml2Z1FLY1BjTl90UWozUGxtak1URnhhZmNDRWFTb0dKVUo=
+Connector_AiMistral_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGc2tQc2lvMk1YZk01Q1dob1U5cnR0dG03WWE3WkpoOWo0SEpvLU9Rc2lCNDExdy1wZExaN3lpT2FEQkxnaHRmWmZUUUZUUUJmblZreGlpaFpOdnFhbzlEd1RsVVJtX216cmhxTm5BcTN2eUZ2T054cDE5bmlEamJ3NGR6MVpFQnA=
+
+Service_MSFT_TENANT_ID = common
+
+# Google Cloud Speech Services configuration
+Connector_GoogleSpeech_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4NFQxaF9uN3h1cVB6dnZid1c1R1VfNDlSQ1NHMEVDZWtKanpMQ29CLXc1MXBqRm1hQ0YtWVhaejBMY1ZTOEFEVlpWQ3hrYkFza1E2RDNsYkdMMndNR0VGNTMwVDRGdURJY3hyaVFxVjEtSEYwNHJzeWM3WmlpZW9jU2E3NTgycEV2allqQ3dJRTNyRFAzaDJ6dklKeXpNRkJhYjFzUkptN2dpbkNpMklrcGxuZl9vTkt3T0JvNm1YTXd5UlkwZWptUXdWVFpnV2J4X3J2WUhIUlFkSElFVnlqMnlJRnNHTnlpMWs2R1dZc2ROWjNYZG85cndmd1E5cUZnVmZRYnVjTG43dXFmSWd2bGFfVWFWSmtpWkpndWNlSUNwcnFNU2NqZXFaV0xsY3l3SElLRkVHcHZGZERKV1ltcGhTS0dhTko1VTJLYzNoZjRkSGVEX3dTMWVVTmdDczV5cE1JQUdSbUJGUm11eFhTVjJHbkt0SzB4UG1Dc2xmbnp1Y041Y2RTeWRuWGdmQy1sTGx0MGtnM2VJQ3EyLXViRlNhTU9ybzZkR1N1bXE5SXhlZENWRFpWSGlYOWx4SUQ3UlR0ZEVxQkxNakRUVFRiUmFnbklOalphLUZkRFVVaXBRUk5NZW5PaUZydTFmQkNPSTdTVTNZd0plWXllNVFJdmN4MVcyTGlwMGFtVjBzOGRxR1FjbzhfYW5zdTB0ZEZBTTJhakltazh1dktNMUZsOUItdFdTb1pIaUxySllXNkdlY20zUS0wTnpFNTB2SU5acG1VcXhyaHBmME8takw3RDh5T043T2VGOV92TzNya2pWSlpYVjZDdXlZcjM3a0hPTlhkaW9oQmxqQlpGRFYyTTY4WmZmT3k4Tk1tdXRuSGdTUVpNT2NKenhXb05PdXBfSEdhMTNxNjdpNXlKUUI2YUgydFFPX1VvXzVJb0UxWTU2YVNiNDQ0QndZanhMMHR1cGdHWGhvcEg1QXEtSXZJdTdZUE12ZEVVWkF4QmtsQS1GYnY3SFIxSHlsOGVfcEpGS1A4QUVEQWNEOFZYYlljQ3ByTU03YU16Y0UzUnJQZEprSWNjT1ZXVEtDWi03Y3ZzRVdYUTlabXJISEo5THRHVXVuM0xqbzA4bGVlZVpOMk1QMmptb21tV0pTMlVoOXdWVU95UW1iQmttc2w1RG9mMWwxXzg1T2IxYUVmTUJEZkpUdTFDTzZ3RlBFeUFiX01iRTZNWkNaSG45TkFOM2pzbUJRZ2N0VFpoejJUTG1RODY3TzZpSzVkYUQzaEpfY2pSTkRzU0VpanlkdXVQQmJ2WU5peno4QWNLTDVxZTlhSHI3NnNiM0k0Y3JkQ0xaOU05bGtsQl8zQklvaktWSDZ4aVp2MHlYelJuUDJyTU9CZC1OZjJxNFc1dDcwSUlxaVh1LTMyWWFwU0IwUU9kOUFpMWpnOERtLTh1VmJiNGVwcXBMbU5fMjVZc0hFbmxQT2puSFd1ZGpyTkphLU5sVlBZWWxrWEZrWGJQWmVkN19tZFZfZ1l1V3pSWlA0V0ZxM2lrWnl2NU9WeTdCbDROSmhfeENKTFhMVXk1d195S2JMUFJoRXZjcVo4V2g0MTNKRnZhUE1wRkNPM3FZOGdVazJPeW5PSGpuZnFGTTdJMkRnam5rUlV6NFlqODlIelRYaEN5VjdJNnVwbllNODNCTFRHMWlXbmM1VlRxbXB3Wm9LRjVrQUpjYzRNMThUMWwwSVhBMUlyamtPZnE4R0o4bEdHay1zMjR5RDJkZ1lYRHZaNHVHU2otR3ZpN25LZlEySEU0UmdTNzJGVHNWQXMyb0dVMV9WUE13ODhZWUFaakxGOWZieGNXZkNYRnV5djEyWTZLcmdrajRBLU1rS1Z0VVRkOWlDMU9fMGVmYXFhZXJGMUhpNkdmb2hkbzZ1OWV6VlNmVz
NISjVYTFh6SjJNdWR5MWZidE8yVEo2dnRrZXhMRXBPczUwTG13OGhNUVpIQm0zQmRKRnJ0Nl8wNW1Ob0dHRDVpU0NWREV3TkY2SjktdVBkMFU1ZXBmSFpHQ3FHNTRZdTJvaExpZVEtLTU4YTVyeFBpNDdEajZtWUc4c1dBeUJqQ3NIY1NLS0FIMUxGZzZxNFNkOG9ORGNHWWJCVnZuNnJVTEtoQi1mRTZyUl81ZWJJMi1KOGdERzBhNVRZeHRYUUlqY2JvMFlaNHhWMU9pWFFiZjdaLUhkaG15TTBPZVlkS2R5UVdENTI4QVFiY1RJV0ZNZnlpVWxfZmlnN1BXbGdrbjFGUkhzYl9qeHBxVVJacUE4bjZETENHVFpSamh0NVpOM2hMYTZjYzBuS3J0a3hhZGxSM1V5UHd2OTU3ZHY0Yy1xWDBkWUk0Ymp0MWVrS3YzSktKODhQZnY3QTZ1Wm1VZkZJbS1jamdreks1ZlhpQjFOUDFiOHJ2Nm9NcmdTdU5LQXV2RkZWZEFNZnVKUjVwcVY3dDdhQnpmRVJ6SmlvVXpDM0ZiYXh5bGE2X04tTE9qZ3BiTnN3TF9ZaFRxSUpjNjB1dXZBcy1TZHRHTjFjSUR3WUl4cE9VNzB5Rkk4U3Z1SVZYTl9sYXlZVk83UnFrMlVmcnBpam9lRUlCY19DdVJwOXl2TVVDV1pMRFZTZk9MY3Z1eXA0MnhGazc5YllQaWtOeTc4NjlOa2lGY05RRzY1cG9nbGpYelc4c3FicWxWRkg0YzRSamFlQ19zOU14YWJreU9pNDREZVJ3a0REMUxGTzF1XzI1bEF3VXVZRjlBeWFiLXJsOXgza3VZem1WckhWSnVNbDBNcldadU8xQ3RwOTl5NGgtVlR0QklCLWl5WkE4V1FlQTBCOVU1RE9sQlRrYUNZOGdfUmEwbEZvUTFGUEFWVmQ4V1FhOU9VNjZqemRpZm1sUDhZQTJ0YVBRbWZldkF5THV4QXpfdUtNZ0tlcGdSRFM3c0lDOTNQbnBxdmxYYWNpTmI3MW9BMlZIdTQ5RldudHpNQWQ5NDNPLVVTLXVVNzdHZXh4UXpZa3dVa2J4dTFDV1RkYjRnWXU2M3lJekRYWGNMcWU5OVh6U2xZWDh6MmpqcnpiOHlnMjA5S3RFQm1NZjNSM21adkVnTUpSYVhkTzNkNnJCTmljY0x1cl9kMkx3UHhySjZEdHREanZERzNEUTFlTkR0NWlBczAtdmFGTjdZNVpTMlkxV2czYW5RN2lqemg4eUViZDV6RjdKNXdFcUlvcVhoNkJ6eVJkR1pua1hnNzQwOEs2TXJYSlpGcW9qRDU2QjBOWFFtdXBJRkRKbmdZUF9ZSmRPVEtvUjVhLTV1NjdXQjRhS0duaEtJb2FrQnNjUTRvdFMxdkdTNk1NYlFHUFhhYTJ1eUN3WHN4UlJ4UjdrZjY0SzFGYWVFN1k0cGJnc1RjNmFUenR4NHljbVhablZSWHZmUVN3cXRHNjhsX1BSZWEzdTJUZFA0S2pTaU9YMnZIQ1ZPcGhWMFJqZkVEMWRMR1h3SnU0Z2FzZ3VGM3puNzdhVjhaQXNIWHFsbjB0TDVYSFdSNV9rdWhUUUhSZHBGYkJIVDB5SDdlMC13QTVnS0g5Qkg5RGNxSGJlelVndUhPcEQ0QkRKMTJTZUM1OXJhVm0zYjU0OVY2dk9MQVBheklIQXpVNW9Yc0ROVjEzaFZTWmVxYlBWMlNlSzladzJ6TmNuMG5FVVZkN1VZN1pfS2ZHa0lQcE80S24wSnQtVlJVV09OVWJ3M09YMkZpV2ktVF9ENHhKU2dfYUQ2aUVyamk0VHJHQmVfVHU4clpUTFoteW5aSWRPV1M0RDRMTms4NGRoYmJfVE82aUl2X3VieVJOdDhBQmRwdzdnRTVBNzZwaW93dUlZb3ZRYUtOeG9ULWxvNVp5a0haSjdkcUhRb3d6UGIxRUpCVkVYX2d6TkRqQVozUWxkNGFoc1FXYV
d2YWNkME9Qclo0bjYxMFRWTy1nbnI5NTBJNzRMMDluUXRKYTFqQUN4d0d5aHVlamN3Tkk3NWJXeXR0TW9BeUg5Vnp4Q2RnZUY3b3AtMDlrNmlrSGR0eGRtbUdUd2lFRWg4MklEeWJHN2wwZEpVSXMxNDNOWjRFS0tPdWxhMmFCckhfRENIY184aEFDZXNrRDl2dHQtQW12UnRuQXJjaDJoTUpiYkNWQUtfRG9GMUZoNWM4UnBYZ29RWWs2NHcyUm5kdTF3Vk1GeFpiRUJLaVZ2UGFjbi1jV3lMV0N2ZDl4VERPN295X01NNG56ZjZkRzZoYUtmY1E5NlVXemx2SnVfb19iSXg0R2M3Mjd1a2JRPT0=
+
+# Feature SyncDelta JIRA configuration
+Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4d3Z4d2x6N1FhUktMU0RKbkxfY2pTQkRzXzJ6UXVEbDNCaFM3UHMtQVFGYzNmYWs4N0lMM1R2SFJuZTVFVmx6MGVEbXc5U3NOTnY1TWN0ZDNaamlHQWloalM3VldmREJNSHQ1TlVkSVFJMTVhQWVGSVRMTGw4UTBqNGlQZFVuaHp4WUlKemR5UnBXZlh0REJFLXJ4ejR3PT0=
+
+# Teamsbot Browser Bot Service
+TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
+
+# Debug Configuration
+APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
+APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
+APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
+APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
+
+# Mandate Pre-Processing Servers
+PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
+
+# Preprocessor API Configuration
+PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
+PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
+
+# Azure Communication Services Email Configuration
+MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
+MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
diff --git a/env_prod_forgejo.env b/env_prod_forgejo.env
index b0fab036..e0ab455b 100644
--- a/env_prod_forgejo.env
+++ b/env_prod_forgejo.env
@@ -11,7 +11,7 @@ APP_API_URL = https://api.poweron.swiss
# PostgreSQL DB Host
DB_HOST=10.20.0.21
DB_USER=poweron_dev
-DB_PASSWORD_SECRET = mypassword
+DB_PASSWORD_SECRET = PROD_ENC:Z0FBQUFBQnA4UXZiMnRoUzVlbVRLX3JTRl94cVpMaURtMndZVmFBYXdvdnIxLV81dWwxWmhmcUlCMUFZbDhRT2NsQmNqSl9ZMmRWRVN1Y2JqNlVwOXRJY1VBTm1oSjNiaFE9PQ==
DB_PORT=5432
# Security Configuration
@@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kDrive + Mail) via the UI.
+
# Stripe Billing (both end with _SECRET for encryption script)
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
diff --git a/modules/aicore/aicorePluginOpenai.py b/modules/aicore/aicorePluginOpenai.py
index 2efc110e..259ca117 100644
--- a/modules/aicore/aicorePluginOpenai.py
+++ b/modules/aicore/aicorePluginOpenai.py
@@ -11,6 +11,30 @@ from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingMode
logger = logging.getLogger(__name__)
+
+def _supportsCustomTemperature(modelName: str) -> bool:
+ """Check whether an OpenAI model accepts a custom `temperature` value.
+
+ GPT-5.x and the o-series (o1/o3/o4) reasoning models reject every
+ `temperature` value other than the default (1) with HTTP 400
+ `unsupported_value`. For these models we must omit `temperature`
+ from the payload entirely. Older chat-completions models
+ (gpt-4o, gpt-4o-mini, gpt-4.1, gpt-3.5-*) still accept any value
+ in [0, 2].
+
+ Returns:
+ True if `temperature` may be sent; False if it must be omitted.
+ """
+ if not modelName:
+ return True
+ name = modelName.lower()
+ if name.startswith("gpt-5"):
+ return False
+ if name.startswith("o1") or name.startswith("o3") or name.startswith("o4"):
+ return False
+ return True
+
+
def loadConfigData():
"""Load configuration data for OpenAI connector"""
return {
@@ -344,14 +368,18 @@ class AiOpenai(BaseConnectorAi):
payload = {
"model": model.name,
"messages": messages,
- "temperature": temperature,
# Universal output-length cap. `max_tokens` is deprecated and
# rejected outright by gpt-5.x / o-series; `max_completion_tokens`
# is accepted by every current chat-completions model (legacy
# gpt-4o, gpt-4.1, gpt-5.x, o1/o3/o4) per OpenAI API reference.
"max_completion_tokens": maxTokens
}
-
+ # gpt-5.x and o-series only accept the default temperature (1) and
+ # return HTTP 400 `unsupported_value` for anything else - omit the
+ # field entirely for those models.
+ if _supportsCustomTemperature(model.name):
+ payload["temperature"] = temperature
+
if modelCall.tools:
payload["tools"] = modelCall.tools
payload["tool_choice"] = modelCall.toolChoice or "auto"
@@ -428,13 +456,15 @@ class AiOpenai(BaseConnectorAi):
payload: Dict[str, Any] = {
"model": model.name,
"messages": messages,
- "temperature": temperature,
# See callAiBasic for the rationale: `max_completion_tokens`
# is the universal output-length parameter; `max_tokens` is
# deprecated and rejected by gpt-5.x / o-series.
"max_completion_tokens": model.maxTokens,
"stream": True,
}
+ if _supportsCustomTemperature(model.name):
+ payload["temperature"] = temperature
+
if modelCall.tools:
payload["tools"] = modelCall.tools
payload["tool_choice"] = modelCall.toolChoice or "auto"
@@ -585,15 +615,15 @@ class AiOpenai(BaseConnectorAi):
# Use the messages directly - they should already contain the image data
# in the format: {"type": "image_url", "image_url": {"url": "data:...base64,..."}}
- # Use parameters from model
temperature = model.temperature
# Don't set maxTokens - let the model use its full context length
-
+
payload = {
"model": model.name,
"messages": messages,
- "temperature": temperature
}
+ if _supportsCustomTemperature(model.name):
+ payload["temperature"] = temperature
response = await self.httpClient.post(
model.apiUrl,
diff --git a/modules/auth/oauthProviderConfig.py b/modules/auth/oauthProviderConfig.py
index 5de9c47b..b6c482e7 100644
--- a/modules/auth/oauthProviderConfig.py
+++ b/modules/auth/oauthProviderConfig.py
@@ -9,13 +9,15 @@ googleAuthScopes = [
"https://www.googleapis.com/auth/userinfo.profile",
]
-# Google — Data app (Gmail + Drive + identity for token responses)
+# Google — Data app (Gmail + Drive + Calendar + Contacts + identity for token responses)
googleDataScopes = [
"openid",
"https://www.googleapis.com/auth/userinfo.email",
"https://www.googleapis.com/auth/userinfo.profile",
"https://www.googleapis.com/auth/gmail.readonly",
"https://www.googleapis.com/auth/drive.readonly",
+ "https://www.googleapis.com/auth/calendar.readonly",
+ "https://www.googleapis.com/auth/contacts.readonly",
]
# Microsoft — Auth app: Graph profile only (MSAL adds openid, profile, offline_access, …)
@@ -34,6 +36,8 @@ msftDataScopes = [
"OnlineMeetings.Read",
"Chat.ReadWrite",
"ChatMessage.Send",
+ "Calendars.Read",
+ "Contacts.Read",
]
@@ -42,14 +46,8 @@ def msftDataScopesForRefresh() -> str:
return " ".join(msftDataScopes)
-# Infomaniak — Data app (kDrive + Mail; user_info needed for /1/profile lookup)
-infomaniakDataScopes = [
- "user_info",
- "kdrive",
- "mail",
-]
-
-
-def infomaniakDataScopesForRefresh() -> str:
- """Space-separated scope string identical to authorization request."""
- return " ".join(infomaniakDataScopes)
+# Infomaniak intentionally has no OAuth scope set: the kDrive + Mail data APIs
+# are only reachable with manually issued Personal Access Tokens (see
+# wiki/d-guides/infomaniak-token-setup.md). The OAuth /authorize endpoint at
+# login.infomaniak.com only accepts identity scopes (openid/profile/email/phone)
+# and does not return tokens that work against /1/* data routes.
diff --git a/modules/auth/tokenManager.py b/modules/auth/tokenManager.py
index 659b7088..e854f563 100644
--- a/modules/auth/tokenManager.py
+++ b/modules/auth/tokenManager.py
@@ -13,7 +13,7 @@ from modules.datamodels.datamodelSecurity import Token, TokenPurpose
from modules.datamodels.datamodelUam import AuthAuthority
from modules.shared.configuration import APP_CONFIG
from modules.shared.timeUtils import getUtcTimestamp, createExpirationTimestamp, parseTimestamp
-from modules.auth.oauthProviderConfig import msftDataScopesForRefresh, infomaniakDataScopesForRefresh
+from modules.auth.oauthProviderConfig import msftDataScopesForRefresh
logger = logging.getLogger(__name__)
@@ -30,9 +30,6 @@ class TokenManager:
self.google_client_id = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_ID")
self.google_client_secret = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_SECRET")
- # Infomaniak Data OAuth (kDrive + Mail)
- self.infomaniak_client_id = APP_CONFIG.get("Service_INFOMANIAK_DATA_CLIENT_ID")
- self.infomaniak_client_secret = APP_CONFIG.get("Service_INFOMANIAK_DATA_CLIENT_SECRET")
def refreshMicrosoftToken(self, refreshToken: str, userId: str, oldToken: Token) -> Optional[Token]:
"""Refresh Microsoft OAuth token using refresh token"""
@@ -166,65 +163,6 @@ class TokenManager:
logger.error(f"Error refreshing Google token: {str(e)}")
return None
- def refreshInfomaniakToken(self, refreshToken: str, userId: str, oldToken: Token) -> Optional[Token]:
- """Refresh Infomaniak OAuth token using refresh token"""
- try:
- logger.debug(f"refreshInfomaniakToken: Starting Infomaniak token refresh for user {userId}")
-
- if not self.infomaniak_client_id or not self.infomaniak_client_secret:
- logger.error("Infomaniak OAuth configuration not found")
- return None
-
- tokenUrl = "https://login.infomaniak.com/token"
- data = {
- "client_id": self.infomaniak_client_id,
- "client_secret": self.infomaniak_client_secret,
- "grant_type": "refresh_token",
- "refresh_token": refreshToken,
- "scope": infomaniakDataScopesForRefresh(),
- }
-
- with httpx.Client(timeout=30.0) as client:
- response = client.post(tokenUrl, data=data)
- logger.debug(f"refreshInfomaniakToken: HTTP response status: {response.status_code}")
-
- if response.status_code == 200:
- tokenData = response.json()
- if "access_token" not in tokenData:
- logger.error("Infomaniak token refresh response missing access_token")
- return None
-
- newToken = Token(
- userId=userId,
- authority=AuthAuthority.INFOMANIAK,
- connectionId=oldToken.connectionId,
- tokenPurpose=TokenPurpose.DATA_CONNECTION,
- tokenAccess=tokenData["access_token"],
- tokenRefresh=tokenData.get("refresh_token", refreshToken),
- tokenType=tokenData.get("token_type", "bearer"),
- expiresAt=createExpirationTimestamp(tokenData.get("expires_in", 3600)),
- createdAt=getUtcTimestamp(),
- )
- return newToken
-
- logger.error(
- f"Failed to refresh Infomaniak token: {response.status_code} - {response.text}"
- )
- if response.status_code == 400:
- try:
- errorData = response.json()
- if errorData.get("error") == "invalid_grant":
- logger.warning(
- "Infomaniak refresh token is invalid or expired - user needs to re-authenticate"
- )
- except Exception:
- pass
- return None
-
- except Exception as e:
- logger.error(f"Error refreshing Infomaniak token: {str(e)}")
- return None
-
def refreshToken(self, oldToken: Token) -> Optional[Token]:
"""Refresh an expired token using the appropriate OAuth service"""
try:
@@ -268,9 +206,6 @@ class TokenManager:
elif oldToken.authority == AuthAuthority.GOOGLE:
logger.debug(f"refreshToken: Refreshing Google token")
return self.refreshGoogleToken(oldToken.tokenRefresh, oldToken.userId, oldToken)
- elif oldToken.authority == AuthAuthority.INFOMANIAK:
- logger.debug(f"refreshToken: Refreshing Infomaniak token")
- return self.refreshInfomaniakToken(oldToken.tokenRefresh, oldToken.userId, oldToken)
else:
logger.warning(f"Unknown authority for token refresh: {oldToken.authority}")
return None
diff --git a/modules/auth/tokenRefreshService.py b/modules/auth/tokenRefreshService.py
index a69db085..5f243b3f 100644
--- a/modules/auth/tokenRefreshService.py
+++ b/modules/auth/tokenRefreshService.py
@@ -144,45 +144,6 @@ class TokenRefreshService:
logger.error(f"Error refreshing Microsoft token for connection {connection.id}: {str(e)}")
return False
- async def _refresh_infomaniak_token(self, interface, connection: UserConnection) -> bool:
- """Refresh Infomaniak OAuth token"""
- try:
- logger.debug(f"Refreshing Infomaniak token for connection {connection.id}")
-
- current_token = interface.getConnectionToken(connection.id)
- if not current_token:
- logger.warning(f"No Infomaniak token found for connection {connection.id}")
- return False
-
- from modules.auth.tokenManager import TokenManager
- token_manager = TokenManager()
-
- refreshedToken = token_manager.refreshToken(current_token)
- if refreshedToken:
- interface.saveConnectionToken(refreshedToken)
- interface.db.recordModify(UserConnection, connection.id, {
- "lastChecked": getUtcTimestamp(),
- "expiresAt": refreshedToken.expiresAt,
- })
- logger.info(f"Successfully refreshed Infomaniak token for connection {connection.id}")
- try:
- audit_logger.logSecurityEvent(
- userId=str(connection.userId),
- mandateId="system",
- action="token_refresh",
- details=f"Infomaniak token refreshed for connection {connection.id}",
- )
- except Exception:
- pass
- return True
-
- logger.warning(f"Failed to refresh Infomaniak token for connection {connection.id}")
- return False
-
- except Exception as e:
- logger.error(f"Error refreshing Infomaniak token for connection {connection.id}: {str(e)}")
- return False
-
async def refresh_expired_tokens(self, user_id: str) -> Dict[str, Any]:
"""
Refresh expired OAuth tokens for a user
@@ -216,7 +177,7 @@ class TokenRefreshService:
for connection in connections:
# Only refresh expired OAuth connections
if (connection.tokenStatus == 'expired' and
- connection.authority in [AuthAuthority.GOOGLE, AuthAuthority.MSFT, AuthAuthority.INFOMANIAK]):
+ connection.authority in [AuthAuthority.GOOGLE, AuthAuthority.MSFT]):
# Check rate limiting
if self._is_rate_limited(connection.id):
@@ -233,8 +194,6 @@ class TokenRefreshService:
success = await self._refresh_google_token(root_interface, connection)
elif connection.authority == AuthAuthority.MSFT:
success = await self._refresh_microsoft_token(root_interface, connection)
- elif connection.authority == AuthAuthority.INFOMANIAK:
- success = await self._refresh_infomaniak_token(root_interface, connection)
if success:
refreshed_count += 1
@@ -289,7 +248,7 @@ class TokenRefreshService:
# Only refresh active tokens that expire soon
if (connection.tokenStatus == 'active' and
connection.tokenExpiresAt and
- connection.authority in [AuthAuthority.GOOGLE, AuthAuthority.MSFT, AuthAuthority.INFOMANIAK]):
+ connection.authority in [AuthAuthority.GOOGLE, AuthAuthority.MSFT]):
# Check if token expires within 5 minutes
time_until_expiry = connection.tokenExpiresAt - current_time
@@ -310,8 +269,6 @@ class TokenRefreshService:
success = await self._refresh_google_token(root_interface, connection)
elif connection.authority == AuthAuthority.MSFT:
success = await self._refresh_microsoft_token(root_interface, connection)
- elif connection.authority == AuthAuthority.INFOMANIAK:
- success = await self._refresh_infomaniak_token(root_interface, connection)
if success:
refreshed_count += 1
diff --git a/modules/connectors/providerGoogle/connectorGoogle.py b/modules/connectors/providerGoogle/connectorGoogle.py
index 2baf49db..46fc6c54 100644
--- a/modules/connectors/providerGoogle/connectorGoogle.py
+++ b/modules/connectors/providerGoogle/connectorGoogle.py
@@ -14,6 +14,8 @@ logger = logging.getLogger(__name__)
_DRIVE_BASE = "https://www.googleapis.com/drive/v3"
_GMAIL_BASE = "https://gmail.googleapis.com/gmail/v1"
+_CALENDAR_BASE = "https://www.googleapis.com/calendar/v3"
+_PEOPLE_BASE = "https://people.googleapis.com/v1"
async def _googleGet(token: str, url: str) -> Dict[str, Any]:
@@ -274,12 +276,480 @@ class GmailAdapter(ServiceAdapter):
]
+class CalendarAdapter(ServiceAdapter):
+ """Google Calendar ServiceAdapter -- browse calendars, list events, .ics download.
+
+ Path conventions:
+ ``""`` / ``"/"`` -> list calendars from ``calendarList``
+ ``"/"`` -> list upcoming events in that calendar
+ ``"//"`` -> reserved for future event detail browse
+ """
+
+ _DEFAULT_EVENT_LIMIT = 100
+ _MAX_EVENT_LIMIT = 2500
+
+ def __init__(self, accessToken: str):
+ self._token = accessToken
+
+ async def browse(
+ self,
+ path: str,
+ filter: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ cleanPath = (path or "").strip("/")
+ if not cleanPath:
+ url = f"{_CALENDAR_BASE}/users/me/calendarList?maxResults=250"
+ result = await _googleGet(self._token, url)
+ if "error" in result:
+ logger.warning(f"Google Calendar list failed: {result['error']}")
+ return []
+ calendars = result.get("items", [])
+ if filter:
+ f = filter.lower()
+ calendars = [c for c in calendars if f in (c.get("summary") or "").lower()]
+ return [
+ ExternalEntry(
+ name=c.get("summaryOverride") or c.get("summary", ""),
+ path=f"/{c.get('id', '')}",
+ isFolder=True,
+ metadata={
+ "id": c.get("id"),
+ "primary": c.get("primary", False),
+ "accessRole": c.get("accessRole"),
+ "backgroundColor": c.get("backgroundColor"),
+ "timeZone": c.get("timeZone"),
+ },
+ )
+ for c in calendars
+ ]
+
+ from urllib.parse import quote
+ calendarId = cleanPath.split("/", 1)[0]
+ effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
+ url = (
+ f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events"
+ f"?maxResults={effectiveLimit}&orderBy=startTime&singleEvents=true"
+ )
+ result = await _googleGet(self._token, url)
+ if "error" in result:
+ logger.warning(f"Google Calendar events failed: {result['error']}")
+ return []
+ events = result.get("items", [])
+ return [
+ ExternalEntry(
+ name=ev.get("summary", "(no title)"),
+ path=f"/{calendarId}/{ev.get('id', '')}",
+ isFolder=False,
+ mimeType="text/calendar",
+ metadata={
+ "id": ev.get("id"),
+ "start": (ev.get("start") or {}).get("dateTime") or (ev.get("start") or {}).get("date"),
+ "end": (ev.get("end") or {}).get("dateTime") or (ev.get("end") or {}).get("date"),
+ "location": ev.get("location"),
+ "organizer": (ev.get("organizer") or {}).get("email"),
+ "htmlLink": ev.get("htmlLink"),
+ "status": ev.get("status"),
+ },
+ )
+ for ev in events
+ ]
+
+ async def download(self, path: str) -> DownloadResult:
+ from urllib.parse import quote
+ cleanPath = (path or "").strip("/")
+ if "/" not in cleanPath:
+ return DownloadResult()
+ calendarId, eventId = cleanPath.split("/", 1)
+ url = f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events/{quote(eventId, safe='')}"
+ ev = await _googleGet(self._token, url)
+ if "error" in ev:
+ logger.warning(f"Google Calendar event fetch failed: {ev['error']}")
+ return DownloadResult()
+ icsBytes = _googleEventToIcs(ev)
+ summary = ev.get("summary") or eventId
+ safeName = _googleSafeFileName(summary) or "event"
+ return DownloadResult(
+ data=icsBytes,
+ fileName=f"{safeName}.ics",
+ mimeType="text/calendar",
+ )
+
+ async def upload(self, path: str, data: bytes, fileName: str) -> dict:
+ return {"error": "Google Calendar upload not supported"}
+
+ async def search(
+ self,
+ query: str,
+ path: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ from urllib.parse import quote
+ calendarId = (path or "").strip("/").split("/", 1)[0] or "primary"
+ effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
+ url = (
+ f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events"
+ f"?q={quote(query, safe='')}&maxResults={effectiveLimit}&singleEvents=true"
+ )
+ result = await _googleGet(self._token, url)
+ if "error" in result:
+ return []
+ return [
+ ExternalEntry(
+ name=ev.get("summary", "(no title)"),
+ path=f"/{calendarId}/{ev.get('id', '')}",
+ isFolder=False,
+ mimeType="text/calendar",
+ metadata={
+ "id": ev.get("id"),
+ "start": (ev.get("start") or {}).get("dateTime") or (ev.get("start") or {}).get("date"),
+ "end": (ev.get("end") or {}).get("dateTime") or (ev.get("end") or {}).get("date"),
+ },
+ )
+ for ev in result.get("items", [])
+ ]
+
+
+class ContactsAdapter(ServiceAdapter):
+ """Google Contacts ServiceAdapter -- People API (read-only).
+
+ Path conventions:
+ ``""`` / ``"/"`` -> list contact groups (incl. virtual ``all`` for the user's connections)
+ ``"/all"`` -> list all ``people/me/connections``
+ ``"/"`` -> list members of that contact group (e.g. ``contactGroups/myFriends``)
+ ``"//"`` -> reserved for future detail browse;
+ ``personId`` is the suffix after ``people/``
+ """
+
+ _DEFAULT_CONTACT_LIMIT = 200
+ _MAX_CONTACT_LIMIT = 1000
+ _PERSON_FIELDS = (
+ "names,emailAddresses,phoneNumbers,organizations,addresses,biographies,memberships"
+ )
+
+ def __init__(self, accessToken: str):
+ self._token = accessToken
+
+ async def browse(
+ self,
+ path: str,
+ filter: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ cleanPath = (path or "").strip("/")
+ if not cleanPath:
+ entries: List[ExternalEntry] = [
+ ExternalEntry(
+ name="Alle Kontakte",
+ path="/all",
+ isFolder=True,
+ metadata={"id": "all", "isVirtual": True},
+ ),
+ ]
+ url = f"{_PEOPLE_BASE}/contactGroups?pageSize=200"
+ result = await _googleGet(self._token, url)
+ if "error" not in result:
+ for grp in result.get("contactGroups", []):
+ name = grp.get("formattedName") or grp.get("name") or ""
+ if not name:
+ continue
+ entries.append(
+ ExternalEntry(
+ name=name,
+ path=f"/{grp.get('resourceName', '')}",
+ isFolder=True,
+ metadata={
+ "id": grp.get("resourceName"),
+ "memberCount": grp.get("memberCount", 0),
+ "groupType": grp.get("groupType"),
+ },
+ )
+ )
+ else:
+ logger.warning(f"Google contactGroups list failed: {result['error']}")
+ return entries
+
+ from urllib.parse import quote
+ effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
+ groupRef = cleanPath.split("/", 1)[0]
+ if groupRef == "all":
+ url = (
+ f"{_PEOPLE_BASE}/people/me/connections"
+ f"?pageSize={min(effectiveLimit, 1000)}&personFields={self._PERSON_FIELDS}"
+ )
+ result = await _googleGet(self._token, url)
+ if "error" in result:
+ logger.warning(f"Google People connections failed: {result['error']}")
+ return []
+ people = result.get("connections", [])
+ else:
+ groupResource = groupRef
+ grpUrl = (
+ f"{_PEOPLE_BASE}/{quote(groupResource, safe='/')}"
+ f"?maxMembers={min(effectiveLimit, 1000)}"
+ )
+ grpResult = await _googleGet(self._token, grpUrl)
+ if "error" in grpResult:
+ logger.warning(f"Google contactGroup detail failed: {grpResult['error']}")
+ return []
+ memberResourceNames = grpResult.get("memberResourceNames") or []
+ if not memberResourceNames:
+ return []
+ chunkSize = 200
+ people: List[Dict[str, Any]] = []
+ for i in range(0, min(len(memberResourceNames), effectiveLimit), chunkSize):
+ chunk = memberResourceNames[i : i + chunkSize]
+ params = "&".join(f"resourceNames={quote(rn, safe='/')}" for rn in chunk)
+ batchUrl = f"{_PEOPLE_BASE}/people:batchGet?{params}&personFields={self._PERSON_FIELDS}"
+ batchResult = await _googleGet(self._token, batchUrl)
+ if "error" in batchResult:
+ logger.warning(f"Google People batchGet failed: {batchResult['error']}")
+ continue
+ for resp in batchResult.get("responses", []):
+ person = resp.get("person")
+ if person:
+ people.append(person)
+ if len(people) >= effectiveLimit:
+ break
+
+ return [
+ ExternalEntry(
+ name=_googlePersonLabel(p) or "(no name)",
+ path=f"/{groupRef}/{(p.get('resourceName', '') or '').split('/')[-1]}",
+ isFolder=False,
+ mimeType="text/vcard",
+ metadata={
+ "id": p.get("resourceName"),
+ "emails": [e.get("value") for e in (p.get("emailAddresses") or []) if e.get("value")],
+ "phones": [pn.get("value") for pn in (p.get("phoneNumbers") or []) if pn.get("value")],
+ "organization": (p.get("organizations") or [{}])[0].get("name") if p.get("organizations") else None,
+ },
+ )
+ for p in people[:effectiveLimit]
+ ]
+
+ async def download(self, path: str) -> DownloadResult:
+ from urllib.parse import quote
+ cleanPath = (path or "").strip("/")
+ if "/" not in cleanPath:
+ return DownloadResult()
+ personSuffix = cleanPath.split("/")[-1]
+ if not personSuffix:
+ return DownloadResult()
+ url = f"{_PEOPLE_BASE}/people/{quote(personSuffix, safe='')}?personFields={self._PERSON_FIELDS}"
+ person = await _googleGet(self._token, url)
+ if "error" in person:
+ logger.warning(f"Google People fetch failed: {person['error']}")
+ return DownloadResult()
+ vcfBytes = _googlePersonToVcard(person)
+ label = _googlePersonLabel(person) or personSuffix
+ safeName = _googleSafeFileName(label) or "contact"
+ return DownloadResult(
+ data=vcfBytes,
+ fileName=f"{safeName}.vcf",
+ mimeType="text/vcard",
+ )
+
+ async def upload(self, path: str, data: bytes, fileName: str) -> dict:
+ return {"error": "Google Contacts upload not supported"}
+
+ async def search(
+ self,
+ query: str,
+ path: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ from urllib.parse import quote
+ effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
+ url = (
+ f"{_PEOPLE_BASE}/people:searchContacts"
+ f"?query={quote(query, safe='')}&pageSize={min(effectiveLimit, 30)}"
+ f"&readMask={self._PERSON_FIELDS}"
+ )
+ result = await _googleGet(self._token, url)
+ if "error" in result:
+ return []
+ entries: List[ExternalEntry] = []
+ for r in result.get("results", []):
+ p = r.get("person") or {}
+ entries.append(
+ ExternalEntry(
+ name=_googlePersonLabel(p) or "(no name)",
+ path=f"/search/{(p.get('resourceName', '') or '').split('/')[-1]}",
+ isFolder=False,
+ mimeType="text/vcard",
+ metadata={
+ "id": p.get("resourceName"),
+ "emails": [e.get("value") for e in (p.get("emailAddresses") or []) if e.get("value")],
+ },
+ )
+ )
+ return entries
+
+
+def _googleSafeFileName(name: str) -> str:
+ import re
+ return re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", name or "")[:80].strip(". ")
+
+
+def _googleIcsEscape(value: str) -> str:
+ if value is None:
+ return ""
+ return (
+ value.replace("\\", "\\\\")
+ .replace(";", "\\;")
+ .replace(",", "\\,")
+ .replace("\r\n", "\\n")
+ .replace("\n", "\\n")
+ )
+
+
+def _googleIcsDateTime(value: Optional[str]) -> Optional[str]:
+ """Convert a Google Calendar dateTime/date string to RFC 5545 format (UTC)."""
+ if not value:
+ return None
+ from datetime import datetime, timezone
+ try:
+ if "T" not in value:
+ dt = datetime.strptime(value, "%Y-%m-%d")
+ return dt.strftime("%Y%m%d")
+ normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value
+ dt = datetime.fromisoformat(normalized)
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=timezone.utc)
+ return dt.astimezone(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+ except (TypeError, ValueError):
+ return None
+
+
+def _googleEventToIcs(event: Dict[str, Any]) -> bytes:
+ """Build a minimal RFC 5545 VCALENDAR/VEVENT for a Google Calendar event."""
+ from datetime import datetime, timezone
+ uid = event.get("iCalUID") or event.get("id") or "unknown@poweron"
+ summary = _googleIcsEscape(event.get("summary") or "")
+ location = _googleIcsEscape(event.get("location") or "")
+ description = _googleIcsEscape(event.get("description") or "")
+ rawStart = (event.get("start") or {}).get("dateTime") or (event.get("start") or {}).get("date")
+ rawEnd = (event.get("end") or {}).get("dateTime") or (event.get("end") or {}).get("date")
+ isAllDay = bool((event.get("start") or {}).get("date") and not (event.get("start") or {}).get("dateTime"))
+ dtstart = _googleIcsDateTime(rawStart)
+ dtend = _googleIcsDateTime(rawEnd)
+ dtstamp = _googleIcsDateTime(event.get("updated")) or datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+
+ lines = [
+ "BEGIN:VCALENDAR",
+ "VERSION:2.0",
+ "PRODID:-//PowerOn//Google-Calendar-Adapter//EN",
+ "CALSCALE:GREGORIAN",
+ "BEGIN:VEVENT",
+ f"UID:{uid}",
+ f"DTSTAMP:{dtstamp}",
+ ]
+ if dtstart:
+ lines.append(f"DTSTART;VALUE=DATE:{dtstart}" if isAllDay else f"DTSTART:{dtstart}")
+ if dtend:
+ lines.append(f"DTEND;VALUE=DATE:{dtend}" if isAllDay else f"DTEND:{dtend}")
+ if summary:
+ lines.append(f"SUMMARY:{summary}")
+ if location:
+ lines.append(f"LOCATION:{location}")
+ if description:
+ lines.append(f"DESCRIPTION:{description}")
+ organizer = (event.get("organizer") or {}).get("email")
+ if organizer:
+ lines.append(f"ORGANIZER:mailto:{organizer}")
+ for att in (event.get("attendees") or []):
+ addr = att.get("email")
+ if addr:
+ lines.append(f"ATTENDEE:mailto:{addr}")
+ lines.append("END:VEVENT")
+ lines.append("END:VCALENDAR")
+ return ("\r\n".join(lines) + "\r\n").encode("utf-8")
+
+
+def _googlePersonLabel(person: Dict[str, Any]) -> str:
+ names = person.get("names") or []
+ if names:
+ primary = names[0]
+ display = primary.get("displayName") or ""
+ if display:
+ return display
+ given = primary.get("givenName") or ""
+ family = primary.get("familyName") or ""
+ full = f"{given} {family}".strip()
+ if full:
+ return full
+ orgs = person.get("organizations") or []
+ if orgs and orgs[0].get("name"):
+ return orgs[0]["name"]
+ emails = person.get("emailAddresses") or []
+ if emails and emails[0].get("value"):
+ return emails[0]["value"]
+ return ""
+
+
+def _googlePersonToVcard(person: Dict[str, Any]) -> bytes:
+ """Build a vCard 3.0 from a Google People API person payload."""
+ names = person.get("names") or []
+ primaryName = names[0] if names else {}
+ given = primaryName.get("givenName") or ""
+ family = primaryName.get("familyName") or ""
+ middle = primaryName.get("middleName") or ""
+ fn = primaryName.get("displayName") or _googlePersonLabel(person) or ""
+
+ lines = [
+ "BEGIN:VCARD",
+ "VERSION:3.0",
+ f"N:{family};{given};{middle};;",
+ f"FN:{fn}",
+ ]
+ orgs = person.get("organizations") or []
+ if orgs:
+ org = orgs[0]
+ orgVal = org.get("name") or ""
+ if org.get("department"):
+ orgVal = f"{orgVal};{org['department']}"
+ if orgVal:
+ lines.append(f"ORG:{orgVal}")
+ if org.get("title"):
+ lines.append(f"TITLE:{org['title']}")
+ for em in (person.get("emailAddresses") or []):
+ addr = em.get("value")
+ if not addr:
+ continue
+ emailType = (em.get("type") or "INTERNET").upper()
+ lines.append(f"EMAIL;TYPE={emailType}:{addr}")
+ for ph in (person.get("phoneNumbers") or []):
+ val = ph.get("value")
+ if not val:
+ continue
+ phType = (ph.get("type") or "VOICE").upper()
+ lines.append(f"TEL;TYPE={phType}:{val}")
+ for addr in (person.get("addresses") or []):
+ street = addr.get("streetAddress") or ""
+ city = addr.get("city") or ""
+ region = addr.get("region") or ""
+ postal = addr.get("postalCode") or ""
+ country = addr.get("country") or ""
+ if any([street, city, region, postal, country]):
+ adrType = (addr.get("type") or "OTHER").upper()
+ lines.append(f"ADR;TYPE={adrType}:;;{street};{city};{region};{postal};{country}")
+ bios = person.get("biographies") or []
+ if bios and bios[0].get("value"):
+ lines.append(f"NOTE:{_googleIcsEscape(bios[0]['value'])}")
+ lines.append(f"UID:{person.get('resourceName', '')}")
+ lines.append("END:VCARD")
+ return ("\r\n".join(lines) + "\r\n").encode("utf-8")
+
+
class GoogleConnector(ProviderConnector):
- """Google ProviderConnector -- 1 connection -> Drive + Gmail."""
+ """Google ProviderConnector -- 1 connection -> Drive + Gmail + Calendar + Contacts."""
_SERVICE_MAP = {
"drive": DriveAdapter,
"gmail": GmailAdapter,
+ "calendar": CalendarAdapter,
+ "contact": ContactsAdapter,
}
def getAvailableServices(self) -> List[str]:
diff --git a/modules/connectors/providerInfomaniak/connectorInfomaniak.py b/modules/connectors/providerInfomaniak/connectorInfomaniak.py
index a96efe72..80fa4d17 100644
--- a/modules/connectors/providerInfomaniak/connectorInfomaniak.py
+++ b/modules/connectors/providerInfomaniak/connectorInfomaniak.py
@@ -1,24 +1,41 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
-"""Infomaniak ProviderConnector -- kDrive and Mail via Infomaniak OAuth.
+"""Infomaniak ProviderConnector -- kDrive + Calendar + Contacts via PAT.
-All ServiceAdapters share the same OAuth access token obtained from the
-UserConnection (authority=infomaniak).
+The PAT carries one or more of these scopes:
-Path conventions (leading slash):
- kDrive:
- / -- list drives the user has access to
- /{driveId} -- root folder of a drive (children)
+- ``drive`` -> kDrive (active here)
+- ``workspace:calendar`` -> Calendar (active here)
+- ``workspace:contact`` -> Contacts (active here)
+- ``workspace:mail`` -> Mail (no public PAT-friendly endpoint yet)
+
+Mail is intentionally NOT in ``_SERVICE_MAP`` until we find a
+PAT-authenticated endpoint -- the public ``/1/mail`` and
+``mail.infomaniak.com/api/pim/mail*`` routes either don't exist (404
+nginx) or 302 to OAuth, so wiring a stub adapter would only confuse
+users.
+
+Path conventions (leading slash, ``ServiceAdapter`` paths always start with
+``/``):
+ kDrive (api.infomaniak.com, requires ``account_id`` query arg):
+ / -- list drives in the user's account
+ /{driveId} -- root folder of a drive
/{driveId}/{fileId} -- folder children OR file (download)
- Mail:
- / -- list user's mailboxes
- /{mailboxId} -- folders in mailbox
- /{mailboxId}/{folderId} -- messages in folder
- /{mailboxId}/{folderId}/{uid} -- single message (download as .eml)
+ Calendar (calendar.infomaniak.com PIM):
+ / -- list calendars accessible to the user
+ /{calendarId} -- events of one calendar
+ /{calendarId}/{eventId} -- single event (.ics download)
+ Contacts (contacts.infomaniak.com PIM):
+ / -- list address books
+ /{addressBookId} -- contacts in that address book
+ /{addressBookId}/{contactId} -- single contact (.vcf download)
"""
import logging
-from typing import Any, Dict, List, Optional
+import re
+from datetime import datetime, timedelta, timezone
+from typing import Any, Dict, List, Optional, TypedDict
+from urllib.parse import quote
import aiohttp
@@ -32,39 +49,74 @@ from modules.datamodels.datamodelDataSource import ExternalEntry
logger = logging.getLogger(__name__)
_API_BASE = "https://api.infomaniak.com"
+_CALENDAR_BASE = "https://calendar.infomaniak.com"
+_CONTACTS_BASE = "https://contacts.infomaniak.com"
+_PIM_PREFIX = "/api/pim"
-async def _infomaniakGet(token: str, endpoint: str) -> Dict[str, Any]:
- """Single GET call against the Infomaniak API. Returns parsed JSON or {'error': ...}."""
- url = f"{_API_BASE}/{endpoint.lstrip('/')}"
+class InfomaniakOwnerIdentity(TypedDict):
+ """Minimal identity payload for the PAT owner.
+
+ ``accountId`` is the only field the kDrive adapter needs at runtime.
+ ``displayName`` is harvested for the connection UI; both fields come
+ from the same PIM Owner record.
+ """
+
+ accountId: int
+ displayName: Optional[str]
+
+
+class InfomaniakIdentityError(RuntimeError):
+ """Raised when no owner identity can be derived from a PAT."""
+
+
+async def _infomaniakGet(
+ token: str,
+ endpoint: str,
+ baseUrl: str = _API_BASE,
+) -> Dict[str, Any]:
+ """Single GET against an Infomaniak host.
+
+ ``endpoint`` is appended to ``baseUrl`` (handles leading slash). Returns
+ parsed JSON, or ``{'error': ...}`` for non-2xx / network failures.
+ """
+ url = f"{baseUrl.rstrip('/')}/{endpoint.lstrip('/')}"
headers = {"Authorization": f"Bearer {token}", "Accept": "application/json"}
timeout = aiohttp.ClientTimeout(total=20)
try:
async with aiohttp.ClientSession(timeout=timeout) as session:
- async with session.get(url, headers=headers) as resp:
+ async with session.get(url, headers=headers, allow_redirects=False) as resp:
if resp.status in (200, 201):
return await resp.json()
errorText = await resp.text()
- logger.warning(f"Infomaniak API {resp.status}: {errorText[:300]}")
+ logger.warning(f"Infomaniak GET {url} -> {resp.status}: {errorText[:300]}")
return {"error": f"{resp.status}: {errorText[:200]}"}
except Exception as e:
+ logger.error(f"Infomaniak GET {url} crashed: {e}")
return {"error": str(e)}
-async def _infomaniakDownload(token: str, endpoint: str) -> Optional[bytes]:
- """Binary download from the Infomaniak API. Returns bytes or None on error."""
- url = f"{_API_BASE}/{endpoint.lstrip('/')}"
+async def _infomaniakDownload(
+ token: str,
+ endpoint: str,
+ baseUrl: str = _API_BASE,
+) -> Optional[bytes]:
+ """Binary download from an Infomaniak host. Returns bytes or ``None``."""
+ url = f"{baseUrl.rstrip('/')}/{endpoint.lstrip('/')}"
headers = {"Authorization": f"Bearer {token}"}
timeout = aiohttp.ClientTimeout(total=120)
try:
async with aiohttp.ClientSession(timeout=timeout) as session:
- async with session.get(url, headers=headers) as resp:
+ async with session.get(url, headers=headers, allow_redirects=False) as resp:
if resp.status == 200:
return await resp.read()
- logger.warning(f"Infomaniak download {resp.status}: {(await resp.text())[:300]}")
+ logger.warning(
+ f"Infomaniak download {url} -> {resp.status}: "
+ f"{(await resp.text())[:300]}"
+ )
return None
except Exception as e:
- logger.error(f"Infomaniak download error: {e}")
+ logger.error(f"Infomaniak download {url} crashed: {e}")
return None
@@ -75,11 +127,136 @@ def _unwrapData(payload: Any) -> Any:
return payload
+def _firstOwnerRecord(payload: Any, listKey: str) -> Optional[Dict[str, Any]]:
+ """Pick the first user-owned record from a PIM list response.
+
+ Both PIM Calendar (``calendars``) and PIM Contacts (``addressbooks``)
+ return ``{result, data: {<listKey>: [...]}}``. Owner-records have a
+ positive numeric ``user_id`` and an integer ``account_id``; shared /
+ public records (e.g. holiday calendars) carry ``user_id = -1`` and
+ ``account_id = null`` and are skipped.
+ """
+ data = _unwrapData(payload) if payload else None
+ if not isinstance(data, dict):
+ return None
+ records = data.get(listKey)
+ if not isinstance(records, list):
+ return None
+ for rec in records:
+ if not isinstance(rec, dict):
+ continue
+ userId = rec.get("user_id")
+ accountId = rec.get("account_id")
+ if isinstance(userId, int) and userId > 0 and isinstance(accountId, int):
+ return rec
+ return None
+
+
+async def resolveOwnerIdentity(token: str) -> InfomaniakOwnerIdentity:
+ """Derive the PAT owner's display identity from PIM Calendar / Contacts.
+
+ Used purely for UI display on the connection (``externalUsername`` /
+ ``externalId``). The PIM endpoints embed the kSuite ``account_id``
+ and the user's display name in their owner records, which is what
+ the ConnectionsPage shows.
+
+ Calendar is queried first because it is the more universally
+ provisioned PIM service; Contacts is the equivalent fallback.
+ Raises :class:`InfomaniakIdentityError` when neither yields an
+ owner record.
+ """
+ sources = (
+ (_CALENDAR_BASE, f"{_PIM_PREFIX}/calendar", "calendars"),
+ (_CONTACTS_BASE, f"{_PIM_PREFIX}/addressbook", "addressbooks"),
+ )
+ for baseUrl, endpoint, listKey in sources:
+ payload = await _infomaniakGet(token, endpoint, baseUrl=baseUrl)
+ if isinstance(payload, dict) and payload.get("error"):
+ continue
+ owner = _firstOwnerRecord(payload, listKey)
+ if owner is None:
+ continue
+ return InfomaniakOwnerIdentity(
+ accountId=int(owner["account_id"]),
+ displayName=owner.get("name") or None,
+ )
+ raise InfomaniakIdentityError(
+ "Could not resolve Infomaniak owner identity from PIM Calendar or "
+ "Contacts. The PAT must carry 'workspace:calendar' or "
+ "'workspace:contact' so we can label the connection."
+ )
+
+
+async def resolveAccessibleAccountIds(token: str) -> List[int]:
+ """Return every Infomaniak account_id the PAT has access to.
+
+ Hits ``GET /1/accounts`` -- the only Infomaniak endpoint that lists
+ *all* account_ids of a token in one call. Requires the PAT scope
+ ``accounts`` (Infomaniak responds 403 with
+ ``code: 'all_scopes', context: {scopes: ['accounts']}`` if missing).
+
+ The kSuite account_id from PIM (``resolveOwnerIdentity``) is **not**
+ sufficient for kDrive: a standalone or free-tier kDrive lives on a
+ different account_id than its kSuite counterpart. ``/2/drive`` is
+ queried per account_id, so we resolve them all here and union the
+ drive listings in :class:`KdriveAdapter`.
+
+ Raises :class:`InfomaniakIdentityError` when the PAT does not carry
+ the ``accounts`` scope or the response is malformed.
+ """
+ payload = await _infomaniakGet(token, "/1/accounts")
+ if isinstance(payload, dict) and payload.get("error"):
+ raise InfomaniakIdentityError(
+ "Could not list Infomaniak accounts. The PAT must carry the "
+ "'accounts' scope so kDrive can discover the owning account "
+ f"(/1/accounts said: {payload['error']})."
+ )
+ data = _unwrapData(payload)
+ if not isinstance(data, list):
+ raise InfomaniakIdentityError(
+ "Unexpected /1/accounts response shape (expected a list)."
+ )
+ accountIds: List[int] = []
+ for entry in data:
+ if not isinstance(entry, dict):
+ continue
+ accountId = entry.get("id")
+ if isinstance(accountId, int):
+ accountIds.append(accountId)
+ if not accountIds:
+ raise InfomaniakIdentityError(
+ "/1/accounts returned no accounts -- the PAT cannot reach any "
+ "Infomaniak account."
+ )
+ return accountIds
+
+
class KdriveAdapter(ServiceAdapter):
- """kDrive ServiceAdapter -- browse drives, folders, and files."""
+ """kDrive ServiceAdapter -- browse drives, folders, files within all
+ accounts the PAT can reach.
+
+ Infomaniak's ``/2/drive`` listing endpoint requires the integer
+ ``account_id`` of the *drive-owning* account as a query arg. A user
+ may own kDrives in several accounts (typically a kSuite account
+ plus a standalone / free-tier kDrive account), and the kSuite
+ account_id from PIM does **not** cover the standalone case.
+
+ The only PAT-friendly way to enumerate every account_id is
+ :func:`resolveAccessibleAccountIds` (``GET /1/accounts`` with the
+ ``accounts`` scope). This adapter therefore resolves the full
+ account list once per instance and unions the ``/2/drive`` listing
+ across all of them in :meth:`_listDrives`.
+ """
def __init__(self, accessToken: str):
self._token = accessToken
+ self._accountIds: Optional[List[int]] = None
+
+ async def _ensureAccountIds(self) -> List[int]:
+ if self._accountIds is not None:
+ return self._accountIds
+ self._accountIds = await resolveAccessibleAccountIds(self._token)
+ return self._accountIds
async def browse(
self,
@@ -101,26 +278,41 @@ class KdriveAdapter(ServiceAdapter):
return await self._listChildren(driveId, fileId=fileId, limit=limit)
async def _listDrives(self) -> List[ExternalEntry]:
- result = await _infomaniakGet(self._token, "/2/drive")
- if isinstance(result, dict) and result.get("error"):
- logger.warning(f"kDrive list-drives failed: {result['error']}")
- return []
- data = _unwrapData(result)
- drives = data.get("drives", {}).get("accounts", []) if isinstance(data, dict) else []
- if not drives and isinstance(data, list):
- drives = data
+ accountIds = await self._ensureAccountIds()
+ seen: set = set()
entries: List[ExternalEntry] = []
- for drive in drives:
- driveId = str(drive.get("id", ""))
- if not driveId:
+ for accountId in accountIds:
+ result = await _infomaniakGet(
+ self._token, f"/2/drive?account_id={accountId}"
+ )
+ if isinstance(result, dict) and result.get("error"):
+ logger.warning(
+ f"kDrive list-drives for account {accountId} failed: {result['error']}"
+ )
continue
- name = drive.get("name") or driveId
- entries.append(ExternalEntry(
- name=name,
- path=f"/{driveId}",
- isFolder=True,
- metadata={"id": driveId, "kind": "drive"},
- ))
+ data = _unwrapData(result)
+ drives: List[Dict[str, Any]]
+ if isinstance(data, list):
+ drives = [d for d in data if isinstance(d, dict)]
+ elif isinstance(data, dict):
+ drives = data.get("drives", {}).get("accounts", []) or []
+ else:
+ drives = []
+ for drive in drives:
+ driveId = str(drive.get("id", ""))
+ if not driveId or driveId in seen:
+ continue
+ seen.add(driveId)
+ entries.append(ExternalEntry(
+ name=drive.get("name") or driveId,
+ path=f"/{driveId}",
+ isFolder=True,
+ metadata={
+ "id": driveId,
+ "kind": "drive",
+ "accountId": accountId,
+ },
+ ))
return entries
async def _listChildren(
@@ -129,9 +321,6 @@ class KdriveAdapter(ServiceAdapter):
fileId: Optional[str],
limit: Optional[int],
) -> List[ExternalEntry]:
- # Infomaniak treats every folder (including drive root) as a file-id.
- # When fileId is None, we ask the drive for root children via the
- # documented `/files` collection endpoint.
if fileId is None:
endpoint = f"/2/drive/{driveId}/files"
else:
@@ -142,7 +331,9 @@ class KdriveAdapter(ServiceAdapter):
result = await _infomaniakGet(self._token, endpoint)
if isinstance(result, dict) and result.get("error"):
- logger.warning(f"kDrive list-children {driveId}/{fileId or 'root'} failed: {result['error']}")
+ logger.warning(
+ f"kDrive list-children {driveId}/{fileId or 'root'} failed: {result['error']}"
+ )
return []
data = _unwrapData(result)
items = data if isinstance(data, list) else data.get("items", []) if isinstance(data, dict) else []
@@ -179,7 +370,9 @@ class KdriveAdapter(ServiceAdapter):
fileName = data.get("name") or fileId
mimeType = data.get("mime_type") or mimeType
- content = await _infomaniakDownload(self._token, f"/2/drive/{driveId}/files/{fileId}/download")
+ content = await _infomaniakDownload(
+ self._token, f"/2/drive/{driveId}/files/{fileId}/download"
+ )
if content is None:
return DownloadResult()
return DownloadResult(data=content, fileName=fileName, mimeType=mimeType)
@@ -227,11 +420,40 @@ class KdriveAdapter(ServiceAdapter):
return entries
-class MailAdapter(ServiceAdapter):
- """Infomaniak Mail ServiceAdapter -- browse mailboxes, folders and messages."""
+def _safeFileName(label: str, fallback: str) -> str:
+ """Sanitize a string for use as a filename. Trims and caps at 80 chars."""
+ cleaned = re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", str(label or "")).strip(". ")
+ return cleaned[:80] or fallback
- _DEFAULT_MESSAGE_LIMIT = 100
- _MAX_MESSAGE_LIMIT = 500
+
+class CalendarAdapter(ServiceAdapter):
+ """Infomaniak Calendar adapter -- browse calendars + events, .ics download.
+
+ Uses the public PIM endpoints at ``calendar.infomaniak.com/api/pim``,
+ which authenticate with the PAT scope ``workspace:calendar``.
+
+ Path layout:
+ ``/`` -> list calendars
+ ``/{calendarId}`` -> list events of that calendar
+ ``/{calendarId}/{eventId}`` -> single event (download as .ics)
+
+ Endpoint particulars:
+ Listing events runs against ``/api/pim/event`` with the calendar
+ id as a query arg (the per-calendar nested route
+ ``/calendar/{id}/event`` is **not** PAT-friendly -- it 302s to the
+ OAuth login page). Infomaniak enforces a hard ``from``/``to``
+ window of less than 3 months, so this adapter queries a fixed
+ 90-day window anchored on today (30 days back, 60 days forward),
+ which covers typical UDB browsing. Date format is ``Y-m-d H:i:s``.
+ Event detail and ``.ics`` export are addressed by event id alone
+ (``/api/pim/event/{eventId}`` and ``.../export``); the calendar
+ id from the path is kept only for tree-navigation continuity.
+ """
+
+ # Vendor enforces ``Range must be lower than 3 months``. We stay
+ # comfortably below to keep one call per browse.
+ _PAST_DAYS = 30
+ _FUTURE_DAYS = 60
def __init__(self, accessToken: str):
self._token = accessToken
@@ -242,169 +464,461 @@ class MailAdapter(ServiceAdapter):
filter: Optional[str] = None,
limit: Optional[int] = None,
) -> List[ExternalEntry]:
- cleanPath = (path or "").strip("/")
- segments = [s for s in cleanPath.split("/") if s]
-
+ segments = [s for s in (path or "").strip("/").split("/") if s]
if not segments:
- return await self._listMailboxes()
+ return await self._listCalendars()
if len(segments) == 1:
- return await self._listFolders(segments[0])
- if len(segments) == 2:
- return await self._listMessages(segments[0], segments[1], limit=limit)
+ return await self._listEvents(segments[0], limit=limit)
return []
- async def _listMailboxes(self) -> List[ExternalEntry]:
- result = await _infomaniakGet(self._token, "/1/mail")
- if isinstance(result, dict) and result.get("error"):
- logger.warning(f"Mail list-mailboxes failed: {result['error']}")
- return []
- data = _unwrapData(result)
- mailboxes = data if isinstance(data, list) else data.get("mailboxes", []) if isinstance(data, dict) else []
- entries: List[ExternalEntry] = []
- for mb in mailboxes:
- mbId = str(mb.get("id") or mb.get("mailbox_id") or "")
- if not mbId:
- continue
- entries.append(ExternalEntry(
- name=mb.get("email") or mb.get("name") or mbId,
- path=f"/{mbId}",
- isFolder=True,
- metadata={"id": mbId, "kind": "mailbox"},
- ))
- return entries
-
- async def _listFolders(self, mailboxId: str) -> List[ExternalEntry]:
- result = await _infomaniakGet(self._token, f"/1/mail/{mailboxId}/folder")
- if isinstance(result, dict) and result.get("error"):
- logger.warning(f"Mail list-folders {mailboxId} failed: {result['error']}")
- return []
- data = _unwrapData(result)
- folders = data if isinstance(data, list) else data.get("folders", []) if isinstance(data, dict) else []
- entries: List[ExternalEntry] = []
- for f in folders:
- folderId = str(f.get("id") or f.get("path") or "")
- if not folderId:
- continue
- entries.append(ExternalEntry(
- name=f.get("name") or folderId,
- path=f"/{mailboxId}/{folderId}",
- isFolder=True,
- metadata={"id": folderId, "kind": "folder"},
- ))
- return entries
-
- async def _listMessages(
- self,
- mailboxId: str,
- folderId: str,
- limit: Optional[int],
- ) -> List[ExternalEntry]:
- effectiveLimit = self._DEFAULT_MESSAGE_LIMIT if limit is None else max(
- 1, min(int(limit), self._MAX_MESSAGE_LIMIT),
+ async def _listCalendars(self) -> List[ExternalEntry]:
+ result = await _infomaniakGet(
+ self._token, f"{_PIM_PREFIX}/calendar", baseUrl=_CALENDAR_BASE
)
- endpoint = f"/1/mail/{mailboxId}/folder/{folderId}/message?per_page={effectiveLimit}"
- result = await _infomaniakGet(self._token, endpoint)
if isinstance(result, dict) and result.get("error"):
+ logger.warning(f"Calendar list-calendars failed: {result['error']}")
return []
data = _unwrapData(result)
- messages = data if isinstance(data, list) else data.get("messages", []) if isinstance(data, dict) else []
-
+ calendars = data.get("calendars", []) if isinstance(data, dict) else []
entries: List[ExternalEntry] = []
- for msg in messages:
- uid = str(msg.get("uid") or msg.get("id") or "")
- if not uid:
+ for cal in calendars:
+ calId = str(cal.get("id", ""))
+ if not calId:
continue
- subject = msg.get("subject") or "(no subject)"
+ isShared = (cal.get("user_id") or 0) <= 0 or cal.get("account_id") is None
entries.append(ExternalEntry(
- name=subject,
- path=f"/{mailboxId}/{folderId}/{uid}",
- isFolder=False,
- lastModified=msg.get("date") or msg.get("internal_date"),
+ name=cal.get("name") or calId,
+ path=f"/{calId}",
+ isFolder=True,
metadata={
- "uid": uid,
- "from": msg.get("from") or msg.get("sender", ""),
- "snippet": msg.get("preview", ""),
+ "id": calId,
+ "kind": "calendar",
+ "color": cal.get("color"),
+ "shared": isShared,
+ "default": bool(cal.get("default")),
},
))
return entries
+ def _eventWindow(self) -> tuple:
+ now = datetime.now(timezone.utc)
+ fromStr = (now - timedelta(days=self._PAST_DAYS)).strftime("%Y-%m-%d %H:%M:%S")
+ toStr = (now + timedelta(days=self._FUTURE_DAYS)).strftime("%Y-%m-%d %H:%M:%S")
+ return fromStr, toStr
+
+ async def _listEvents(
+ self,
+ calendarId: str,
+ limit: Optional[int],
+ ) -> List[ExternalEntry]:
+ fromStr, toStr = self._eventWindow()
+ endpoint = (
+ f"{_PIM_PREFIX}/event"
+ f"?calendar_id={calendarId}"
+ f"&from={quote(fromStr)}"
+ f"&to={quote(toStr)}"
+ )
+ result = await _infomaniakGet(self._token, endpoint, baseUrl=_CALENDAR_BASE)
+ if isinstance(result, dict) and result.get("error"):
+ logger.warning(f"Calendar list-events {calendarId} failed: {result['error']}")
+ return []
+ data = _unwrapData(result)
+ events = data if isinstance(data, list) else data.get("events", []) if isinstance(data, dict) else []
+ entries: List[ExternalEntry] = []
+ for ev in events:
+ evId = str(ev.get("id") or ev.get("uid") or "")
+ if not evId:
+ continue
+ title = ev.get("title") or ev.get("summary") or "(no title)"
+ entries.append(ExternalEntry(
+ name=title,
+ path=f"/{calendarId}/{evId}",
+ isFolder=False,
+ metadata={
+ "id": evId,
+ "kind": "event",
+ "start": ev.get("start"),
+ "end": ev.get("end"),
+ "location": ev.get("location"),
+ "updated": ev.get("updated_at"),
+ },
+ ))
+ if limit is not None:
+ return entries[: int(limit)]
+ return entries
+
async def download(self, path: str) -> DownloadResult:
- import re
segments = [s for s in (path or "").strip("/").split("/") if s]
- if len(segments) < 3:
+ if len(segments) < 2:
return DownloadResult()
- mailboxId, folderId, uid = segments[0], segments[1], segments[2]
+ eventId = segments[1]
content = await _infomaniakDownload(
- self._token, f"/1/mail/{mailboxId}/folder/{folderId}/message/{uid}/download",
+ self._token,
+ f"{_PIM_PREFIX}/event/{eventId}/export",
+ baseUrl=_CALENDAR_BASE,
)
if content is None:
return DownloadResult()
+ title = eventId
meta = await _infomaniakGet(
- self._token, f"/1/mail/{mailboxId}/folder/{folderId}/message/{uid}",
+ self._token,
+ f"{_PIM_PREFIX}/event/{eventId}",
+ baseUrl=_CALENDAR_BASE,
)
- subject = uid
if isinstance(meta, dict) and not meta.get("error"):
unwrapped = _unwrapData(meta)
if isinstance(unwrapped, dict):
- subject = unwrapped.get("subject") or uid
- safeName = re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", subject)[:80].strip(". ") or "email"
+ event = unwrapped.get("event") if "event" in unwrapped else unwrapped
+ if isinstance(event, dict):
+ title = event.get("title") or event.get("summary") or eventId
return DownloadResult(
data=content,
- fileName=f"{safeName}.eml",
- mimeType="message/rfc822",
+ fileName=f"{_safeFileName(title, 'event')}.ics",
+ mimeType="text/calendar",
)
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
- return {"error": "Mail upload not applicable"}
+ return {"error": "Calendar upload not yet implemented"}
async def search(
self,
query: str,
path: Optional[str] = None,
limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ # The PIM Calendar API has no public search endpoint we can rely on.
+ # Cheap fallback: list events in the current calendar (or all of
+ # them) within the default window and filter case-insensitively on
+ # title/location.
+ calendars = (
+ await self._listCalendars()
+ if not path
+ else [ExternalEntry(name="", path=path, isFolder=True)]
+ )
+ if not calendars:
+ return []
+ needle = (query or "").strip().lower()
+ results: List[ExternalEntry] = []
+ for cal in calendars:
+ calId = (cal.metadata or {}).get("id") or cal.path.strip("/")
+ for ev in await self._listEvents(calId, limit=limit):
+ hay = " ".join(
+ str(v) for v in (
+ ev.name,
+ (ev.metadata or {}).get("location") or "",
+ )
+ ).lower()
+ if not needle or needle in hay:
+ results.append(ev)
+ if limit is not None and len(results) >= int(limit):
+ break
+ return results[: int(limit)] if limit is not None else results
+
+
+def _vcardEscape(value: Any) -> str:
+ """Escape a value for vCard 3.0 -- backslash, comma, semicolon, newline."""
+ text = "" if value is None else str(value)
+ return (
+ text.replace("\\", "\\\\")
+ .replace(";", "\\;")
+ .replace(",", "\\,")
+ .replace("\r\n", "\\n")
+ .replace("\n", "\\n")
+ )
+
+
+def _renderInfomaniakVcard(record: Dict[str, Any]) -> str:
+ """Render an Infomaniak contact record as a vCard 3.0 string.
+
+ The Contacts PIM ``/contact/{id}/export`` endpoint is not PAT-friendly
+ (302s to the OAuth login page), and ``/contact/{id}`` returns 500 with
+ a PAT, so we cannot retrieve the canonical .vcf or detail blob from
+ Infomaniak. Instead we synthesize a vCard 3.0 payload from the
+ listing record fetched with ``with=emails,phones,addresses,details``.
+
+ vCard 3.0 is the common-denominator format universally accepted by
+ Outlook, Google Contacts, Apple Contacts and Thunderbird (4.0 still
+ has poor Outlook import compatibility).
+ """
+ firstname = record.get("firstname") or ""
+ lastname = record.get("lastname") or ""
+ fullName = (
+ record.get("name")
+ or " ".join(p for p in (firstname, lastname) if p).strip()
+ or "Contact"
+ )
+ organization = record.get("organization") or ""
+ note = record.get("note") or ""
+ emails = record.get("emails") or []
+ phones = record.get("phones") or []
+ addresses = record.get("addresses") or []
+ websites = record.get("websites") or []
+
+ lines = ["BEGIN:VCARD", "VERSION:3.0"]
+ # N: Last;First;Middle;Prefix;Suffix
+ lines.append(f"N:{_vcardEscape(lastname)};{_vcardEscape(firstname)};;;")
+ lines.append(f"FN:{_vcardEscape(fullName)}")
+ if organization:
+ lines.append(f"ORG:{_vcardEscape(organization)}")
+ for email in emails:
+ if isinstance(email, str) and email:
+ lines.append(f"EMAIL;TYPE=INTERNET:{_vcardEscape(email)}")
+ elif isinstance(email, dict) and email.get("address"):
+ lines.append(f"EMAIL;TYPE=INTERNET:{_vcardEscape(email['address'])}")
+ for phone in phones:
+ if isinstance(phone, str) and phone:
+ lines.append(f"TEL:{_vcardEscape(phone)}")
+ elif isinstance(phone, dict) and phone.get("number"):
+ lines.append(f"TEL:{_vcardEscape(phone['number'])}")
+ for addr in addresses:
+ if isinstance(addr, dict):
+ # ADR: PO-Box;Extended;Street;City;Region;Postal;Country
+ lines.append(
+ "ADR:;;"
+ f"{_vcardEscape(addr.get('street'))};"
+ f"{_vcardEscape(addr.get('city'))};"
+ f"{_vcardEscape(addr.get('region'))};"
+ f"{_vcardEscape(addr.get('zip') or addr.get('postal_code'))};"
+ f"{_vcardEscape(addr.get('country'))}"
+ )
+ for site in websites:
+ if isinstance(site, str) and site:
+ lines.append(f"URL:{_vcardEscape(site)}")
+ elif isinstance(site, dict) and site.get("url"):
+ lines.append(f"URL:{_vcardEscape(site['url'])}")
+ if note:
+ lines.append(f"NOTE:{_vcardEscape(note)}")
+ lines.append("END:VCARD")
+ return "\r\n".join(lines) + "\r\n"
+
+
+class ContactAdapter(ServiceAdapter):
+ """Infomaniak Contacts adapter -- browse address books + contacts, .vcf download.
+
+ Uses the public PIM endpoint at ``contacts.infomaniak.com/api/pim``,
+ which authenticates with the PAT scope ``workspace:contact``.
+
+ Path layout:
+ ``/`` -> list address books
+ ``/{addressBookId}`` -> list contacts in that book
+ ``/{addressBookId}/{contactId}`` -> single contact (download as .vcf)
+
+ Endpoint particulars:
+ Listing both address books and contacts is PAT-friendly. The
+ contact-listing call uses ``with=emails,phones,addresses,details``
+ so each record arrives with all the fields needed for vCard
+ synthesis -- Infomaniak skips them by default. Detail and export
+ endpoints (``/contact/{id}``, ``/contact/{id}/export``) are **not**
+ PAT-friendly (the former 500s, the latter 302s to OAuth), so the
+ ``download`` path re-fetches the listing and renders the vCard
+ ourselves via :func:`_renderInfomaniakVcard`.
+ """
+
+ _DEFAULT_CONTACT_LIMIT = 200
+ _MAX_CONTACT_LIMIT = 1000
+ _CONTACT_FIELDS = "emails,phones,addresses,details"
+
+ def __init__(self, accessToken: str):
+ self._token = accessToken
+
+ async def browse(
+ self,
+ path: str,
+ filter: Optional[str] = None,
+ limit: Optional[int] = None,
) -> List[ExternalEntry]:
segments = [s for s in (path or "").strip("/").split("/") if s]
if not segments:
- mailboxes = await self._listMailboxes()
- if not mailboxes:
- return []
- mailboxId = (mailboxes[0].metadata or {}).get("id") or mailboxes[0].path.strip("/")
- else:
- mailboxId = segments[0]
+ return await self._listAddressBooks()
+ if len(segments) == 1:
+ return await self._listContacts(segments[0], limit=limit)
+ return []
- effectiveLimit = self._DEFAULT_MESSAGE_LIMIT if limit is None else max(
- 1, min(int(limit), self._MAX_MESSAGE_LIMIT),
+ async def _listAddressBooks(self) -> List[ExternalEntry]:
+ result = await _infomaniakGet(
+ self._token, f"{_PIM_PREFIX}/addressbook", baseUrl=_CONTACTS_BASE
)
- endpoint = f"/1/mail/{mailboxId}/message/search?query={query}&per_page={effectiveLimit}"
- result = await _infomaniakGet(self._token, endpoint)
if isinstance(result, dict) and result.get("error"):
+ logger.warning(f"Contacts list-addressbooks failed: {result['error']}")
return []
data = _unwrapData(result)
- messages = data if isinstance(data, list) else data.get("messages", []) if isinstance(data, dict) else []
-
+ books = data.get("addressbooks", []) if isinstance(data, dict) else []
entries: List[ExternalEntry] = []
- for msg in messages:
- uid = str(msg.get("uid") or msg.get("id") or "")
- if not uid:
+ for book in books:
+ bookId = str(book.get("id", ""))
+ if not bookId:
continue
- folderId = str(msg.get("folder_id") or msg.get("folderId") or "")
+ isShared = bool(book.get("is_shared")) or (book.get("user_id") or 0) <= 0
+ # The shared organisation directory has an empty name -- give it a
+ # human label so the UDB tree is not blank.
+ name = book.get("name") or (
+ "Organisation" if book.get("is_dynamic_organisation_member_directory") else bookId
+ )
entries.append(ExternalEntry(
- name=msg.get("subject") or uid,
- path=f"/{mailboxId}/{folderId}/{uid}" if folderId else f"/{mailboxId}/{uid}",
- isFolder=False,
- metadata={"uid": uid, "from": msg.get("from", "")},
+ name=name,
+ path=f"/{bookId}",
+ isFolder=True,
+ metadata={
+ "id": bookId,
+ "kind": "addressbook",
+ "color": book.get("color"),
+ "shared": isShared,
+ "default": bool(book.get("default")),
+ },
))
return entries
+ async def _fetchContacts(
+ self,
+ addressBookId: str,
+ perPage: int,
+ ) -> List[Dict[str, Any]]:
+ """Raw listing call -- shared by browse and download."""
+ endpoint = (
+ f"{_PIM_PREFIX}/addressbook/{addressBookId}/contact"
+ f"?per_page={perPage}&with={self._CONTACT_FIELDS}"
+ )
+ result = await _infomaniakGet(self._token, endpoint, baseUrl=_CONTACTS_BASE)
+ if isinstance(result, dict) and result.get("error"):
+ logger.warning(
+ f"Contacts list-contacts {addressBookId} failed: {result['error']}"
+ )
+ return []
+ data = _unwrapData(result)
+ if isinstance(data, list):
+ return [c for c in data if isinstance(c, dict)]
+ if isinstance(data, dict):
+ contacts = data.get("contacts", [])
+ return [c for c in contacts if isinstance(c, dict)]
+ return []
+
+ async def _listContacts(
+ self,
+ addressBookId: str,
+ limit: Optional[int],
+ ) -> List[ExternalEntry]:
+ effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(
+ 1, min(int(limit), self._MAX_CONTACT_LIMIT),
+ )
+ contacts = await self._fetchContacts(addressBookId, perPage=effectiveLimit)
+ entries: List[ExternalEntry] = []
+ for c in contacts:
+ cId = str(c.get("id") or c.get("uid") or "")
+ if not cId:
+ continue
+ firstName = c.get("firstname")
+ lastName = c.get("lastname")
+ displayName = (
+ c.get("name")
+ or " ".join(p for p in (firstName, lastName) if p).strip()
+ or (c.get("emails") or [None])[0]
+ or cId
+ )
+ firstEmail = (c.get("emails") or [None])[0]
+ firstPhone = (c.get("phones") or [None])[0]
+ entries.append(ExternalEntry(
+ name=str(displayName),
+ path=f"/{addressBookId}/{cId}",
+ isFolder=False,
+ metadata={
+ "id": cId,
+ "kind": "contact",
+ "email": firstEmail,
+ "phone": firstPhone,
+ "organization": c.get("organization"),
+ },
+ ))
+ return entries
+
+ async def download(self, path: str) -> DownloadResult:
+ segments = [s for s in (path or "").strip("/").split("/") if s]
+ if len(segments) < 2:
+ return DownloadResult()
+ addressBookId, contactId = segments[0], segments[1]
+
+ # The PIM contact-detail endpoint (``/contact/{id}``) returns 500
+ # against a PAT, and ``/contact/{id}/export`` 302s to OAuth. We
+ # therefore re-fetch the listing (which IS PAT-friendly) with all
+ # vCard-relevant fields, then synthesize the .vcf ourselves.
+ contacts = await self._fetchContacts(
+ addressBookId, perPage=self._MAX_CONTACT_LIMIT
+ )
+ record = next((c for c in contacts if str(c.get("id")) == contactId), None)
+ if record is None:
+ logger.warning(
+ f"Contacts download: contact {contactId} not found in book "
+ f"{addressBookId}"
+ )
+ return DownloadResult()
+
+ firstName = record.get("firstname") or ""
+ lastName = record.get("lastname") or ""
+ displayName = (
+ record.get("name")
+ or " ".join(p for p in (firstName, lastName) if p).strip()
+ or contactId
+ )
+ vcardText = _renderInfomaniakVcard(record)
+ return DownloadResult(
+ data=vcardText.encode("utf-8"),
+ fileName=f"{_safeFileName(displayName, 'contact')}.vcf",
+ mimeType="text/vcard",
+ )
+
+ async def upload(self, path: str, data: bytes, fileName: str) -> dict:
+ return {"error": "Contacts upload not yet implemented"}
+
+ async def search(
+ self,
+ query: str,
+ path: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ # No public search endpoint -- list contacts of the current (or all)
+ # address books and filter client-side on display name / email.
+ books = (
+ await self._listAddressBooks()
+ if not path
+ else [ExternalEntry(name="", path=path, isFolder=True)]
+ )
+ if not books:
+ return []
+ needle = (query or "").strip().lower()
+ results: List[ExternalEntry] = []
+ for book in books:
+ bookId = (book.metadata or {}).get("id") or book.path.strip("/")
+ for c in await self._listContacts(bookId, limit=limit):
+ hay = " ".join(
+ str(v) for v in (
+ c.name,
+ (c.metadata or {}).get("email") or "",
+ (c.metadata or {}).get("organization") or "",
+ )
+ ).lower()
+ if not needle or needle in hay:
+ results.append(c)
+ if limit is not None and len(results) >= int(limit):
+ break
+ return results[: int(limit)] if limit is not None else results
+
class InfomaniakConnector(ProviderConnector):
- """Infomaniak ProviderConnector -- 1 connection -> kDrive + Mail."""
+ """Infomaniak ProviderConnector -- kDrive + Calendar + Contacts today.
+
+ Mail is reserved on the PAT (scope ``workspace:mail``) but not wired
+ up here yet -- Infomaniak has no public PAT-friendly Mail endpoint
+ today (the PIM Mail routes 302 to OAuth, the legacy ``/api/mail`` route
+ 301-redirects to an internal Cyrus port). Once a working endpoint is
+ found, the corresponding adapter can be slotted into ``_SERVICE_MAP``
+ without any token rotation on the user side.
+ """
_SERVICE_MAP = {
"kdrive": KdriveAdapter,
- "mail": MailAdapter,
+ "calendar": CalendarAdapter,
+ "contact": ContactAdapter,
}
def getAvailableServices(self) -> List[str]:
diff --git a/modules/connectors/providerMsft/connectorMsft.py b/modules/connectors/providerMsft/connectorMsft.py
index 30caba95..bf290eca 100644
--- a/modules/connectors/providerMsft/connectorMsft.py
+++ b/modules/connectors/providerMsft/connectorMsft.py
@@ -841,6 +841,285 @@ class OneDriveAdapter(_GraphApiMixin, ServiceAdapter):
return entries
+# ---------------------------------------------------------------------------
+# Calendar Adapter
+# ---------------------------------------------------------------------------
+
+class CalendarAdapter(_GraphApiMixin, ServiceAdapter):
+ """ServiceAdapter for Outlook Calendar via Microsoft Graph.
+
+ Path conventions:
+ ``""`` / ``"/"`` -> list user calendars
+ ``"/<calendarId>"`` -> list events in that calendar
+ ``"/<calendarId>/<eventId>"`` -> reserved for future event detail browse
+
+ Downloads return a synthesised ``.ics`` (VCALENDAR/VEVENT) since Microsoft
+ Graph does not expose a ``/$value`` endpoint for events.
+ """
+
+ _DEFAULT_EVENT_LIMIT = 100
+ _MAX_EVENT_LIMIT = 1000
+ _PAGE_SIZE = 100
+
+ async def browse(
+ self,
+ path: str,
+ filter: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ cleanPath = (path or "").strip("/")
+ if not cleanPath:
+ result = await self._graphGet("me/calendars?$top=100")
+ if "error" in result:
+ logger.warning(f"MSFT Calendar list failed: {result['error']}")
+ return []
+ calendars = result.get("value", [])
+ if filter:
+ calendars = [c for c in calendars if filter.lower() in (c.get("name") or "").lower()]
+ return [
+ ExternalEntry(
+ name=c.get("name", ""),
+ path=f"/{c.get('id', '')}",
+ isFolder=True,
+ metadata={
+ "id": c.get("id"),
+ "color": c.get("color"),
+ "owner": (c.get("owner") or {}).get("address"),
+ "isDefaultCalendar": c.get("isDefaultCalendar", False),
+ "canEdit": c.get("canEdit", False),
+ },
+ )
+ for c in calendars
+ ]
+
+ calendarId = cleanPath.split("/", 1)[0]
+ effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
+ pageSize = min(self._PAGE_SIZE, effectiveLimit)
+ endpoint: Optional[str] = (
+ f"me/calendars/{calendarId}/events"
+ f"?$top={pageSize}&$orderby=start/dateTime desc"
+ )
+ events: List[Dict[str, Any]] = []
+ while endpoint and len(events) < effectiveLimit:
+ result = await self._graphGet(endpoint)
+ if "error" in result:
+ logger.warning(f"MSFT Calendar events failed: {result['error']}")
+ break
+ for ev in result.get("value", []):
+ events.append(ev)
+ if len(events) >= effectiveLimit:
+ break
+ nextLink = result.get("@odata.nextLink")
+ endpoint = _stripGraphBase(nextLink) if nextLink else None
+
+ return [
+ ExternalEntry(
+ name=ev.get("subject", "(no subject)"),
+ path=f"/{calendarId}/{ev.get('id', '')}",
+ isFolder=False,
+ mimeType="text/calendar",
+ metadata={
+ "id": ev.get("id"),
+ "start": (ev.get("start") or {}).get("dateTime"),
+ "end": (ev.get("end") or {}).get("dateTime"),
+ "location": (ev.get("location") or {}).get("displayName"),
+ "organizer": (ev.get("organizer") or {}).get("emailAddress", {}).get("address"),
+ "isAllDay": ev.get("isAllDay", False),
+ "webLink": ev.get("webLink"),
+ },
+ )
+ for ev in events
+ ]
+
+ async def download(self, path: str) -> DownloadResult:
+ cleanPath = (path or "").strip("/")
+ if "/" not in cleanPath:
+ return DownloadResult()
+ eventId = cleanPath.split("/")[-1]
+ ev = await self._graphGet(f"me/events/{eventId}")
+ if "error" in ev:
+ logger.warning(f"MSFT Calendar event fetch failed: {ev['error']}")
+ return DownloadResult()
+ icsBytes = _eventToIcs(ev)
+ subject = ev.get("subject") or eventId
+ safeName = _safeFileName(subject) or "event"
+ return DownloadResult(
+ data=icsBytes,
+ fileName=f"{safeName}.ics",
+ mimeType="text/calendar",
+ )
+
+ async def upload(self, path: str, data: bytes, fileName: str) -> dict:
+ return {"error": "Calendar upload not supported"}
+
+ async def search(
+ self,
+ query: str,
+ path: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ safeQuery = query.replace("'", "''")
+ effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
+ endpoint = f"me/events?$search=\"{safeQuery}\"&$top={effectiveLimit}"
+ result = await self._graphGet(endpoint)
+ if "error" in result:
+ return []
+ return [
+ ExternalEntry(
+ name=ev.get("subject", "(no subject)"),
+ path=f"/search/{ev.get('id', '')}",
+ isFolder=False,
+ mimeType="text/calendar",
+ metadata={
+ "id": ev.get("id"),
+ "start": (ev.get("start") or {}).get("dateTime"),
+ "end": (ev.get("end") or {}).get("dateTime"),
+ },
+ )
+ for ev in result.get("value", [])
+ ]
+
+
+# ---------------------------------------------------------------------------
+# Contacts Adapter
+# ---------------------------------------------------------------------------
+
+class ContactsAdapter(_GraphApiMixin, ServiceAdapter):
+ """ServiceAdapter for Outlook Contacts via Microsoft Graph.
+
+ Path conventions:
+ ``""`` -> list contact folders (default + custom)
+ ``"/<folderId>"`` -> list contacts in that folder; the
+ virtual id ``default`` maps to
+ ``/me/contacts`` (the user's primary
+ contact list)
+ ``"/<folderId>/<contactId>"`` -> reserved for future detail browse
+
+ Downloads return a synthesised vCard 3.0 (.vcf) since Microsoft Graph
+ does not expose a ``/$value`` endpoint for contacts.
+ """
+
+ _DEFAULT_CONTACT_LIMIT = 200
+ _MAX_CONTACT_LIMIT = 1000
+ _PAGE_SIZE = 100
+ _DEFAULT_FOLDER_ID = "default"
+
+ async def browse(
+ self,
+ path: str,
+ filter: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ cleanPath = (path or "").strip("/")
+ if not cleanPath:
+ folders: List[ExternalEntry] = [
+ ExternalEntry(
+ name="Kontakte",
+ path=f"/{self._DEFAULT_FOLDER_ID}",
+ isFolder=True,
+ metadata={"id": self._DEFAULT_FOLDER_ID, "isDefault": True},
+ ),
+ ]
+ result = await self._graphGet("me/contactFolders?$top=100")
+ if "error" not in result:
+ for f in result.get("value", []):
+ folders.append(
+ ExternalEntry(
+ name=f.get("displayName", ""),
+ path=f"/{f.get('id', '')}",
+ isFolder=True,
+ metadata={"id": f.get("id"), "parentFolderId": f.get("parentFolderId")},
+ )
+ )
+ else:
+ logger.warning(f"MSFT contactFolders list failed: {result['error']}")
+ return folders
+
+ folderId = cleanPath.split("/", 1)[0]
+ effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
+ pageSize = min(self._PAGE_SIZE, effectiveLimit)
+ if folderId == self._DEFAULT_FOLDER_ID:
+ endpoint: Optional[str] = f"me/contacts?$top={pageSize}&$orderby=displayName"
+ else:
+ endpoint = f"me/contactFolders/{folderId}/contacts?$top={pageSize}&$orderby=displayName"
+
+ contacts: List[Dict[str, Any]] = []
+ while endpoint and len(contacts) < effectiveLimit:
+ result = await self._graphGet(endpoint)
+ if "error" in result:
+ logger.warning(f"MSFT contacts list failed: {result['error']}")
+ break
+ for c in result.get("value", []):
+ contacts.append(c)
+ if len(contacts) >= effectiveLimit:
+ break
+ nextLink = result.get("@odata.nextLink")
+ endpoint = _stripGraphBase(nextLink) if nextLink else None
+
+ return [
+ ExternalEntry(
+ name=c.get("displayName") or _personLabel(c) or "(no name)",
+ path=f"/{folderId}/{c.get('id', '')}",
+ isFolder=False,
+ mimeType="text/vcard",
+ metadata={
+ "id": c.get("id"),
+ "givenName": c.get("givenName"),
+ "surname": c.get("surname"),
+ "companyName": c.get("companyName"),
+ "emailAddresses": [e.get("address") for e in (c.get("emailAddresses") or []) if e.get("address")],
+ "businessPhones": c.get("businessPhones") or [],
+ "mobilePhone": c.get("mobilePhone"),
+ },
+ )
+ for c in contacts
+ ]
+
+ async def download(self, path: str) -> DownloadResult:
+ cleanPath = (path or "").strip("/")
+ if "/" not in cleanPath:
+ return DownloadResult()
+ contactId = cleanPath.split("/")[-1]
+ c = await self._graphGet(f"me/contacts/{contactId}")
+ if "error" in c:
+ logger.warning(f"MSFT contact fetch failed: {c['error']}")
+ return DownloadResult()
+ vcfBytes = _contactToVcard(c)
+ label = c.get("displayName") or _personLabel(c) or contactId
+ safeName = _safeFileName(label) or "contact"
+ return DownloadResult(
+ data=vcfBytes,
+ fileName=f"{safeName}.vcf",
+ mimeType="text/vcard",
+ )
+
+ async def upload(self, path: str, data: bytes, fileName: str) -> dict:
+ return {"error": "Contacts upload not supported"}
+
+ async def search(
+ self,
+ query: str,
+ path: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[ExternalEntry]:
+ safeQuery = query.replace("'", "''")
+ effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
+ endpoint = f"me/contacts?$search=\"{safeQuery}\"&$top={effectiveLimit}"
+ result = await self._graphGet(endpoint)
+ if "error" in result:
+ return []
+ return [
+ ExternalEntry(
+ name=c.get("displayName") or _personLabel(c) or "(no name)",
+ path=f"/search/{c.get('id', '')}",
+ isFolder=False,
+ mimeType="text/vcard",
+ metadata={"id": c.get("id")},
+ )
+ for c in result.get("value", [])
+ ]
+
+
# ---------------------------------------------------------------------------
# MsftConnector (1:n)
# ---------------------------------------------------------------------------
@@ -853,6 +1132,8 @@ class MsftConnector(ProviderConnector):
"outlook": OutlookAdapter,
"teams": TeamsAdapter,
"onedrive": OneDriveAdapter,
+ "calendar": CalendarAdapter,
+ "contact": ContactsAdapter,
}
def getAvailableServices(self) -> List[str]:
@@ -891,3 +1172,143 @@ def _matchFilter(entry: ExternalEntry, pattern: str) -> bool:
"""Simple glob-like filter (supports * wildcard)."""
import fnmatch
return fnmatch.fnmatch(entry.name.lower(), pattern.lower())
+
+
+def _safeFileName(name: str) -> str:
+ """Strip path-unsafe characters and trim length so the result is a usable file name."""
+ import re
+ return re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", name or "")[:80].strip(". ")
+
+
+def _personLabel(contact: Dict[str, Any]) -> str:
+ given = (contact.get("givenName") or "").strip()
+ surname = (contact.get("surname") or "").strip()
+ if given or surname:
+ return f"{given} {surname}".strip()
+ company = (contact.get("companyName") or "").strip()
+ return company
+
+
+def _icsEscape(value: str) -> str:
+ """Escape RFC 5545 reserved characters in TEXT properties."""
+ if value is None:
+ return ""
+ return (
+ value.replace("\\", "\\\\")
+ .replace(";", "\\;")
+ .replace(",", "\\,")
+ .replace("\r\n", "\\n")
+ .replace("\n", "\\n")
+ )
+
+
+def _icsDateTime(value: Optional[str]) -> Optional[str]:
+ """Convert an ISO datetime string to an RFC 5545 DATE-TIME value (UTC)."""
+ if not value:
+ return None
+ from datetime import datetime, timezone
+ try:
+ normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value
+ dt = datetime.fromisoformat(normalized)
+ if dt.tzinfo is None:
+ dt = dt.replace(tzinfo=timezone.utc)
+ return dt.astimezone(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+ except (TypeError, ValueError):
+ return None
+
+
+def _eventToIcs(event: Dict[str, Any]) -> bytes:
+ """Build a minimal RFC 5545 VCALENDAR/VEVENT for a Graph event payload."""
+ from datetime import datetime, timezone
+ uid = event.get("iCalUId") or event.get("id") or "unknown@poweron"
+ summary = _icsEscape(event.get("subject") or "")
+ location = _icsEscape((event.get("location") or {}).get("displayName") or "")
+ body = (event.get("body") or {}).get("content") or ""
+ description = _icsEscape(body)
+ dtstart = _icsDateTime((event.get("start") or {}).get("dateTime"))
+ dtend = _icsDateTime((event.get("end") or {}).get("dateTime"))
+ dtstamp = _icsDateTime(event.get("lastModifiedDateTime")) or datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+
+ lines = [
+ "BEGIN:VCALENDAR",
+ "VERSION:2.0",
+ "PRODID:-//PowerOn//MSFT-Calendar-Adapter//EN",
+ "CALSCALE:GREGORIAN",
+ "BEGIN:VEVENT",
+ f"UID:{uid}",
+ f"DTSTAMP:{dtstamp}",
+ ]
+ if dtstart:
+ lines.append(f"DTSTART:{dtstart}")
+ if dtend:
+ lines.append(f"DTEND:{dtend}")
+ if summary:
+ lines.append(f"SUMMARY:{summary}")
+ if location:
+ lines.append(f"LOCATION:{location}")
+ if description:
+ lines.append(f"DESCRIPTION:{description}")
+ organizer = (event.get("organizer") or {}).get("emailAddress", {}).get("address")
+ if organizer:
+ lines.append(f"ORGANIZER:mailto:{organizer}")
+ for att in (event.get("attendees") or []):
+ addr = (att.get("emailAddress") or {}).get("address")
+ if addr:
+ lines.append(f"ATTENDEE:mailto:{addr}")
+ lines.append("END:VEVENT")
+ lines.append("END:VCALENDAR")
+ return ("\r\n".join(lines) + "\r\n").encode("utf-8")
+
+
+def _contactToVcard(contact: Dict[str, Any]) -> bytes:
+ """Build a vCard 3.0 from a Graph /me/contacts payload."""
+ given = contact.get("givenName") or ""
+ surname = contact.get("surname") or ""
+ middle = contact.get("middleName") or ""
+ fn = contact.get("displayName") or _personLabel(contact) or contact.get("companyName") or ""
+
+ lines = [
+ "BEGIN:VCARD",
+ "VERSION:3.0",
+ f"N:{surname};{given};{middle};;",
+ f"FN:{fn}",
+ ]
+ if contact.get("companyName"):
+ org = contact["companyName"]
+ if contact.get("department"):
+ org = f"{org};{contact['department']}"
+ lines.append(f"ORG:{org}")
+ if contact.get("jobTitle"):
+ lines.append(f"TITLE:{contact['jobTitle']}")
+ for em in (contact.get("emailAddresses") or []):
+ addr = em.get("address")
+ if addr:
+ lines.append(f"EMAIL;TYPE=INTERNET:{addr}")
+ for phone in (contact.get("businessPhones") or []):
+ if phone:
+ lines.append(f"TEL;TYPE=WORK,VOICE:{phone}")
+ if contact.get("mobilePhone"):
+ lines.append(f"TEL;TYPE=CELL,VOICE:{contact['mobilePhone']}")
+ for phone in (contact.get("homePhones") or []):
+ if phone:
+ lines.append(f"TEL;TYPE=HOME,VOICE:{phone}")
+
+ def _appendAddress(addr: Dict[str, Any], typ: str) -> None:
+ if not addr:
+ return
+ street = addr.get("street") or ""
+ city = addr.get("city") or ""
+ state = addr.get("state") or ""
+ postal = addr.get("postalCode") or ""
+ country = addr.get("countryOrRegion") or ""
+ if any([street, city, state, postal, country]):
+ lines.append(f"ADR;TYPE={typ}:;;{street};{city};{state};{postal};{country}")
+
+ _appendAddress(contact.get("businessAddress") or {}, "WORK")
+ _appendAddress(contact.get("homeAddress") or {}, "HOME")
+ _appendAddress(contact.get("otherAddress") or {}, "OTHER")
+ if contact.get("personalNotes"):
+ lines.append(f"NOTE:{_icsEscape(contact['personalNotes'])}")
+ lines.append(f"UID:{contact.get('id', '')}")
+ lines.append("END:VCARD")
+ return ("\r\n".join(lines) + "\r\n").encode("utf-8")
diff --git a/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py b/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py
index 0dccfb36..aed94a68 100644
--- a/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py
+++ b/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py
@@ -1160,6 +1160,9 @@ async def list_connection_services(
"drive": "Google Drive",
"gmail": "Gmail",
"files": "Files (FTP)",
+ "kdrive": "kDrive",
+ "calendar": "Calendar",
+ "contact": "Contacts",
}
_serviceIcons = {
"sharepoint": "sharepoint",
@@ -1170,6 +1173,9 @@ async def list_connection_services(
"drive": "cloud",
"gmail": "mail",
"files": "folder",
+ "kdrive": "cloud",
+ "calendar": "calendar",
+ "contact": "contact",
}
items = [
{"service": s, "label": _serviceLabels.get(s, s), "icon": _serviceIcons.get(s, "folder")}
diff --git a/modules/features/workspace/routeFeatureWorkspace.py b/modules/features/workspace/routeFeatureWorkspace.py
index 96313293..3fa85c6c 100644
--- a/modules/features/workspace/routeFeatureWorkspace.py
+++ b/modules/features/workspace/routeFeatureWorkspace.py
@@ -1818,6 +1818,9 @@ async def listConnectionServices(
"drive": "Google Drive",
"gmail": "Gmail",
"files": "Files (FTP)",
+ "kdrive": "kDrive",
+ "calendar": "Calendar",
+ "contact": "Contacts",
}
_serviceIcons = {
"sharepoint": "sharepoint",
@@ -1827,6 +1830,9 @@ async def listConnectionServices(
"drive": "cloud",
"gmail": "mail",
"files": "folder",
+ "kdrive": "cloud",
+ "calendar": "calendar",
+ "contact": "contact",
}
items = [
{
diff --git a/modules/interfaces/interfaceDbApp.py b/modules/interfaces/interfaceDbApp.py
index d5803e4b..51519a29 100644
--- a/modules/interfaces/interfaceDbApp.py
+++ b/modules/interfaces/interfaceDbApp.py
@@ -3331,7 +3331,10 @@ class AppObjects:
)
if not tokens:
- logger.warning(
+ # Pending connections legitimately have no token yet (PAT not
+ # submitted, OAuth callback not completed). Keep at DEBUG to
+ # avoid noisy warnings on every connection-list refresh.
+ logger.debug(
f"No connection token found for connectionId: {connectionId}"
)
return None
diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py
index dfaa09cd..8e7a730d 100644
--- a/modules/routes/routeDataConnections.py
+++ b/modules/routes/routeDataConnections.py
@@ -484,16 +484,23 @@ def update_connection(
def connect_service(
request: Request,
connectionId: str = Path(..., description="The ID of the connection to connect"),
+ body: Optional[Dict[str, Any]] = Body(default=None),
currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
- """Connect a service for the current user
-
+ """Connect a service for the current user.
+
+ Optional body: ``{"reauth": true}`` -- forces the OAuth provider to re-show
+ the consent screen, which is required when new scopes have been added (e.g.
+ Calendar + Contacts after the connection was first created). Without this
+ flag the provider silently re-uses the previous consent and never grants
+ the new scopes, leaving the connection in a degraded state.
+
SECURITY: This endpoint is secure - users can only connect their own connections.
"""
-
+
try:
interface = getInterface(currentUser)
-
+
# Find the connection
connection = None
# SECURITY FIX: All users (including admins) can only connect their own connections
@@ -503,29 +510,40 @@ def connect_service(
if conn.id == connectionId:
connection = conn
break
-
+
if not connection:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=routeApiMsg("Connection not found")
)
-
+
+ reauth = bool((body or {}).get("reauth")) if isinstance(body, dict) else False
+ reauthSuffix = "&reauth=1" if reauth else ""
+
# Data-app OAuth (JWT state issued server-side in /auth/connect)
auth_url = None
if connection.authority == AuthAuthority.MSFT:
- auth_url = f"/api/msft/auth/connect?connectionId={quote(connectionId, safe='')}"
+ auth_url = f"/api/msft/auth/connect?connectionId={quote(connectionId, safe='')}{reauthSuffix}"
elif connection.authority == AuthAuthority.GOOGLE:
- auth_url = f"/api/google/auth/connect?connectionId={quote(connectionId, safe='')}"
+ auth_url = f"/api/google/auth/connect?connectionId={quote(connectionId, safe='')}{reauthSuffix}"
elif connection.authority == AuthAuthority.CLICKUP:
- auth_url = f"/api/clickup/auth/connect?connectionId={quote(connectionId, safe='')}"
+ auth_url = f"/api/clickup/auth/connect?connectionId={quote(connectionId, safe='')}{reauthSuffix}"
elif connection.authority == AuthAuthority.INFOMANIAK:
- auth_url = f"/api/infomaniak/auth/connect?connectionId={quote(connectionId, safe='')}"
+ # Infomaniak does not use OAuth for data access; the frontend posts a
+ # Personal Access Token directly to /api/infomaniak/connections/{id}/token.
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=routeApiMsg(
+ "Infomaniak uses a Personal Access Token instead of OAuth. "
+ "Submit the token via POST /api/infomaniak/connections/{connectionId}/token."
+ ),
+ )
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Unsupported authority: {connection.authority}"
)
-
+
return {"authUrl": auth_url}
except HTTPException:
diff --git a/modules/routes/routeSecurityGoogle.py b/modules/routes/routeSecurityGoogle.py
index 96b84203..523523ee 100644
--- a/modules/routes/routeSecurityGoogle.py
+++ b/modules/routes/routeSecurityGoogle.py
@@ -281,9 +281,17 @@ async def auth_login_callback(
def auth_connect(
request: Request,
connectionId: str = Query(..., description="UserConnection id"),
+ reauth: Optional[int] = Query(0, description="If 1, force the consent screen so newly added scopes are granted"),
currentUser: User = Depends(getCurrentUser),
) -> RedirectResponse:
- """Start Google Data OAuth for an existing connection (requires gateway session)."""
+ """Start Google Data OAuth for an existing connection (requires gateway session).
+
+ Google already defaults to ``prompt=consent`` here, but ``include_granted_scopes=true``
+ can cause newly added scopes (e.g. calendar.readonly, contacts.readonly) to be
+ silently dropped on subsequent re-authorisations. With ``reauth=1`` we drop
+ ``include_granted_scopes`` so Google re-issues a token strictly for the
+ current scope list.
+ """
try:
_require_google_data_config()
interface = getInterface(currentUser)
@@ -310,9 +318,10 @@ def auth_connect(
)
extra_params: Dict[str, Any] = {
"access_type": "offline",
- "include_granted_scopes": "true",
"state": state_jwt,
}
+ if not reauth:
+ extra_params["include_granted_scopes"] = "true"
login_hint = connection.externalEmail or connection.externalUsername
if login_hint:
extra_params["login_hint"] = login_hint
diff --git a/modules/routes/routeSecurityInfomaniak.py b/modules/routes/routeSecurityInfomaniak.py
index 24c478f9..5bf079a1 100644
--- a/modules/routes/routeSecurityInfomaniak.py
+++ b/modules/routes/routeSecurityInfomaniak.py
@@ -1,69 +1,72 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
-"""Infomaniak OAuth for data connections (UserConnection + Token).
+"""Infomaniak Personal-Access-Token onboarding for data connections.
-Pure DATA_CONNECTION flow -- Infomaniak is NOT a login authority for PowerOn.
+Infomaniak does NOT support OAuth scopes for kDrive/kSuite data access.
+The user must create a Personal Access Token (PAT) at
+https://manager.infomaniak.com/v3/ng/accounts/token/list with the API
+scopes:
+
+- ``accounts`` -> account discovery (REQUIRED for kDrive)
+- ``drive`` -> kDrive (active adapter)
+- ``workspace:calendar`` -> Calendar (active adapter)
+- ``workspace:contact`` -> Contacts (active adapter)
+- ``workspace:mail`` -> Mail (adapter pending; scope reserved)
+
+Validation strategy
+-------------------
+The submit endpoint validates the PAT in three deterministic steps,
+each addressing exactly one scope:
+
+1. ``resolveAccessibleAccountIds(pat)`` -> ``GET /1/accounts`` proves
+ the ``accounts`` scope is on the PAT. Without this scope, kDrive
+ cannot enumerate the owning account_ids (a standalone or free-tier
+ kDrive lives on a *different* account_id than its kSuite
+ counterpart, so the kSuite account_id from PIM is not enough).
+
+2. ``resolveOwnerIdentity(pat)`` -> PIM Calendar (preferred) or PIM
+ Contacts (fallback) yields the user's display name + their kSuite
+ account_id, used purely for connection labelling. This also proves
+ that at least one of ``workspace:calendar`` / ``workspace:contact``
+ is on the PAT (the connection would otherwise be blank in the UI).
+
+3. ``GET /2/drive?account_id={firstAccountId}`` is the final scope
+ probe -- 200 means the ``drive`` scope is present. 401/403 means
+ the scope is missing.
+
+Mail has no separate probe: its scope is recorded in ``grantedScopes``
+so a future adapter can pick it up without re-issuing the token.
"""
-from fastapi import APIRouter, HTTPException, Request, status, Depends, Query
-from fastapi.responses import HTMLResponse, RedirectResponse
+from fastapi import APIRouter, HTTPException, Request, status, Depends, Path, Body
import logging
-import json
-import time
from typing import Dict, Any
-from urllib.parse import urlencode
+import hashlib
import httpx
-from jose import jwt as jose_jwt
-from jose import JWTError
-from modules.shared.configuration import APP_CONFIG
-from modules.interfaces.interfaceDbApp import getInterface, getRootInterface
+from modules.interfaces.interfaceDbApp import getInterface
from modules.datamodels.datamodelUam import AuthAuthority, User, ConnectionStatus, UserConnection
from modules.datamodels.datamodelSecurity import Token, TokenPurpose
-from modules.auth import getCurrentUser, limiter, SECRET_KEY, ALGORITHM
-from modules.auth.oauthProviderConfig import infomaniakDataScopes
-from modules.shared.timeUtils import createExpirationTimestamp, getUtcTimestamp, parseTimestamp
+from modules.auth import getCurrentUser, limiter
+from modules.shared.timeUtils import getUtcTimestamp, createExpirationTimestamp
from modules.shared.i18nRegistry import apiRouteContext
+from modules.connectors.providerInfomaniak.connectorInfomaniak import (
+ resolveOwnerIdentity,
+ resolveAccessibleAccountIds,
+ InfomaniakIdentityError,
+)
routeApiMsg = apiRouteContext("routeSecurityInfomaniak")
logger = logging.getLogger(__name__)
-_FLOW_CONNECT = "infomaniak_connect"
-
-INFOMANIAK_AUTHORIZE_URL = "https://login.infomaniak.com/authorize"
-INFOMANIAK_TOKEN_URL = "https://login.infomaniak.com/token"
INFOMANIAK_API_BASE = "https://api.infomaniak.com"
-CLIENT_ID = APP_CONFIG.get("Service_INFOMANIAK_DATA_CLIENT_ID")
-CLIENT_SECRET = APP_CONFIG.get("Service_INFOMANIAK_DATA_CLIENT_SECRET")
-REDIRECT_URI = APP_CONFIG.get("Service_INFOMANIAK_OAUTH_REDIRECT_URI")
-
-
-def _issue_oauth_state(claims: Dict[str, Any]) -> str:
- body = {**claims, "exp": int(time.time()) + 600}
- return jose_jwt.encode(body, SECRET_KEY, algorithm=ALGORITHM)
-
-
-def _parse_oauth_state(state: str) -> Dict[str, Any]:
- try:
- return jose_jwt.decode(state, SECRET_KEY, algorithms=[ALGORITHM])
- except JWTError as e:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST, detail=f"Invalid OAuth state: {e}"
- ) from e
-
-
-def _require_infomaniak_config():
- if not CLIENT_ID or not CLIENT_SECRET or not REDIRECT_URI:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=routeApiMsg(
- "Infomaniak OAuth is not configured "
- "(Service_INFOMANIAK_DATA_CLIENT_ID, Service_INFOMANIAK_DATA_CLIENT_SECRET, "
- "Service_INFOMANIAK_OAUTH_REDIRECT_URI)"
- ),
- )
+# Infomaniak PATs do not expire unless the user sets an explicit lifetime in
+# the Manager (up to 30 years). We persist a 10-year horizon so the central
+# tokenStatus helper does not flag the connection as "no token". Mirrors
+# ClickUp.
+_INFOMANIAK_TOKEN_EXPIRES_IN_SEC = 10 * 365 * 24 * 3600
router = APIRouter(
@@ -78,251 +81,192 @@ router = APIRouter(
)
-@router.get("/auth/connect")
-@limiter.limit("5/minute")
-def auth_connect(
- request: Request,
- connectionId: str = Query(..., description="UserConnection id"),
- currentUser: User = Depends(getCurrentUser),
-) -> RedirectResponse:
- """Start Infomaniak OAuth for an existing connection (requires gateway session)."""
+async def _probeDriveScope(client: httpx.AsyncClient, pat: str, accountId: int) -> None:
+ """Confirm the ``drive`` scope is on the PAT.
+
+ Issues ``GET /2/drive?account_id={accountId}`` -- a clean 200 means
+ both the ``drive`` scope is present and the resolved ``account_id``
+ is correct. 401/403 means the scope is missing; anything else means
+ Infomaniak is misbehaving and we refuse to persist.
+ """
+ url = f"{INFOMANIAK_API_BASE}/2/drive?account_id={accountId}"
try:
- _require_infomaniak_config()
- interface = getInterface(currentUser)
- connections = interface.getUserConnections(currentUser.id)
- connection = None
- for conn in connections:
- if conn.id == connectionId and conn.authority == AuthAuthority.INFOMANIAK:
- connection = conn
- break
- if not connection:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail=routeApiMsg("Infomaniak connection not found"),
- )
-
- state_jwt = _issue_oauth_state(
- {
- "flow": _FLOW_CONNECT,
- "connectionId": connectionId,
- "userId": str(currentUser.id),
- }
+ resp = await client.get(
+ url,
+ headers={"Authorization": f"Bearer {pat}", "Accept": "application/json"},
)
- query = urlencode(
- {
- "client_id": CLIENT_ID,
- "response_type": "code",
- "access_type": "offline",
- "redirect_uri": REDIRECT_URI,
- "scope": " ".join(infomaniakDataScopes),
- "state": state_jwt,
- }
- )
- auth_url = f"{INFOMANIAK_AUTHORIZE_URL}?{query}"
- return RedirectResponse(auth_url)
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error initiating Infomaniak connect: {str(e)}")
+ except httpx.HTTPError as e:
+ logger.error(f"Infomaniak drive-probe network error ({url}): {e}")
raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to initiate Infomaniak connect: {str(e)}",
+ status_code=status.HTTP_502_BAD_GATEWAY,
+ detail=routeApiMsg("Could not reach Infomaniak to validate the token"),
)
-
-@router.get("/auth/connect/callback")
-async def auth_connect_callback(
- code: str = Query(...),
- state: str = Query(...),
-) -> HTMLResponse:
- """OAuth callback for Infomaniak data connection."""
- state_data = _parse_oauth_state(state)
- if state_data.get("flow") != _FLOW_CONNECT:
+ if resp.status_code == 200:
+ return
+ if resp.status_code in (401, 403):
+ logger.warning(
+ f"Infomaniak drive-probe rejected PAT ({url}): "
+ f"{resp.status_code} {resp.text[:200]}"
+ )
raise HTTPException(
- status_code=400, detail=routeApiMsg("Invalid OAuth flow for this callback")
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=routeApiMsg(
+ "Token rejected by Infomaniak (missing scope 'drive'). "
+ "Required scopes: 'drive' (kDrive) and 'workspace:calendar' "
+ "(or 'workspace:contact'). Recommended for upcoming "
+ "services: 'workspace:mail'."
+ ),
)
- connection_id = state_data.get("connectionId")
- user_id = state_data.get("userId")
- if not connection_id or not user_id:
+ logger.error(
+ f"Infomaniak drive-probe unexpected response ({url}): "
+ f"{resp.status_code} {resp.text[:200]}"
+ )
+ raise HTTPException(
+ status_code=status.HTTP_502_BAD_GATEWAY,
+ detail=routeApiMsg(
+ "Infomaniak drive-probe returned an unexpected response."
+ ),
+ )
+
+
+@router.post("/connections/{connectionId}/token")
+@limiter.limit("10/minute")
+async def submit_infomaniak_token(
+ request: Request,
+ connectionId: str = Path(..., description="UserConnection id"),
+ body: Dict[str, Any] = Body(..., description="{ 'token': '<personal access token>' }"),
+ currentUser: User = Depends(getCurrentUser),
+) -> Dict[str, Any]:
+ """Validate and persist an Infomaniak Personal Access Token (PAT).
+
+ Body:
+ { "token": "<personal access token (PAT)>" }
+
+ Validation order (all three must succeed before persisting):
+ 1. ``resolveAccessibleAccountIds(pat)`` -> proves the
+ ``accounts`` scope is on the PAT (required for kDrive
+ account discovery).
+ 2. ``resolveOwnerIdentity(pat)`` -> display name + kSuite
+ account_id for the connection UI label.
+ 3. ``/2/drive?account_id=<firstAccountId>`` -> proves the ``drive``
+ scope is on the PAT.
+
+ No data derived from the PAT is stored as adapter state -- both
+ account list and owner identity are re-resolved lazily by the
+ adapters at request time.
+ """
+ pat = (body or {}).get("token")
+ if not isinstance(pat, str) or not pat.strip():
raise HTTPException(
- status_code=400, detail=routeApiMsg("Missing connection or user in OAuth state")
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=routeApiMsg("Missing 'token' in request body"),
)
+ pat = pat.strip()
- _require_infomaniak_config()
-
- async with httpx.AsyncClient() as client:
- token_resp = await client.post(
- INFOMANIAK_TOKEN_URL,
- data={
- "grant_type": "authorization_code",
- "client_id": CLIENT_ID,
- "client_secret": CLIENT_SECRET,
- "code": code,
- "redirect_uri": REDIRECT_URI,
- },
- headers={"Content-Type": "application/x-www-form-urlencoded"},
- timeout=30.0,
- )
- if token_resp.status_code != 200:
- logger.error(
- f"Infomaniak token exchange failed: {token_resp.status_code} {token_resp.text}"
- )
- return HTMLResponse(
- content=f"Connection Failed
{token_resp.text}
",
- status_code=400,
- )
- token_json = token_resp.json()
- access_token = token_json.get("access_token")
- refresh_token = token_json.get("refresh_token", "")
- expires_in = int(token_json.get("expires_in", 0))
- granted_scopes = token_json.get("scope", "")
-
- if not access_token:
- return HTMLResponse(
- content="Connection Failed
No access token.
",
- status_code=400,
- )
-
- rootInterface = getRootInterface()
- if not refresh_token:
- try:
- existing_tokens = rootInterface.getTokensByConnectionIdAndAuthority(
- connection_id, AuthAuthority.INFOMANIAK
- )
- if existing_tokens:
- existing_tokens.sort(
- key=lambda x: parseTimestamp(x.createdAt, default=0), reverse=True
- )
- refresh_token = existing_tokens[0].tokenRefresh or ""
- except Exception:
- pass
-
- async with httpx.AsyncClient() as client:
- profile_resp = await client.get(
- f"{INFOMANIAK_API_BASE}/1/profile",
- headers={
- "Authorization": f"Bearer {access_token}",
- "Accept": "application/json",
- },
- timeout=30.0,
- )
- if profile_resp.status_code != 200:
- logger.error(
- f"Infomaniak profile lookup failed: {profile_resp.status_code} {profile_resp.text}"
- )
- return HTMLResponse(
- content="Connection Failed
Could not load Infomaniak profile.
",
- status_code=400,
- )
- profile_payload = profile_resp.json()
- profile = profile_payload.get("data") if isinstance(profile_payload, dict) else None
- profile = profile or {}
-
- user = rootInterface.getUser(user_id)
- if not user:
- return HTMLResponse(
- content="""
-
- """,
- status_code=404,
- )
-
- interface = getInterface(user)
- connections = interface.getUserConnections(user_id)
+ interface = getInterface(currentUser)
connection = None
- for conn in connections:
- if conn.id == connection_id:
+ for conn in interface.getUserConnections(currentUser.id):
+ if conn.id == connectionId and conn.authority == AuthAuthority.INFOMANIAK:
connection = conn
break
if not connection:
- return HTMLResponse(
- content="""
-
- """,
- status_code=404,
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=routeApiMsg("Infomaniak connection not found"),
)
- ext_id = str(profile.get("id", "")) if profile.get("id") is not None else ""
- username = profile.get("login") or profile.get("email") or ext_id
- email = profile.get("email")
+ try:
+ accountIds = await resolveAccessibleAccountIds(pat)
+ except InfomaniakIdentityError as e:
+ logger.warning(
+ f"Infomaniak token submit for connection {connectionId} could not "
+ f"list accounts: {e}"
+ )
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=routeApiMsg(
+ "Token rejected by Infomaniak (missing scope 'accounts'). "
+ "kDrive needs the 'accounts' scope to discover the owning "
+ "Infomaniak account. Required scopes: 'accounts', 'drive', "
+ "'workspace:calendar', 'workspace:contact'."
+ ),
+ )
- expires_at = createExpirationTimestamp(expires_in)
- granted_scopes_list = (
- granted_scopes
- if isinstance(granted_scopes, list)
- else (granted_scopes.split(" ") if granted_scopes else infomaniakDataScopes)
- )
+ try:
+ identity = await resolveOwnerIdentity(pat)
+ except InfomaniakIdentityError as e:
+ logger.warning(
+ f"Infomaniak token submit for connection {connectionId} could not "
+ f"resolve owner identity: {e}"
+ )
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=routeApiMsg(
+ "Could not derive your Infomaniak account from the token. "
+ "Please ensure the PAT carries 'workspace:calendar' or "
+ "'workspace:contact' so we can identify your account."
+ ),
+ )
+
+ async with httpx.AsyncClient(timeout=15.0, follow_redirects=False) as client:
+ await _probeDriveScope(client, pat, accountIds[0])
+
+ tokenFingerprint = "pat-" + hashlib.sha256(pat.encode("utf-8")).hexdigest()[:8]
+ username = identity["displayName"] or f"infomaniak-{tokenFingerprint}"
+ expiresAt = createExpirationTimestamp(_INFOMANIAK_TOKEN_EXPIRES_IN_SEC)
try:
connection.status = ConnectionStatus.ACTIVE
connection.lastChecked = getUtcTimestamp()
- connection.expiresAt = expires_at
- connection.externalId = ext_id
+ connection.expiresAt = expiresAt
+ connection.externalId = str(identity["accountId"])
connection.externalUsername = username
- if email:
- connection.externalEmail = email
- connection.grantedScopes = granted_scopes_list
- rootInterface.db.recordModify(UserConnection, connection_id, connection.model_dump())
+ connection.grantedScopes = [
+ "accounts",
+ "drive",
+ "workspace:mail",
+ "workspace:calendar",
+ "workspace:contact",
+ ]
+ interface.db.recordModify(UserConnection, connectionId, connection.model_dump())
token = Token(
- userId=user.id,
+ userId=currentUser.id,
authority=AuthAuthority.INFOMANIAK,
- connectionId=connection_id,
+ connectionId=connectionId,
tokenPurpose=TokenPurpose.DATA_CONNECTION,
- tokenAccess=access_token,
- tokenRefresh=refresh_token,
- tokenType=token_json.get("token_type", "bearer"),
- expiresAt=expires_at,
+ tokenAccess=pat,
+ tokenRefresh=None,
+ tokenType="bearer",
+ expiresAt=expiresAt,
createdAt=getUtcTimestamp(),
)
interface.saveConnectionToken(token)
- return HTMLResponse(
- content=f"""
-
- Connection Successful
-
-
-
-
- """
+ logger.info(
+ f"Infomaniak PAT stored for connection {connectionId} "
+ f"(user {currentUser.id}, externalUsername={username}, "
+ f"kSuiteAccountId={identity['accountId']}, "
+ f"accessibleAccounts={accountIds})"
)
+
+ return {
+ "id": connection.id,
+ "status": "connected",
+ "type": "infomaniak",
+ "externalUsername": username,
+ "externalEmail": None,
+ "lastChecked": connection.lastChecked,
+ }
+ except HTTPException:
+ raise
except Exception as e:
- logger.error(f"Error updating Infomaniak connection: {str(e)}", exc_info=True)
- return HTMLResponse(
- content=f"""
-
- """,
- status_code=500,
+ logger.error(
+ f"Error persisting Infomaniak token for connection {connectionId}: {e}",
+ exc_info=True,
+ )
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=routeApiMsg("Failed to store Infomaniak token"),
)
diff --git a/modules/routes/routeSecurityMsft.py b/modules/routes/routeSecurityMsft.py
index 88575881..cc4cb87b 100644
--- a/modules/routes/routeSecurityMsft.py
+++ b/modules/routes/routeSecurityMsft.py
@@ -244,9 +244,15 @@ async def auth_login_callback(
def auth_connect(
request: Request,
connectionId: str = Query(..., description="UserConnection id"),
+ reauth: Optional[int] = Query(0, description="If 1, force the consent screen so newly added scopes are granted"),
currentUser: User = Depends(getCurrentUser),
) -> RedirectResponse:
- """Start Microsoft Data OAuth for an existing connection."""
+ """Start Microsoft Data OAuth for an existing connection.
+
+ With ``reauth=1`` the consent screen is forced (``prompt=consent``) so the
+ user re-grants permissions and any newly added scopes (e.g. Calendars.Read,
+ Contacts.Read) actually land on the access token.
+ """
try:
_require_msft_data_config()
interface = getInterface(currentUser)
@@ -280,6 +286,8 @@ def auth_connect(
if "@" in login_hint:
login_kwargs["domain_hint"] = login_hint.split("@", 1)[1]
login_kwargs["prompt"] = "login"
+ if reauth:
+ login_kwargs["prompt"] = "consent"
auth_url = msal_app.get_authorization_request_url(
scopes=msftDataScopes,
diff --git a/tests/unit/aicore/__init__.py b/tests/unit/aicore/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/aicore/test_aicorePluginOpenai_temperature.py b/tests/unit/aicore/test_aicorePluginOpenai_temperature.py
new file mode 100644
index 00000000..eb2d7cec
--- /dev/null
+++ b/tests/unit/aicore/test_aicorePluginOpenai_temperature.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Unit tests: temperature handling for OpenAI chat-completions models.
+
+Historical regression: every payload sent ``temperature=0.2``. After the
+GPT-5 launch OpenAI rejects any non-default temperature for the GPT-5.x
+and o-series (o1/o3/o4) reasoning models with HTTP 400::
+
+ "Unsupported value: 'temperature' does not support 0.2 with this
+ model. Only the default (1) value is supported."
+
+The fix is a single helper, ``_supportsCustomTemperature``, that is
+consulted before adding the field to the outgoing payload. These tests
+pin the contract:
+
+* legacy chat models (gpt-4o, gpt-4o-mini, gpt-4.1, gpt-3.5-*) keep
+ honoring custom temperatures,
+* every gpt-5.x and o1/o3/o4 variant must omit the field entirely.
+"""
+from __future__ import annotations
+
+import pytest
+
+from modules.aicore.aicorePluginOpenai import _supportsCustomTemperature
+
+
+class TestSupportsCustomTemperature:
+ """Pure model-name classification - no network, no payload assembly."""
+
+ @pytest.mark.parametrize(
+ "modelName",
+ [
+ "gpt-4o",
+ "gpt-4o-mini",
+ "gpt-4.1",
+ "gpt-3.5-turbo",
+ "text-embedding-3-small",
+ "dall-e-3",
+ ],
+ )
+ def testLegacyModelsAcceptCustomTemperature(self, modelName):
+ assert _supportsCustomTemperature(modelName) is True
+
+ @pytest.mark.parametrize(
+ "modelName",
+ [
+ "gpt-5",
+ "gpt-5.4",
+ "gpt-5.4-mini",
+ "gpt-5.4-nano",
+ "gpt-5.5",
+ "GPT-5.5",
+ "o1",
+ "o1-mini",
+ "o3",
+ "o3-mini",
+ "o4-mini",
+ ],
+ )
+ def testReasoningModelsRejectCustomTemperature(self, modelName):
+ assert _supportsCustomTemperature(modelName) is False
+
+ def testEmptyOrNoneModelDefaultsToSupported(self):
+ # Defensive: unknown/empty names should not silently break legacy paths.
+ assert _supportsCustomTemperature("") is True
+ assert _supportsCustomTemperature(None) is True