Merge pull request #76 from valueonag/feat/features-workflow-integration

cleaned up dependencies and code architecture
ValueOn AG 2025-12-09 23:27:01 +01:00 committed by GitHub
commit 8b40698ba1
71 changed files with 5332 additions and 2148 deletions

app.py (11 changed lines)

@@ -1,5 +1,7 @@
import os
import sys
import unicodedata
from urllib.parse import quote_plus
os.environ["NUMEXPR_MAX_THREADS"] = "12"
@@ -141,8 +143,6 @@ def initLogging():
def filter(self, record):
if isinstance(record.msg, str):
# Remove only emojis, preserve other Unicode characters like quotes
import re
import unicodedata
# Remove emoji characters specifically
record.msg = "".join(
@@ -365,8 +365,8 @@ app.add_middleware(
)
# CSRF protection middleware
from modules.security.csrf import CSRFMiddleware
from modules.security.tokenRefreshMiddleware import (
from modules.auth import CSRFMiddleware
from modules.auth import (
TokenRefreshMiddleware,
ProactiveTokenRefreshMiddleware,
)
@@ -443,3 +443,6 @@ app.include_router(rbacRouter)
from modules.routes.routeOptions import router as optionsRouter
app.include_router(optionsRouter)
from modules.routes.routeMessaging import router as messagingRouter
app.include_router(messagingRouter)


@@ -1,229 +0,0 @@
# Frontend Options Usage Guide
## Overview
The `frontend_options` attribute in Pydantic `Field` definitions supports **two formats** for providing options to frontend select/multiselect fields:
1. **Static List**: Predefined list of options
2. **String Reference**: Dynamic options fetched from the Options API
## Type System
The type system is defined in `gateway/modules/shared/frontendOptionsTypes.py`:
```python
from modules.shared.frontendOptionsTypes import FrontendOptions, OptionItem
# FrontendOptions is Union[List[OptionItem], str]
# OptionItem is Dict[str, Any] with "value" and "label" keys
```
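Spelled out, the aliases amount to the following (a minimal sketch matching the comments above; the actual file may define additional helpers):
```python
from typing import Any, Dict, List, Union

# A single option: a machine-readable value plus multilingual labels
OptionItem = Dict[str, Any]  # e.g. {"value": "en", "label": {"en": "English", "fr": "Anglais"}}

# frontend_options is either a static list of options or a string reference
FrontendOptions = Union[List[OptionItem], str]
```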
## Format 1: Static List
Use static lists for fixed, predefined options that don't change based on user context.
### Example
```python
from pydantic import Field
language: str = Field(
default="en",
description="Preferred language",
json_schema_extra={
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": [
{"value": "en", "label": {"en": "English", "fr": "Anglais"}},
{"value": "fr", "label": {"en": "Français", "fr": "Français"}},
{"value": "de", "label": {"en": "Deutsch", "fr": "Allemand"}},
]
}
)
```
### When to Use Static Lists
- Options are fixed constants (e.g., enum values)
- Options don't require database queries
- Options are the same for all users
- Options are simple and don't change frequently
## Format 2: String Reference
Use string references for dynamic options that come from the database or are context-aware.
### Example
```python
from pydantic import Field
from typing import List
roleLabels: List[str] = Field(
default_factory=list,
description="List of role labels",
json_schema_extra={
"frontend_type": "multiselect",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": "user.role" # String reference
}
)
```
### When to Use String References
- Options come from the database (e.g., user connections)
- Options are context-aware (filtered by current user's permissions)
- Options need centralized management
- Options may change frequently
- Options depend on user context or permissions
### Frontend Integration
When the frontend encounters a string reference:
1. **Detect**: Check if `frontend_options` is a string (not a list)
2. **Fetch**: Call `GET /api/options/{optionsName}` (e.g., `/api/options/user.role`)
3. **Use**: Use the returned options for the select/multiselect field
**Example Frontend Code**:
```typescript
// Pseudocode
if (typeof field.frontend_options === 'string') {
  // Dynamic options - fetch from the Options API and parse the JSON body
  const response = await fetch(`/api/options/${field.frontend_options}`);
  return await response.json();
} else {
  // Static options - use directly
  return field.frontend_options;
}
```
## Available Option Names
| Option Name | Description | Context-Aware |
|-------------|-------------|---------------|
| `user.role` | Standard role options (sysadmin, admin, user, viewer) | No |
| `auth.authority` | Authentication authority options (local, google, msft) | No |
| `connection.status` | Connection status options (active, inactive, expired, error) | No |
| `user.connection` | User's connections (fetched from database) | Yes (requires currentUser) |
## Utility Functions
The `frontendOptionsTypes` module provides utility functions:
```python
from modules.shared.frontendOptionsTypes import (
isStringReference,
isStaticList,
validateFrontendOptions,
getOptionsName,
getStaticOptions
)
# Check format
if isStringReference(frontend_options):
optionsName = getOptionsName(frontend_options)
# Fetch from API: /api/options/{optionsName}
elif isStaticList(frontend_options):
options = getStaticOptions(frontend_options)
# Use directly
# Validate format
if not validateFrontendOptions(frontend_options):
raise ValueError("Invalid frontend_options format")
```
## Validation
The `validateFrontendOptions()` function ensures:
1. **String References**: Non-empty string
2. **Static Lists**:
- List of dictionaries
- Each dictionary has `"value"` and `"label"` keys
- `"label"` is a dictionary (multilingual labels)
## Examples in Codebase
### Static List Example
```python
# datamodelUam.py - Language field
language: str = Field(
default="en",
json_schema_extra={
"frontend_options": [
{"value": "en", "label": {"en": "English", "fr": "Anglais"}},
{"value": "fr", "label": {"en": "Français", "fr": "Français"}},
]
}
)
```
### String Reference Example
```python
# datamodelUam.py - Role labels field
roleLabels: List[str] = Field(
default_factory=list,
json_schema_extra={
"frontend_options": "user.role" # Dynamic - fetched from API
}
)
```
### Mixed Example
```python
# datamodelRbac.py - AccessRule model
roleLabel: str = Field(
json_schema_extra={
"frontend_options": "user.role" # String reference
}
)
context: AccessRuleContext = Field(
json_schema_extra={
"frontend_options": [ # Static list
{"value": "DATA", "label": {"en": "Data", "fr": "Données"}},
{"value": "UI", "label": {"en": "UI", "fr": "Interface"}},
{"value": "RESOURCE", "label": {"en": "Resource", "fr": "Ressource"}}
]
}
)
```
## Best Practices
1. **Use Static Lists** for:
- Enum values
- Fixed constants
- Simple options that don't change
2. **Use String References** for:
- Database-driven options
- Context-aware options
- Options that need centralized management
3. **Always validate** frontend_options format when processing
4. **Document** which format is used and why in field descriptions
5. **Frontend**: Always check the type before using options
## Migration Guide
If you have existing static lists that should become dynamic:
1. **Create Options Provider**: Add option logic to `gateway/modules/features/options/mainOptions.py`
2. **Register Option Name**: Add to `getAvailableOptionsNames()` function
3. **Update Field**: Change `frontend_options` from list to string reference
4. **Update Frontend**: Ensure frontend handles string references correctly
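For illustration, steps 1 and 2 might look like this (hypothetical names and registry structure; the real layout of `mainOptions.py` is not shown here):
```python
# mainOptions.py (hypothetical sketch)
from typing import Any, Callable, Dict, List

OptionItem = Dict[str, Any]

def getDepartmentOptions(currentUser=None) -> List[OptionItem]:
    # Hypothetical provider: in practice this would query the database,
    # optionally filtered by the current user's permissions
    return [
        {"value": "sales", "label": {"en": "Sales", "fr": "Ventes"}},
        {"value": "it", "label": {"en": "IT", "fr": "Informatique"}},
    ]

# Hypothetical registry keyed by options name
OPTIONS_PROVIDERS: Dict[str, Callable[..., List[OptionItem]]] = {
    "org.department": getDepartmentOptions,
}

def getAvailableOptionsNames() -> List[str]:
    return list(OPTIONS_PROVIDERS.keys())
```
The field then switches from a static list to `"frontend_options": "org.department"`.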
## See Also
- `gateway/modules/shared/frontendOptionsTypes.py` - Type definitions and utilities
- `gateway/modules/features/options/mainOptions.py` - Options API implementation
- `gateway/modules/routes/routeOptions.py` - Options API endpoints
- `wiki/appdoc/doc_security_role_based_access.md` - RBAC documentation with frontend_options examples


@@ -1,372 +0,0 @@
# RBAC Admin Roles Management & Options API
## Overview
This document describes two new features added to support RBAC management:
1. **Options API**: Dynamic options endpoint for frontend select/multiselect fields
2. **Admin RBAC Roles Module**: Comprehensive role and role assignment management
---
## 1. Options API
### Purpose
The Options API provides dynamic options for frontend form fields that use `frontend_options` as a string reference (e.g., `"user.role"`). This allows the frontend to fetch options from the backend, enabling:
- Database-driven options (e.g., user connections)
- Context-aware options (filtered by current user's permissions)
- Centralized option management
### Frontend Options Format
The `frontend_options` attribute in Pydantic `Field` definitions supports **two formats**:
#### 1. Static List (for basic data types)
```python
frontend_options=[
{"value": "a", "label": {"en": "All Records", "fr": "Tous les enregistrements"}},
{"value": "m", "label": {"en": "My Records", "fr": "Mes enregistrements"}}
]
```
#### 2. String Reference (for dynamic/custom types)
```python
frontend_options="user.role" # Frontend fetches from /api/options/user.role
```
### API Endpoints
#### Get Options
```
GET /api/options/{optionsName}
```
**Path Parameters:**
- `optionsName`: Name of the options set (e.g., "user.role", "user.connection")
**Response:**
```json
[
{
"value": "sysadmin",
"label": {
"en": "System Administrator",
"fr": "Administrateur système"
}
},
{
"value": "admin",
"label": {
"en": "Administrator",
"fr": "Administrateur"
}
}
]
```
**Examples:**
- `GET /api/options/user.role` - Get available role options
- `GET /api/options/user.connection` - Get user's connections (context-aware)
- `GET /api/options/auth.authority` - Get authentication authority options
- `GET /api/options/connection.status` - Get connection status options
#### List Available Options
```
GET /api/options/
```
**Response:**
```json
[
"user.role",
"auth.authority",
"connection.status",
"user.connection"
]
```
### Available Options
| Options Name | Description | Context-Aware |
|-------------|------------|---------------|
| `user.role` | Standard role options (sysadmin, admin, user, viewer) | No |
| `auth.authority` | Authentication authority options (local, google, msft) | No |
| `connection.status` | Connection status options (active, inactive, expired, error) | No |
| `user.connection` | User's connections (fetched from database) | Yes (requires currentUser) |
### Implementation
**Files:**
- `gateway/modules/features/options/mainOptions.py` - Options logic
- `gateway/modules/routes/routeOptions.py` - Options API endpoints
**Usage in Pydantic Models:**
```python
roleLabels: List[str] = Field(
default_factory=list,
description="List of role labels",
json_schema_extra={
"frontend_type": "multiselect",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": "user.role" # String reference
}
)
```
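The endpoint itself can be pictured as a thin FastAPI router over this logic (a sketch only; beyond `getAvailableOptionsNames()`, the exact helper names, rate limiting, and wiring of the real `routeOptions.py` are assumptions):
```python
# routeOptions.py (sketch)
from typing import Any, Dict, List
from fastapi import APIRouter, Depends, HTTPException

from modules.auth import getCurrentUser
from modules.features.options.mainOptions import getAvailableOptionsNames
# resolveOptions is a hypothetical helper standing in for the actual lookup logic
from modules.features.options.mainOptions import resolveOptions  # hypothetical

router = APIRouter(prefix="/api/options", tags=["options"])

@router.get("/", response_model=List[str])
def listOptionsNames(currentUser=Depends(getCurrentUser)) -> List[str]:
    return getAvailableOptionsNames()

@router.get("/{optionsName}", response_model=List[Dict[str, Any]])
def getOptions(optionsName: str, currentUser=Depends(getCurrentUser)) -> List[Dict[str, Any]]:
    if optionsName not in getAvailableOptionsNames():
        raise HTTPException(status_code=404, detail=f"Unknown options name: {optionsName}")
    # Context-aware sets (e.g. user.connection) receive the current user
    return resolveOptions(optionsName, currentUser=currentUser)
```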
---
## 2. Admin RBAC Roles Module
### Purpose
The Admin RBAC Roles module provides comprehensive management of roles and role assignments to users. This module allows administrators to:
- View all available roles with metadata
- List users with their role assignments
- Assign/remove roles to/from users
- Filter users by role or mandate
- View role statistics (user counts per role)
### Access Control
**Required Permissions:**
- User must have `admin` or `sysadmin` role
- RBAC permission check for `UserInDB` table update operations
### API Endpoints
#### List All Roles
```
GET /api/admin/rbac/roles/
```
**Response:**
```json
[
{
"roleLabel": "sysadmin",
"description": {
"en": "System Administrator - Full access to all system resources",
"fr": "Administrateur système - Accès complet à toutes les ressources"
},
"userCount": 2,
"isSystemRole": true
},
{
"roleLabel": "admin",
"description": {
"en": "Administrator - Manage users and resources within mandate scope",
"fr": "Administrateur - Gérer les utilisateurs et ressources dans le périmètre du mandat"
},
"userCount": 5,
"isSystemRole": true
}
]
```
#### List Users with Roles
```
GET /api/admin/rbac/roles/users?roleLabel=admin&mandateId=mandate-123
```
**Query Parameters:**
- `roleLabel` (optional): Filter by role label
- `mandateId` (optional): Filter by mandate ID
**Response:**
```json
[
{
"id": "user-123",
"username": "john.doe",
"email": "john@example.com",
"fullName": "John Doe",
"mandateId": "mandate-123",
"enabled": true,
"roleLabels": ["admin", "user"],
"roleCount": 2
}
]
```
#### Get User Roles
```
GET /api/admin/rbac/roles/users/{userId}
```
**Response:**
```json
{
"id": "user-123",
"username": "john.doe",
"email": "john@example.com",
"fullName": "John Doe",
"mandateId": "mandate-123",
"enabled": true,
"roleLabels": ["admin", "user"],
"roleCount": 2
}
```
#### Update User Roles
```
PUT /api/admin/rbac/roles/users/{userId}/roles
```
**Request Body:**
```json
{
"roleLabels": ["admin", "user"]
}
```
**Response:**
Updated user object with new role assignments
#### Add Role to User
```
POST /api/admin/rbac/roles/users/{userId}/roles/{roleLabel}
```
**Response:**
Updated user object with role added (if not already present)
#### Remove Role from User
```
DELETE /api/admin/rbac/roles/users/{userId}/roles/{roleLabel}
```
**Response:**
Updated user object with role removed
**Note:** If all roles are removed, the user defaults to the `"user"` role.
#### Get Users with Specific Role
```
GET /api/admin/rbac/roles/roles/{roleLabel}/users?mandateId=mandate-123
```
**Query Parameters:**
- `mandateId` (optional): Filter by mandate ID
**Response:**
List of users with the specified role
### Standard Roles
| Role Label | Description | System Role |
|-----------|-------------|-------------|
| `sysadmin` | System Administrator - Full access to all system resources | Yes |
| `admin` | Administrator - Manage users and resources within mandate scope | Yes |
| `user` | User - Standard user with access to own records | Yes |
| `viewer` | Viewer - Read-only access to group records | Yes |
**Custom Roles:** The system also supports custom role labels. These are detected when users are assigned non-standard roles and are marked with `isSystemRole: false`.
### Implementation
**Files:**
- `gateway/modules/routes/routeAdminRbacRoles.py` - Admin RBAC Roles API endpoints
**Dependencies:**
- `gateway/modules/interfaces/interfaceDbAppObjects.py` - User management interface
- `gateway/modules/security/auth.py` - Authentication and authorization
### Usage Examples
#### Assign Multiple Roles to User
```bash
curl -X PUT "http://localhost:8000/api/admin/rbac/roles/users/user-123/roles" \
-H "Authorization: Bearer <token>" \
-H "Content-Type: application/json" \
-d '{"roleLabels": ["admin", "user"]}'
```
#### Add Single Role
```bash
curl -X POST "http://localhost:8000/api/admin/rbac/roles/users/user-123/roles/admin" \
-H "Authorization: Bearer <token>"
```
#### Remove Role
```bash
curl -X DELETE "http://localhost:8000/api/admin/rbac/roles/users/user-123/roles/viewer" \
-H "Authorization: Bearer <token>"
```
#### List All Admins
```bash
curl "http://localhost:8000/api/admin/rbac/roles/roles/admin/users" \
-H "Authorization: Bearer <token>"
```
---
## Integration
### Route Registration
Both modules are registered in `gateway/app.py`:
```python
from modules.routes.routeOptions import router as optionsRouter
app.include_router(optionsRouter)
from modules.routes.routeAdminRbacRoles import router as adminRbacRolesRouter
app.include_router(adminRbacRolesRouter)
```
### Frontend Integration
#### Using Dynamic Options
When a Pydantic model field uses `frontend_options` as a string reference:
```python
roleLabels: List[str] = Field(
frontend_options="user.role"
)
```
The frontend should:
1. Detect the string reference (not a list)
2. Fetch options from `/api/options/user.role`
3. Use the returned options for the select/multiselect field
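For server-side callers and tests, the same detection logic in Python might look like this (a sketch using `requests`; the base URL and session/auth handling are assumptions):
```python
import requests

BASE_URL = "http://localhost:8000"  # assumption

def resolveFieldOptions(frontendOptions, session: requests.Session):
    """Return option items for either frontend_options format."""
    if isinstance(frontendOptions, str):
        # String reference: fetch from the Options API
        response = session.get(f"{BASE_URL}/api/options/{frontendOptions}")
        response.raise_for_status()
        return response.json()
    # Static list: use directly
    return frontendOptions
```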
#### Using Admin RBAC Roles Module
The frontend can use the Admin RBAC Roles endpoints to:
- Display role management UI
- Show role assignments in user management
- Provide role assignment controls
- Display role statistics
---
## Security Considerations
1. **Options API**:
- Requires authentication (currentUser dependency)
- Context-aware options (e.g., `user.connection`) are filtered by current user
- Rate limited: 120 requests/minute
2. **Admin RBAC Roles Module**:
- Requires `admin` or `sysadmin` role
- All endpoints are rate limited: 30-60 requests/minute
- RBAC permission checks ensure users can only manage roles if they have permission
---
## Future Enhancements
1. **Options API**:
- Add more option types (e.g., mandate options, workflow options)
- Support for filtered options based on RBAC permissions
- Caching for frequently accessed options
2. **Admin RBAC Roles Module**:
- Role metadata management (descriptions, permissions summary)
- Bulk role assignment operations
- Role usage analytics
- Role templates/presets


@@ -0,0 +1,292 @@
# Module Dependencies Analysis
This document provides a comprehensive analysis of import dependencies between modules in the `modules` directory.
## Overview
The codebase is organized into the following top-level modules:
- **aicore** - AI core functionality and model management
- **auth** - High-level authentication and token management
- **connectors** - External service connectors
- **datamodels** - Data models and schemas
- **features** - Feature modules (workflow, dynamicOptions, etc.)
- **interfaces** - Database and service interfaces
- **routes** - API route handlers
- **security** - Low-level core security (RBAC and root access)
- **services** - Business logic services
- **shared** - Shared utilities and helpers
- **workflows** - Workflow processing and management
## Bidirectional Dependency Matrix
This table shows all module pairs with dependencies, displaying imports in both directions.
| Module X | Module Y | X → Y | Y → X | Total |
|----------|----------|-------|-------|-------|
| aicore | connectors | 1 | 0 | 1 |
| aicore | datamodels | 13 | 0 | 13 |
| aicore | interfaces | 0 | 2 | 2 |
| aicore | security | 2 | 0 | 2 |
| aicore | services | 0 | 2 | 2 |
| aicore | shared | 5 | 0 | 5 |
| auth | datamodels | 5 | 0 | 5 |
| auth | interfaces | 4 | 0 | 4 |
| auth | routes | 0 | 32 | 32 |
| auth | security | 4 | 0 | 4 |
| auth | services | 0 | 1 | 1 |
| auth | shared | 8 | 0 | 8 |
| connectors | datamodels | 4 | 0 | 4 |
| connectors | interfaces | 0 | 10 | 10 |
| connectors | shared | 5 | 0 | 5 |
| datamodels | features | 0 | 6 | 6 |
| datamodels | interfaces | 0 | 29 | 29 |
| datamodels | routes | 0 | 48 | 48 |
| datamodels | security | 0 | 5 | 5 |
| datamodels | services | 0 | 52 | 52 |
| datamodels | shared | 19 | 0 | 19 |
| datamodels | workflows | 0 | 72 | 72 |
| features | interfaces | 0 | 0 | 0 |
| features | routes | 0 | 6 | 6 |
| features | services | 4 | 0 | 4 |
| features | shared | 3 | 0 | 3 |
| features | workflows | 1 | 0 | 1 |
| interfaces | routes | 0 | 29 | 29 |
| interfaces | security | 9 | 0 | 9 |
| interfaces | services | 0 | 8 | 8 |
| interfaces | shared | 11 | 0 | 11 |
| routes | services | 5 | 0 | 5 |
| routes | shared | 21 | 0 | 21 |
| security | connectors | 2 | 0 | 2 |
| security | datamodels | 5 | 0 | 5 |
| security | shared | 1 | 0 | 1 |
| services | shared | 16 | 0 | 16 |
| services | workflows | 0 | 1 | 1 |
| shared | workflows | 0 | 9 | 9 |
**Legend:**
- **X → Y**: Number of imports from Module X to Module Y
- **Y → X**: Number of imports from Module Y to Module X
- **Total**: Sum of imports in both directions
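The counts come from static analysis of `import` statements. A minimal sketch of how such a matrix can be produced with Python's `ast` module (the directory layout is an assumption, and dynamic `importlib` imports are invisible to this approach, as noted below):
```python
import ast
from collections import Counter
from pathlib import Path

MODULES_DIR = Path("gateway/modules")  # assumed repository layout

def countCrossModuleImports(modulesDir: Path) -> Counter:
    """Count 'modules.<target>' imports between top-level packages."""
    counts: Counter = Counter()
    for pyFile in modulesDir.rglob("*.py"):
        sourceModule = pyFile.relative_to(modulesDir).parts[0]
        tree = ast.parse(pyFile.read_text(encoding="utf-8"))
        for node in ast.walk(tree):
            names = []
            if isinstance(node, ast.ImportFrom) and node.module:
                names = [node.module]
            elif isinstance(node, ast.Import):
                names = [alias.name for alias in node.names]
            for name in names:
                parts = name.split(".")
                # Only count absolute imports of the form modules.<target>...
                if len(parts) > 1 and parts[0] == "modules" and parts[1] != sourceModule:
                    counts[(sourceModule, parts[1])] += 1
    return counts
```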
## Bidirectional Dependencies Only (Circular Dependencies)
This table shows only module pairs where imports exist in **both directions**, indicating potential circular dependencies that should be monitored.
| Module X | Module Y | X → Y | Y → X | Total |
|----------|----------|-------|-------|-------|
**Total bidirectional dependencies: 0**
**Note:** All circular dependencies have been eliminated. The architecture now has clean one-way dependencies.
**Key Improvements:**
1. **Eliminated `connectors ↔ security` circular dependency**: After moving RBAC logic from `connectorDbPostgre.py` to `interfaces/interfaceRbac.py`, connectors no longer import from security. Security still imports from connectors (for `rootAccess` to create `DatabaseConnector` instances), but this is a one-way dependency (security → connectors: 2, connectors → security: 0).
2. **Eliminated `shared ↔ security` circular dependency**: Moved `rbacHelpers.py` from `shared` to `security` module since it was only used in `aicore` and `aicore` already imports from `security`. This eliminates the architectural violation where `shared` imported from `security`.
3. **Eliminated `datamodels ↔ shared` circular dependency**: `shared` no longer has any static imports from `datamodels`. The only reference is a dynamic import in `attributeUtils.py` using `importlib.import_module()` for runtime model discovery, which is not detected by static analysis. This is acceptable as it's a runtime-only dependency.
4. **New `interfaces/interfaceRbac.py` module**: Created to handle RBAC filtering for interfaces, importing from both `security` and `connectors`. This maintains proper architectural layering where connectors remain generic.
5. **Updated dependency counts**:
- `interfaces` → `connectors`: increased from 9 to 10 (interfaceRbac imports connectorDbPostgre)
- `interfaces` → `security`: increased from 7 to 9 (interfaceRbac imports rbac and rootAccess)
- `features` → `interfaces`: increased from 1 to 2 (mainWorkflow imports interfaceRbac)
- `routes` → `interfaces`: increased from 28 to 29 (routeWorkflows imports interfaceRbac)
- `aicore` → `security`: increased from 1 to 2 (now imports rbacHelpers from security)
- `security` → `datamodels`: increased from 3 to 5 (rbacHelpers adds datamodel imports)
## Dependency Graph (Mermaid)
```mermaid
graph TD
aicore[aicore]
auth[auth]
connectors[connectors]
datamodels[datamodels]
features[features]
interfaces[interfaces]
routes[routes]
security[security]
services[services]
shared[shared]
workflows[workflows]
aicore -->|13| datamodels
aicore -->|1| connectors
aicore -->|2| security
aicore -->|5| shared
auth -->|5| datamodels
auth -->|4| interfaces
auth -->|4| security
auth -->|8| shared
connectors -->|4| datamodels
connectors -->|5| shared
datamodels -->|19| shared
features -->|6| datamodels
features -->|4| services
features -->|3| shared
features -->|1| workflows
interfaces -->|29| datamodels
interfaces -->|10| connectors
interfaces -->|2| aicore
interfaces -->|9| security
interfaces -->|11| shared
routes -->|48| datamodels
routes -->|29| interfaces
routes -->|32| auth
routes -->|21| shared
routes -->|6| features
routes -->|5| services
security -->|5| datamodels
security -->|2| connectors
security -->|1| shared
services -->|52| datamodels
services -->|8| interfaces
services -->|2| aicore
services -->|1| auth
services -->|16| shared
workflows -->|72| datamodels
workflows -->|1| services
workflows -->|9| shared
```
## Detailed Module Dependencies
### aicore
**Imports from:**
- `connectors` (1 import)
- `datamodels` (13 imports)
- `security` (2 imports: rbac, rbacHelpers)
- `shared` (5 imports)
**Dependencies:** Low-level AI functionality, depends on data models and connectors.
### auth
**Imports from:**
- `datamodels` (5 imports)
- `interfaces` (4 imports)
- `security` (4 imports)
- `shared` (8 imports)
**Dependencies:** High-level authentication and token management, used by routes and services.
### connectors
**Imports from:**
- `datamodels` (4 imports)
- `shared` (5 imports)
**Dependencies:** External service connectors, minimal dependencies. No longer imports from security or interfaces. Connectors are now fully generic and do not depend on security modules.
### datamodels
**Imports from:**
- `shared` (19 imports)
**Dependencies:** Core data models, only depends on shared utilities.
### features
**Imports from:**
- `datamodels` (6 imports)
- `services` (4 imports)
- `shared` (3 imports)
- `workflows` (1 import)
**Dependencies:** Feature modules that orchestrate workflows and services. Features now use services exclusively, not interfaces directly, maintaining proper architectural layering.
### interfaces
**Imports from:**
- `aicore` (2 imports)
- `connectors` (10 imports)
- `datamodels` (29 imports)
- `security` (9 imports)
- `shared` (11 imports)
**Dependencies:** Database and service interfaces, heavily depends on data models. Includes `interfaceRbac.py` which handles RBAC filtering for all interfaces. No longer creates circular dependency with connectors.
### routes
**Imports from:**
- `auth` (32 imports)
- `datamodels` (48 imports)
- `features` (6 imports)
- `interfaces` (29 imports)
- `services` (5 imports)
- `shared` (21 imports)
**Dependencies:** API endpoints, highest dependency count, orchestrates all layers. Now imports from `auth` instead of `security` for authentication. Increased use of services (from 2 to 5 imports) after architectural refactoring to use services instead of direct interface access in features.
### security
**Imports from:**
- `connectors` (2 imports)
- `datamodels` (5 imports: rbac uses 3, rbacHelpers uses 2)
- `shared` (1 import: rootAccess uses configuration)
**Dependencies:** Low-level core security (RBAC, root access, and RBAC helper functions). Used by interfaces (including `interfaceRbac.py`), auth, and aicore. The `rbacHelpers` module was moved from `shared` to `security` to eliminate the architectural violation where `shared` imported from `security`. Security imports from connectors only for `rootAccess` to create `DatabaseConnector` instances - this is acceptable as it's a one-way dependency (security → connectors).
### services
**Imports from:**
- `aicore` (2 imports)
- `auth` (1 import)
- `datamodels` (52 imports)
- `interfaces` (8 imports)
- `shared` (16 imports)
**Dependencies:** Business logic services, heavily depends on data models.
### shared
**Imports from:**
- None (0 imports)
**Dependencies:** Shared utilities, completely self-contained with no dependencies on other modules. No longer imports from security (rbacHelpers was moved to security module) or datamodels (only uses dynamic imports at runtime for model discovery in `attributeUtils.py`), maintaining proper architectural layering.
### workflows
**Imports from:**
- `datamodels` (72 imports)
- `services` (1 import)
- `shared` (9 imports)
**Dependencies:** Workflow processing, heavily depends on data models (highest count). Reduced from 74 to 72 imports after removing unused imports from `contentValidator.py`.
## Key Observations
1. **datamodels** is the most imported module (used by 9 out of 11 modules)
2. **shared** is widely used but has minimal dependencies (good design)
3. **routes** has the most diverse dependencies (imports from 6 different modules)
4. **workflows** has the highest number of imports from datamodels (72)
5. **auth** is now a separate module, used exclusively by routes and services
6. **security** is now a low-level module, used by interfaces (including `interfaceRbac.py`)
7. **connectors** are now fully generic - no dependencies on security or interfaces
8. **Circular dependencies eliminated**: Reduced from 3 to 0 after RBAC refactoring and `rbacHelpers` move (eliminated `connectors ↔ security`, `shared ↔ security`, and `datamodels ↔ shared`)
9. **New `interfaceRbac.py` module** centralizes RBAC filtering logic for all interfaces
10. **`shared` module is now completely self-contained** - no static imports from any other module
11. **Features architectural improvements**: Features no longer import directly from interfaces (reduced from 2 to 0). All features now use services exclusively, maintaining proper layering: Features → Services → Interfaces → Connectors
12. **Routes increased services usage**: Routes now import from services 5 times (up from 2) after refactoring features to use services instead of direct interface access
## Dependency Layers
Based on the analysis, the architecture follows these layers:
1. **Foundation Layer**: `shared`, `datamodels`
2. **Core Layer**: `aicore`, `connectors`, `security`
3. **Interface Layer**: `interfaces`
4. **Authentication Layer**: `auth`
5. **Business Logic Layer**: `services`, `workflows`
6. **Feature Layer**: `features`
7. **API Layer**: `routes`
## Recommendations
1. **datamodels** should remain stable as it's a core dependency
2. **shared** is excellently designed - completely self-contained with zero dependencies (perfect foundation layer)
3. **security** split and RBAC refactoring were successful - eliminated all circular dependencies (`connectors ↔ security`, `shared ↔ security`)
4. **connectors** are now fully generic and maintainable - keep them free of security/interface dependencies
5. **interfaceRbac.py** successfully centralizes RBAC logic - consider this pattern for other cross-cutting concerns
6. Consider breaking down **workflows** if it continues to grow
7. **routes** could benefit from further abstraction to reduce direct dependencies
8. **Architecture is now clean** - no circular dependencies remain, maintaining clear separation of concerns


@@ -10,7 +10,7 @@ from typing import Dict, List, Optional, Any
from modules.datamodels.datamodelAi import AiModel
from modules.aicore.aicoreBase import BaseConnectorAi
from modules.datamodels.datamodelUam import User
from modules.shared.rbacHelpers import checkResourceAccess
from modules.security.rbacHelpers import checkResourceAccess
from modules.security.rbac import RbacClass
from modules.connectors.connectorDbPostgre import DatabaseConnector

modules/auth/__init__.py (new file, 39 lines)

@@ -0,0 +1,39 @@
"""
Authentication and authorization modules for routes and services.
High-level security functionality that depends on FastAPI and interfaces.
"""
from .authentication import getCurrentUser, limiter, SECRET_KEY, ALGORITHM, cookieAuth
from .jwtService import (
createAccessToken,
createRefreshToken,
setAccessTokenCookie,
setRefreshTokenCookie,
clearAccessTokenCookie,
clearRefreshTokenCookie
)
from .tokenManager import TokenManager
from .tokenRefreshService import token_refresh_service, TokenRefreshService
from .tokenRefreshMiddleware import TokenRefreshMiddleware, ProactiveTokenRefreshMiddleware
from .csrf import CSRFMiddleware
__all__ = [
"getCurrentUser",
"limiter",
"SECRET_KEY",
"ALGORITHM",
"cookieAuth",
"createAccessToken",
"createRefreshToken",
"setAccessTokenCookie",
"setRefreshTokenCookie",
"clearAccessTokenCookie",
"clearRefreshTokenCookie",
"TokenManager",
"token_refresh_service",
"TokenRefreshService",
"TokenRefreshMiddleware",
"ProactiveTokenRefreshMiddleware",
"CSRFMiddleware",
]


@@ -12,7 +12,8 @@ from slowapi import Limiter
from slowapi.util import get_remote_address
from modules.shared.configuration import APP_CONFIG
from modules.interfaces.interfaceDbAppObjects import getRootInterface
from modules.security.rootAccess import getRootDbAppConnector, getRootUser
from modules.interfaces.interfaceDbAppObjects import getInterface
from modules.datamodels.datamodelUam import User, AuthAuthority
from modules.datamodels.datamodelSecurity import Token
@@ -52,7 +53,7 @@ limiter = Limiter(key_func=get_remote_address)
# Logger
logger = logging.getLogger(__name__)
# Note: JWT creation and cookie helpers moved to modules.security.jwtService
# Note: JWT creation and cookie helpers moved to modules.auth.jwtService
def _getUserBase(token: str = Depends(cookieAuth)) -> User:
"""
@@ -110,8 +111,9 @@ def _getUserBase(token: str = Depends(cookieAuth)) -> User:
logger.warning("Invalid JWT Token")
raise credentialsException
# Initialize Gateway Interface with context
appInterface = getRootInterface()
# Get root user and interface for database access
rootUser = getRootUser()
appInterface = getInterface(rootUser)
# Retrieve user from database
user = appInterface.getUserByUsername(username)
@@ -143,7 +145,8 @@ def _getUserBase(token: str = Depends(cookieAuth)) -> User:
db_tokens = []
if tokenId:
try:
db_tokens = appInterface.db.getRecordset(
dbApp = getRootDbAppConnector()
db_tokens = dbApp.getRecordset(
Token, recordFilter={"id": tokenId}
)
except Exception as e:


@@ -96,3 +96,4 @@ class CSRFMiddleware(BaseHTTPMiddleware):
return True
except ValueError:
return False


@@ -242,25 +242,34 @@ class TokenManager:
return None
# Convenience wrapper to fetch and ensure fresh token for a connection via interface layer
def getFreshToken(self, connectionId: str, secondsBeforeExpiry: int = 30 * 60) -> Optional[Token]:
def getFreshToken(self, connectionId: str, secondsBeforeExpiry: int = 30 * 60, interface=None) -> Optional[Token]:
"""Return a fresh token for a connection, refreshing when expiring soon.
Reads the latest stored token via the interface layer, then uses
ensureFreshToken to refresh if needed and persists the refreshed
token via the interface layer.
Args:
connectionId: Connection ID to get token for
secondsBeforeExpiry: Seconds before expiry to refresh
interface: Optional interface instance (if None, uses root interface)
"""
try:
from modules.interfaces.interfaceDbAppObjects import getRootInterface
interfaceDbApp = getRootInterface()
if interface is None:
from modules.security.rootAccess import getRootUser
from modules.interfaces.interfaceDbAppObjects import getInterface
rootUser = getRootUser()
interface = getInterface(rootUser)
token = interfaceDbApp.getConnectionToken(connectionId)
token = interface.getConnectionToken(connectionId)
if not token:
return None
return self.ensureFreshToken(
token,
secondsBeforeExpiry=secondsBeforeExpiry,
saveCallback=lambda t: interfaceDbApp.saveConnectionToken(t)
saveCallback=lambda t: interface.saveConnectionToken(t)
)
except Exception as e:
logger.error(f"getFreshToken: Error fetching or refreshing token for connection {connectionId}: {e}")
return None


@@ -10,7 +10,7 @@ from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from typing import Callable
import asyncio
from modules.security.tokenRefreshService import token_refresh_service
from modules.auth.tokenRefreshService import token_refresh_service
from modules.shared.timeUtils import getUtcTimestamp
logger = logging.getLogger(__name__)
@@ -183,3 +183,4 @@ class ProactiveTokenRefreshMiddleware(BaseHTTPMiddleware):
except Exception as e:
logger.error(f"Error in proactive token refresh for user {user_id}: {str(e)}")


@@ -56,7 +56,7 @@ class TokenRefreshService:
return False
# Import Google token refresh logic
from modules.security.tokenManager import TokenManager
from modules.auth.tokenManager import TokenManager
token_manager = TokenManager()
# Attempt to refresh the token
@@ -68,7 +68,7 @@ class TokenRefreshService:
# Update connection status
interface.db.recordModify(UserConnection, connection.id, {
"lastChecked": getUtcTimestamp(),
"expiresAt": refreshed_token.expiresAt
"expiresAt": refreshedToken.expiresAt
})
logger.info(f"Successfully refreshed Google token for connection {connection.id}")
@@ -105,7 +105,7 @@ class TokenRefreshService:
return False
# Import Microsoft token refresh logic
from modules.security.tokenManager import TokenManager
from modules.auth.tokenManager import TokenManager
token_manager = TokenManager()
# Attempt to refresh the token
@@ -117,7 +117,7 @@ class TokenRefreshService:
# Update connection status
interface.db.recordModify(UserConnection, connection.id, {
"lastChecked": getUtcTimestamp(),
"expiresAt": refreshed_token.expiresAt
"expiresAt": refreshedToken.expiresAt
})
logger.info(f"Successfully refreshed Microsoft token for connection {connection.id}")
@@ -156,8 +156,10 @@ class TokenRefreshService:
logger.debug(f"Starting silent token refresh for user {user_id}")
# Get user interface
from modules.interfaces.interfaceDbAppObjects import getRootInterface
root_interface = getRootInterface()
from modules.security.rootAccess import getRootUser
from modules.interfaces.interfaceDbAppObjects import getInterface
rootUser = getRootUser()
root_interface = getInterface(rootUser)
# Get user connections
connections = root_interface.getUserConnections(user_id)
@@ -223,8 +225,10 @@ class TokenRefreshService:
logger.debug(f"Starting proactive token refresh for user {user_id}")
# Get user interface
from modules.interfaces.interfaceDbAppObjects import getRootInterface
root_interface = getRootInterface()
from modules.security.rootAccess import getRootUser
from modules.interfaces.interfaceDbAppObjects import getInterface
rootUser = getRootUser()
root_interface = getInterface(rootUser)
# Get user connections
connections = root_interface.getUserConnections(user_id)
@@ -287,3 +291,4 @@ class TokenRefreshService:
# Global service instance
token_refresh_service = TokenRefreshService()


@@ -10,7 +10,6 @@ from modules.shared.timeUtils import getUtcTimestamp
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelUam import User, AccessLevel, UserPermissions
from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext
from modules.security.rbac import RbacClass
logger = logging.getLogger(__name__)
@@ -1046,211 +1045,6 @@ class DatabaseConnector:
initialId = systemData.get(table)
return initialId
def getRecordsetWithRBAC(
self,
modelClass: Type[BaseModel],
currentUser: User,
recordFilter: Dict[str, Any] = None,
orderBy: str = None,
limit: int = None,
) -> List[Dict[str, Any]]:
"""
Get records with RBAC filtering applied at database level.
Args:
modelClass: Pydantic model class for the table
currentUser: User object with roleLabels
recordFilter: Additional record filters
orderBy: Field to order by (defaults to "id")
limit: Maximum number of records to return
Returns:
List of filtered records
"""
table = modelClass.__name__
try:
if not self._ensureTableExists(modelClass):
return []
# Get RBAC permissions for this table
# AccessRule table is always in DbApp database
from modules.interfaces.interfaceDbAppObjects import getRootInterface
dbApp = getRootInterface().db
RbacInstance = RbacClass(self, dbApp=dbApp)
permissions = RbacInstance.getUserPermissions(
currentUser,
AccessRuleContext.DATA,
table
)
# Check view permission first
if not permissions.view:
logger.debug(f"User {currentUser.id} has no view permission for table {table}")
return []
# Build WHERE clause with RBAC filtering
whereConditions = []
whereValues = []
# Add RBAC WHERE clause based on read permission
rbacWhereClause = self.buildRbacWhereClause(permissions, currentUser, table)
if rbacWhereClause:
whereConditions.append(rbacWhereClause["condition"])
whereValues.extend(rbacWhereClause["values"])
# Add additional record filters
if recordFilter:
for field, value in recordFilter.items():
whereConditions.append(f'"{field}" = %s')
whereValues.append(value)
# Build the query
whereClause = ""
if whereConditions:
whereClause = " WHERE " + " AND ".join(whereConditions)
orderByClause = f' ORDER BY "{orderBy}"' if orderBy else ' ORDER BY "id"'
limitClause = f" LIMIT {limit}" if limit else ""
query = f'SELECT * FROM "{table}"{whereClause}{orderByClause}{limitClause}'
with self.connection.cursor() as cursor:
cursor.execute(query, whereValues)
records = [dict(row) for row in cursor.fetchall()]
# Handle JSONB fields and ensure numeric types are correct
fields = _get_model_fields(modelClass)
for record in records:
for fieldName, fieldType in fields.items():
# Ensure numeric fields are properly typed
if fieldType in ("DOUBLE PRECISION", "INTEGER") and fieldName in record:
value = record[fieldName]
if value is not None:
try:
if fieldType == "DOUBLE PRECISION":
record[fieldName] = float(value)
elif fieldType == "INTEGER":
record[fieldName] = int(value)
except (ValueError, TypeError):
logger.warning(
f"Could not convert {fieldName} to {fieldType} for record {record.get('id', 'unknown')}: {value}"
)
elif fieldType == "JSONB" and fieldName in record:
if record[fieldName] is None:
if fieldName in ["logs", "messages", "tasks", "expectedDocumentFormats", "resultDocuments"]:
record[fieldName] = []
elif fieldName in ["execParameters", "stats"]:
record[fieldName] = {}
else:
record[fieldName] = None
else:
import json
try:
if isinstance(record[fieldName], str):
record[fieldName] = json.loads(record[fieldName])
elif isinstance(record[fieldName], (dict, list)):
pass
else:
record[fieldName] = json.loads(str(record[fieldName]))
except (json.JSONDecodeError, TypeError, ValueError):
logger.warning(
f"Could not parse JSONB field {fieldName}, keeping as string: {record[fieldName]}"
)
return records
except Exception as e:
logger.error(f"Error loading records with RBAC from table {table}: {e}")
return []
def buildRbacWhereClause(
self,
permissions: UserPermissions,
currentUser: User,
table: str
) -> Optional[Dict[str, Any]]:
"""
Build RBAC WHERE clause based on permissions and access level.
Args:
permissions: UserPermissions object
currentUser: User object
table: Table name
Returns:
Dictionary with "condition" and "values" keys, or None if no filtering needed
"""
if not permissions or not hasattr(permissions, "read"):
return None
readLevel = permissions.read
# No access - return empty result condition
if readLevel == AccessLevel.NONE:
return {"condition": "1 = 0", "values": []}
# All records - no filtering needed
if readLevel == AccessLevel.ALL:
return None
# My records - filter by _createdBy or userId field
if readLevel == AccessLevel.MY:
# Try common field names for creator
userIdField = None
if table == "UserInDB":
userIdField = "id"
elif table == "UserConnection":
userIdField = "userId"
else:
userIdField = "_createdBy"
return {
"condition": f'"{userIdField}" = %s',
"values": [currentUser.id]
}
# Group records - filter by mandateId
if readLevel == AccessLevel.GROUP:
if not currentUser.mandateId:
logger.warning(f"User {currentUser.id} has no mandateId for GROUP access")
return {"condition": "1 = 0", "values": []}
# For UserInDB, filter by mandateId directly
if table == "UserInDB":
return {
"condition": '"mandateId" = %s',
"values": [currentUser.mandateId]
}
# For UserConnection, need to join with UserInDB or filter by mandateId in user
elif table == "UserConnection":
# Get all user IDs in the same mandate using direct SQL query
try:
with self.connection.cursor() as cursor:
cursor.execute(
'SELECT "id" FROM "UserInDB" WHERE "mandateId" = %s',
(currentUser.mandateId,)
)
users = cursor.fetchall()
userIds = [u["id"] for u in users]
if not userIds:
return {"condition": "1 = 0", "values": []}
placeholders = ",".join(["%s"] * len(userIds))
return {
"condition": f'"userId" IN ({placeholders})',
"values": userIds
}
except Exception as e:
logger.error(f"Error building GROUP filter for UserConnection: {e}")
return {"condition": "1 = 0", "values": []}
# For other tables, filter by mandateId
else:
return {
"condition": '"mandateId" = %s',
"values": [currentUser.mandateId]
}
return None
def close(self):
"""Close the database connection."""
if (


@@ -0,0 +1,86 @@
"""
Azure Communication Services Email Connector
Handles email sending via Azure Communication Services
"""
import logging
from typing import Optional
from azure.communication.email import EmailClient
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
class ConnectorMessagingEmail:
"""
Azure Communication Services Email connector.
Handles email sending.
"""
def __init__(self):
"""
Initialize Azure Communication Services Email client using APP_CONFIG.
"""
try:
connectionString = APP_CONFIG.get("MESSAGING_ACS_CONNECTION_STRING")
senderEmail = APP_CONFIG.get("MESSAGING_ACS_SENDER_EMAIL")
if not connectionString or not senderEmail:
logger.warning("Azure Communication Services credentials not configured for email")
self._client = None
self._senderEmail = None
return
self._client = EmailClient.from_connection_string(connectionString)
self._senderEmail = senderEmail
logger.info("Azure Communication Services Email client initialized successfully")
except Exception as e:
logger.error(f"Failed to initialize Azure Communication Services Email client: {e}")
self._client = None
self._senderEmail = None
def send(self, recipient: str, subject: str, message: str) -> bool:
"""
Send an email via Azure Communication Services.
Args:
recipient: Recipient email address
subject: Email subject
message: Email message content (can be HTML)
Returns:
bool: True if successful, False otherwise
"""
if not self._client or not self._senderEmail:
logger.error("Azure Communication Services Email client not initialized")
return False
try:
messageData = {
"senderAddress": self._senderEmail,
"recipients": {
"to": [{"address": recipient}]
},
"content": {
"subject": subject,
"html": message
}
}
# Try both API versions for compatibility
try:
poller = self._client.begin_send(messageData)
poller.result()
except AttributeError:
poller = self._client.begin_send_message(messageData)
poller.result()
logger.info(f"Email sent successfully to {recipient}")
return True
except Exception as e:
logger.error(f"Failed to send email to {recipient}: {e}")
return False


@@ -0,0 +1,78 @@
"""
Twilio SMS Connector
Handles SMS sending via Twilio
"""
import logging
from typing import Optional
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
class ConnectorMessagingSms:
"""
Twilio SMS connector.
Handles SMS sending.
"""
def __init__(self):
"""
Initialize Twilio SMS client using APP_CONFIG.
"""
try:
accountSid = APP_CONFIG.get("MESSAGING_TWILIO_ACCOUNT_SID")
authToken = APP_CONFIG.get("MESSAGING_TWILIO_AUTH_TOKEN")
fromNumber = APP_CONFIG.get("MESSAGING_TWILIO_FROM_NUMBER")
if not accountSid or not authToken:
logger.warning("Twilio credentials not configured for SMS")
self._client = None
self._fromNumber = None
return
try:
from twilio.rest import Client
self._client = Client(accountSid, authToken)
self._fromNumber = fromNumber
logger.info("Twilio SMS client initialized successfully")
except ImportError:
logger.error("Twilio library not installed. Please install with: pip install twilio")
self._client = None
self._fromNumber = None
except Exception as e:
logger.error(f"Failed to initialize Twilio SMS client: {e}")
self._client = None
self._fromNumber = None
def send(self, recipient: str, subject: str, message: str) -> bool:
"""
Send an SMS via Twilio.
Args:
recipient: Recipient phone number (with country code, e.g., '+41791234567')
subject: Ignored (SMS has no subject)
message: SMS message content
Returns:
bool: True if successful, False otherwise
"""
if not self._client or not self._fromNumber:
logger.error("Twilio SMS client not initialized")
return False
try:
messageObj = self._client.messages.create(
body=message,
from_=self._fromNumber,
to=recipient
)
logger.info(f"SMS sent successfully to {recipient}. SID: {messageObj.sid}")
return True
except Exception as e:
logger.error(f"Failed to send SMS to {recipient}: {e}")
return False


@@ -0,0 +1,325 @@
"""Messaging models: MessagingSubscription, MessagingSubscriptionRegistration, MessagingDelivery."""
import uuid
from typing import Optional
from enum import Enum
from pydantic import BaseModel, Field, ConfigDict
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
class MessagingChannel(str, Enum):
"""Messaging channel types"""
EMAIL = "email"
SMS = "sms"
WHATSAPP = "whatsapp"
TEAMS_CHAT = "teams_chat"
# Additional channels can be added here
class DeliveryStatus(str, Enum):
"""Individual delivery status"""
PENDING = "pending"
SENT = "sent"
FAILED = "failed"
class MessagingSubscription(BaseModel):
"""Data model for messaging subscriptions"""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Unique ID of the subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
subscriptionId: str = Field(
description="Unique subscription identifier (e.g., 'system_errors', 'audit_login')",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True}
)
subscriptionLabel: str = Field(
description="Display name of the subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True}
)
mandateId: str = Field(
description="ID of the mandate this subscription belongs to",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
description: Optional[str] = Field(
default=None,
description="Description of the subscription",
json_schema_extra={"frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False}
)
isSystemSubscription: bool = Field(
default=False,
description="Whether this is a system subscription (only admin can create)",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
)
enabled: bool = Field(
default=True,
description="Whether the subscription is enabled",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
)
creationDate: float = Field(
default_factory=getUtcTimestamp,
description="When the subscription was created (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
lastModified: float = Field(
default_factory=getUtcTimestamp,
description="When the subscription was last modified (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
createdBy: Optional[str] = Field(
default=None,
description="User ID who created the subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
modifiedBy: Optional[str] = Field(
default=None,
description="User ID who last modified the subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(use_enum_values=True)
registerModelLabels(
"MessagingSubscription",
{"en": "Messaging Subscription", "fr": "Abonnement de messagerie"},
{
"id": {"en": "ID", "fr": "ID"},
"subscriptionId": {"en": "Subscription ID", "fr": "ID d'abonnement"},
"subscriptionLabel": {"en": "Subscription Label", "fr": "Label d'abonnement"},
"mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
"description": {"en": "Description", "fr": "Description"},
"isSystemSubscription": {"en": "System Subscription", "fr": "Abonnement système"},
"enabled": {"en": "Enabled", "fr": "Activé"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
"createdBy": {"en": "Created By", "fr": "Créé par"},
"modifiedBy": {"en": "Modified By", "fr": "Modifié par"},
},
)
class MessagingSubscriptionRegistration(BaseModel):
"""Data model for user registrations to messaging subscriptions"""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Unique ID of the registration",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
subscriptionId: str = Field(
description="ID of the subscription this registration belongs to",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True}
)
userId: str = Field(
description="ID of the user registered to this subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
channel: MessagingChannel = Field(
description="Channel type for this registration",
json_schema_extra={
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": [
{"value": "email", "label": {"en": "Email", "fr": "Email"}},
{"value": "sms", "label": {"en": "SMS", "fr": "SMS"}},
{"value": "whatsapp", "label": {"en": "WhatsApp", "fr": "WhatsApp"}},
{"value": "teams_chat", "label": {"en": "Teams Chat", "fr": "Chat Teams"}}
]
}
)
channelConfig: str = Field(
default="",
description="Channel-specific configuration (e.g., email address, phone number, Teams user ID)",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False}
)
enabled: bool = Field(
default=True,
description="Whether this registration is enabled",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
)
creationDate: float = Field(
default_factory=getUtcTimestamp,
description="When the registration was created (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
lastModified: float = Field(
default_factory=getUtcTimestamp,
description="When the registration was last modified (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(use_enum_values=True)
registerModelLabels(
"MessagingSubscriptionRegistration",
{"en": "Messaging Registration", "fr": "Inscription à la messagerie"},
{
"id": {"en": "ID", "fr": "ID"},
"subscriptionId": {"en": "Subscription ID", "fr": "ID d'abonnement"},
"userId": {"en": "User ID", "fr": "ID utilisateur"},
"channel": {"en": "Channel", "fr": "Canal"},
"channelConfig": {"en": "Channel Config", "fr": "Configuration du canal"},
"enabled": {"en": "Enabled", "fr": "Activé"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
},
)
class MessagingDelivery(BaseModel):
"""Data model for individual message deliveries"""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Unique ID of the delivery",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
subscriptionId: str = Field(
description="ID of the subscription this delivery belongs to",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
userId: str = Field(
description="ID of the user receiving this delivery",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
channel: MessagingChannel = Field(
description="Channel used for this delivery",
json_schema_extra={
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": False,
"frontend_options": [
{"value": "email", "label": {"en": "Email", "fr": "Email"}},
{"value": "sms", "label": {"en": "SMS", "fr": "SMS"}},
{"value": "whatsapp", "label": {"en": "WhatsApp", "fr": "WhatsApp"}},
{"value": "teams_chat", "label": {"en": "Teams Chat", "fr": "Chat Teams"}}
]
}
)
status: DeliveryStatus = Field(
default=DeliveryStatus.PENDING,
description="Status of the delivery",
json_schema_extra={
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": False,
"frontend_options": [
{"value": "pending", "label": {"en": "Pending", "fr": "En attente"}},
{"value": "sent", "label": {"en": "Sent", "fr": "Envoyé"}},
{"value": "failed", "label": {"en": "Failed", "fr": "Échoué"}}
]
}
)
errorMessage: Optional[str] = Field(
default=None,
description="Error message if delivery failed",
json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": False}
)
sentAt: Optional[float] = Field(
default=None,
description="When the delivery was sent (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
creationDate: float = Field(
default_factory=getUtcTimestamp,
description="When the delivery record was created (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(use_enum_values=True)
registerModelLabels(
"MessagingDelivery",
{"en": "Messaging Delivery", "fr": "Livraison de messagerie"},
{
"id": {"en": "ID", "fr": "ID"},
"subscriptionId": {"en": "Subscription ID", "fr": "ID d'abonnement"},
"userId": {"en": "User ID", "fr": "ID utilisateur"},
"channel": {"en": "Channel", "fr": "Canal"},
"status": {"en": "Status", "fr": "Statut"},
"errorMessage": {"en": "Error Message", "fr": "Message d'erreur"},
"sentAt": {"en": "Sent At", "fr": "Envoyé le"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
},
)
class MessagingEventParameters(BaseModel):
"""Data model for event parameters passed to subscription functions"""
triggerData: dict = Field(
default_factory=dict,
description="Event data from trigger as dictionary/JSON",
json_schema_extra={"frontend_type": "json", "frontend_readonly": False, "frontend_required": False}
)
registerModelLabels(
"MessagingEventParameters",
{"en": "Messaging Event Parameters", "fr": "Paramètres d'événement de messagerie"},
{
"triggerData": {"en": "Trigger Data", "fr": "Données de déclenchement"},
},
)
registerModelLabels(
"MessagingSendResult",
{"en": "Messaging Send Result", "fr": "Résultat d'envoi de messagerie"},
{
"success": {"en": "Success", "fr": "Succès"},
"deliveryId": {"en": "Delivery ID", "fr": "ID de livraison"},
"errorMessage": {"en": "Error Message", "fr": "Message d'erreur"},
},
)
registerModelLabels(
"MessagingSubscriptionExecutionResult",
{"en": "Messaging Subscription Execution Result", "fr": "Résultat d'exécution d'abonnement"},
{
"success": {"en": "Success", "fr": "Succès"},
"messagesSent": {"en": "Messages Sent", "fr": "Messages envoyés"},
"errorMessage": {"en": "Error Message", "fr": "Message d'erreur"},
},
)
class MessagingSendResult(BaseModel):
"""Data model for sendMessage result"""
success: bool = Field(
description="Whether the message was sent successfully",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": True}
)
deliveryId: Optional[str] = Field(
default=None,
description="ID of the created MessagingDelivery record",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
errorMessage: Optional[str] = Field(
default=None,
description="Error message if sending failed",
json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": False}
)
class MessagingSubscriptionExecutionResult(BaseModel):
"""Data model for subscription function execution result"""
success: bool = Field(
description="Whether the subscription execution was successful",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": True}
)
messagesSent: int = Field(
default=0,
description="Number of messages sent",
json_schema_extra={"frontend_type": "number", "frontend_readonly": True, "frontend_required": False}
)
errorMessage: Optional[str] = Field(
default=None,
description="Error message if execution failed",
json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(extra="allow") # Allow additional fields for custom results
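For orientation, a minimal usage sketch of the result models above; it assumes the snippet runs where MessagingSendResult is in scope (the import path in the comment is hypothetical):

```python
# Hypothetical import; in application code the model is defined in the
# datamodels module shown above.
# from modules.datamodels.datamodelMessaging import MessagingSendResult

result = MessagingSendResult(success=False, errorMessage="SMTP timeout")
if not result.success:
    print(f"Delivery failed: {result.errorMessage}")  # Delivery failed: SMTP timeout

ok = MessagingSendResult(success=True, deliveryId="dlv-123")
print(ok.model_dump())  # {'success': True, 'deliveryId': 'dlv-123', 'errorMessage': None}
```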

View file

@@ -1,12 +0,0 @@
"""
Automation feature - handles automated workflow execution and scheduling.
Moved from interfaces/interfaceDbChatObjects.py to follow proper architectural separation:
- Interface layer: Data access only (getAutomationDefinition, etc.)
- Feature layer: Business logic and orchestration (executeAutomation, syncAutomationEvents)
"""
from .mainAutomation import executeAutomation, syncAutomationEvents, createAutomationEventHandler
__all__ = ['executeAutomation', 'syncAutomationEvents', 'createAutomationEventHandler']

View file

@@ -1,200 +0,0 @@
"""
Chat Althaus Data Scheduler
This module handles scheduled data updates for Althaus preprocessing service.
Triggers daily at 01:00 UTC to update the database with configuration.
"""
import logging
import aiohttp
from modules.services import getInterface as getServices
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
# Configuration
ALTHAUS_ENDPOINT = "https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataprocessor/update-db-with-config"
# JSON configuration for Althaus data processing
ALTHAUS_CONFIG_JSON = {
"tables": [
{
"name": "Artikel",
"powerbi_table_name": "Artikel",
"steps": [
{
"keep": {
"columns": [
"I_ID",
"Artikelbeschrieb",
"Artikelbezeichnung",
"Artikelgruppe",
"Artikelkategorie",
"Artikelkürzel",
"Artikelnummer",
"Einheit",
"Gesperrt",
"Keywords",
"Lieferant",
"Warengruppe"
]
}
},
{
"fillna": {
"column": "Lieferant",
"value": "Unbekannt"
}
}
]
},
{
"name": "Einkaufspreis",
"powerbi_table_name": "Einkaufspreis",
"steps": [
{
"to_numeric": {
"column": "EP_CHF",
"errors": "coerce"
}
},
{
"dropna": {
"subset": ["EP_CHF"]
}
}
]
}
]
}
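The step format above is presumably interpreted by the preprocessing service as a sequence of pandas operations; the toy interpreter below sketches that mapping under this assumption (the service's real behavior is not shown in this commit):

```python
# Toy interpreter for the ALTHAUS_CONFIG_JSON step format; each step name is
# assumed to map 1:1 onto the corresponding pandas operation.
import pandas as pd

def apply_steps(df: pd.DataFrame, steps: list) -> pd.DataFrame:
    for step in steps:
        if "keep" in step:
            df = df[step["keep"]["columns"]]
        elif "fillna" in step:
            cfg = step["fillna"]
            df[cfg["column"]] = df[cfg["column"]].fillna(cfg["value"])
        elif "to_numeric" in step:
            cfg = step["to_numeric"]
            df[cfg["column"]] = pd.to_numeric(df[cfg["column"]], errors=cfg["errors"])
        elif "dropna" in step:
            df = df.dropna(subset=step["dropna"]["subset"])
    return df

df = pd.DataFrame({"EP_CHF": ["12.5", "n/a"], "Lieferant": [None, "ACME"]})
out = apply_steps(df, [{"to_numeric": {"column": "EP_CHF", "errors": "coerce"}},
                       {"dropna": {"subset": ["EP_CHF"]}}])
print(out)  # only the first row survives; 'n/a' coerces to NaN and is dropped
```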
# Global manager instance
_dataScheduler = None
class ManagerChatAlthaus:
"""Manages scheduled data updates for Althaus preprocessing service."""
def __init__(self, eventUser=None):
self.eventUser = eventUser
self.services = None
self.appEnvType = "dev"
try:
if not eventUser:
logger.error("Event user not found - Althaus scheduler requires event user")
else:
self.services = getServices(eventUser, None)
self.appEnvType = self.services.utils.configGet("APP_ENV_TYPE", "dev")
logger.info(f"ChatAlthaus manager initialized for env: {self.appEnvType}")
except Exception as e:
logger.error(f"Initialization error in ManagerChatAlthaus.__init__: {e}")
async def updateDatabaseWithConfig(self) -> bool:
"""Update the Althaus database with configuration."""
try:
logger.info("Starting Althaus database update with config")
# Get authorization secret from config
authSecret = APP_CONFIG.get("PREPROCESS_ALTHAUS_CHAT_SECRET")
if not authSecret:
logger.error("PREPROCESS_ALTHAUS_CHAT_SECRET not found in config")
return False
# Prepare headers with authorization
headers = {
"X-PP-API-Key": authSecret,
"Content-Type": "application/json"
}
# Make POST request
timeout = aiohttp.ClientTimeout(total=60)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.post(
ALTHAUS_ENDPOINT,
headers=headers,
json=ALTHAUS_CONFIG_JSON
) as response:
if response.status in [200, 201]:
responseText = await response.text()
logger.info(f"Althaus database update successful: {response.status}")
logger.debug(f"Response: {responseText}")
return True
else:
errorText = await response.text()
logger.error(f"Althaus database update failed: {response.status} - {errorText}")
return False
except Exception as e:
logger.error(f"Error during Althaus database update: {str(e)}")
return False
async def performDataUpdate(eventUser) -> bool:
"""Perform Althaus data update.
This function is called by the scheduler and can be used independently.
Args:
eventUser: Event user to use for the update
Returns:
bool: True if update was successful, False otherwise
"""
try:
logger.info("Starting Althaus data update...")
if not eventUser:
logger.error("Event user not provided - cannot perform data update")
return False
manager = ManagerChatAlthaus(eventUser)
success = await manager.updateDatabaseWithConfig()
if success:
logger.info("Althaus data update completed successfully")
else:
logger.error("Althaus data update failed")
return success
except Exception as e:
logger.error(f"Error in performing Althaus data update: {str(e)}")
return False
def startDataScheduler(eventUser):
"""Initialize the global data scheduler with the eventUser."""
global _dataScheduler
if _dataScheduler is None:
_dataScheduler = ManagerChatAlthaus(eventUser)
logger.info("Global Althaus data scheduler initialized with eventUser")
try:
# Register scheduled job - daily at 01:00 UTC
if _dataScheduler.services:
_dataScheduler.services.utils.eventRegisterCron(
job_id="chatAlthaus.updateData",
func=scheduledDataUpdate,
cron_kwargs={"hour": "1", "minute": "0"},
replace_existing=True,
coalesce=True,
max_instances=1,
misfire_grace_time=3600, # 1 hour grace time for daily jobs
)
logger.info("Registered Althaus data scheduler (daily at 01:00 UTC)")
else:
logger.error("Services not available - cannot register scheduler")
except Exception as e:
logger.error(f"Failed to register scheduler for Althaus data update: {str(e)}")
return _dataScheduler
async def scheduledDataUpdate():
"""Scheduled data update function that uses the global scheduler."""
try:
global _dataScheduler
if _dataScheduler and _dataScheduler.eventUser:
return await performDataUpdate(_dataScheduler.eventUser)
else:
logger.error("Data scheduler not properly initialized - no eventUser")
return False
except Exception as e:
logger.error(f"Error in scheduled data update: {str(e)}")
return False

View file

@@ -1,41 +0,0 @@
import logging
from typing import Optional
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, WorkflowModeEnum
from modules.workflows.workflowManager import WorkflowManager
from modules.services import getInterface as getServices
logger = logging.getLogger(__name__)
async def chatStart(currentUser: User, userInput: UserInputRequest, workflowMode: WorkflowModeEnum, workflowId: Optional[str] = None) -> ChatWorkflow:
"""
Starts a new chat or continues an existing one, then launches processing asynchronously.
Args:
currentUser: Current user
userInput: User input request
workflowMode: "Dynamic" for iterative dynamic-style processing, "Automation" for automated workflow execution
workflowId: Optional workflow ID to continue an existing workflow
Example usage for Dynamic mode:
workflow = await chatStart(currentUser, userInput, workflowMode=WorkflowModeEnum.WORKFLOW_DYNAMIC)
"""
try:
services = getServices(currentUser, None)
workflowManager = WorkflowManager(services)
workflow = await workflowManager.workflowStart(userInput, workflowMode, workflowId)
return workflow
except Exception as e:
logger.error(f"Error starting chat: {str(e)}")
raise
async def chatStop(currentUser: User, workflowId: str) -> ChatWorkflow:
"""Stops a running chat."""
try:
services = getServices(currentUser, None)
workflowManager = WorkflowManager(services)
return await workflowManager.workflowStop(workflowId)
except Exception as e:
logger.error(f"Error stopping chat: {str(e)}")
raise

View file

@@ -1,12 +1,11 @@
"""
Options API feature module.
Dynamic Options API feature module.
Provides dynamic options for frontend select/multiselect fields.
"""
import logging
from typing import List, Dict, Any, Optional
from modules.datamodels.datamodelUam import User
from modules.interfaces.interfaceDbAppObjects import getInterface
logger = logging.getLogger(__name__)
@@ -37,12 +36,13 @@ CONNECTION_STATUS_OPTIONS = [
]
def getOptions(optionsName: str, currentUser: Optional[User] = None) -> List[Dict[str, Any]]:
def getOptions(optionsName: str, services, currentUser: Optional[User] = None) -> List[Dict[str, Any]]:
"""
Get options for a given options name.
Args:
optionsName: Name of the options set to retrieve (e.g., "user.role", "user.connection")
services: Services instance for data access
currentUser: Optional current user for context-aware options
Returns:
@@ -57,8 +57,7 @@ def getOptions(optionsName: str, currentUser: Optional[User] = None) -> List[Dic
# Fetch roles from database
if currentUser:
try:
interface = getInterface(currentUser)
roles = interface.getAllRoles()
roles = services.interfaceDbApp.getAllRoles()
# Convert Role objects to options format
options = []
@@ -101,8 +100,7 @@ def getOptions(optionsName: str, currentUser: Optional[User] = None) -> List[Dic
return []
try:
interface = getInterface(currentUser)
connections = interface.getUserConnections(currentUser.id)
connections = services.interfaceDbApp.getUserConnections(currentUser.id)
return [
{
@@ -135,3 +133,4 @@ def getAvailableOptionsNames() -> List[str]:
"connection.status",
"user.connection",
]
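After this change, callers pass the services object explicitly instead of resolving an interface inside the function; a sketch of the new call pattern (import path assumed; services and currentUser come from the request context):

```python
# Assumed module path for the feature shown in this hunk.
from modules.features.options.mainOptions import getOptions

def listRoleOptions(services, currentUser):
    # services must expose interfaceDbApp, as used by getOptions above.
    options = getOptions("user.role", services, currentUser=currentUser)
    for opt in options:
        print(opt["value"], opt["label"])
    return options
```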

View file

@@ -10,10 +10,10 @@ async def start(eventUser) -> None:
eventUser: System-level event user for background operations (provided by app.py)
"""
# Feature Automation Events
# Feature Workflow (Automation)
if eventUser:
try:
from modules.features.automation import syncAutomationEvents
from modules.features.workflow import syncAutomationEvents
from modules.shared.callbackRegistry import callbackRegistry
# Get services for event user (provides access to interfaces)
@@ -22,26 +22,20 @@ async def start(eventUser) -> None:
# Register callback for automation changes
async def onAutomationChanged(chatInterface):
"""Callback triggered when automations are created/updated/deleted."""
await syncAutomationEvents(chatInterface, eventUser)
# Get services for event user to pass to syncAutomationEvents
eventServices = getServices(eventUser, None)
await syncAutomationEvents(eventServices, eventUser)
callbackRegistry.register('automation.changed', onAutomationChanged)
logger.info("Registered automation change callback")
logger.info("Workflow: Registered change callback")
# Initial sync on startup - use interface from services
await syncAutomationEvents(services.interfaceDbChat, eventUser)
logger.info("Automation events synced on startup")
# Initial sync on startup - use services
await syncAutomationEvents(services, eventUser)
logger.info("Workflow: Events synced on startup")
except Exception as e:
logger.error(f"Error setting up automation events on startup: {str(e)}")
logger.error(f"Workflow: Error setting up events on startup: {str(e)}")
# Don't fail startup if automation sync fails
# Feature SyncDelta
from modules.features.syncDelta import mainSyncDelta
mainSyncDelta.startSyncManager(eventUser)
# Feature ChatAlthaus
from modules.features.chatAlthaus import mainChatAlthaus
mainChatAlthaus.startDataScheduler(eventUser)
await mainChatAlthaus.performDataUpdate(eventUser)
# Feature ...
@@ -56,14 +50,10 @@ async def stop(eventUser) -> None:
eventUser: System-level event user (provided by app.py)
"""
# Unregister automation callback
try:
from modules.shared.callbackRegistry import callbackRegistry
# Note: We'd need to store the callback reference to unregister it properly
# For now, callbacks will remain registered (acceptable for shutdown)
logger.info("Automation callbacks remain registered (will be cleaned up on process exit)")
except Exception as e:
logger.warning(f"Error during automation callback cleanup: {str(e)}")
# Feature Workflow (Automation)
# Callbacks will remain registered (acceptable for shutdown)
logger.info("Workflow: Callbacks remain registered (will be cleaned up on shutdown)")
# Feature ...
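Only register() is visible in this diff, so the dispatch side of callbackRegistry is an assumption; a self-contained stand-in illustrating the pattern used here:

```python
# Toy stand-in for the project's callbackRegistry; the fire() side is an
# assumption, since the diff only shows register().
import asyncio
from collections import defaultdict
from typing import Any, Awaitable, Callable

class ToyCallbackRegistry:
    def __init__(self) -> None:
        self._callbacks: defaultdict = defaultdict(list)

    def register(self, event: str, callback: Callable[..., Awaitable[Any]]) -> None:
        self._callbacks[event].append(callback)

    async def fire(self, event: str, *args: Any) -> None:
        for cb in self._callbacks[event]:
            await cb(*args)

registry = ToyCallbackRegistry()

async def onAutomationChanged(services) -> None:
    print("resyncing automations with", services)

registry.register("automation.changed", onAutomationChanged)
asyncio.run(registry.fire("automation.changed", "services-instance"))
```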

View file

@@ -1,830 +0,0 @@
"""
Delta Group Sync Manager
This module handles the synchronization of tickets to SharePoint using the new
Graph API-based connector architecture.
"""
import logging
import os
import io
import pandas as pd
import csv as csv_module
from io import StringIO, BytesIO
from datetime import datetime, UTC
from modules.services import getInterface as getServices
logger = logging.getLogger(__name__)
class ManagerSyncDelta:
"""Manages Tickets to SharePoint synchronization for Delta Group.
Supports two sync modes:
- CSV mode: Uses CSV files for synchronization (default)
- Excel mode: Uses Excel (.xlsx) files for synchronization
To change sync mode, use the setSyncMode() method or modify SYNC_MODE class variable.
"""
SHAREPOINT_SITE_NAME = "SteeringBPM"
SHAREPOINT_SITE_PATH = "SteeringBPM"
SHAREPOINT_HOSTNAME = "deltasecurityag.sharepoint.com"
SHAREPOINT_MAIN_FOLDER = "/General/50 Docs hosted by SELISE"
SHAREPOINT_BACKUP_FOLDER = "/General/50 Docs hosted by SELISE/SyncHistory"
SHAREPOINT_AUDIT_FOLDER = "/General/50 Docs hosted by SELISE/SyncHistory"
SHAREPOINT_USER_ID = "patrick.motsch@delta.ch"
SYNC_MODE = "xlsx" # Can be "csv" or "xlsx"
# File names for different sync modes
SYNC_FILE_CSV = "DELTAgroup x SELISE Ticket Exchange List.csv"
SYNC_FILE_XLSX = "DELTAgroup x SELISE Ticket Exchange List.xlsx"
# Tickets connection parameters
JIRA_USERNAME = "p.motsch@valueon.ch"
JIRA_API_TOKEN = "" # Will be set in __init__
JIRA_URL = "https://deltasecurity.atlassian.net"
JIRA_PROJECT_CODE = "DCS"
JIRA_ISSUE_TYPE = "Task"
# Task sync definition for field mapping
TASK_SYNC_DEFINITION = {
# key = excel header; value = [direction ('get': ticket -> excel, 'put': excel -> ticket), ticket field path]
'ID': ['get', ['key']],
'Module Category': ['get', ['fields', 'customfield_10058', 'value']],
'Summary': ['get', ['fields', 'summary']],
'Description': ['get', ['fields', 'description']], # ADF format - needs conversion to text
'References': ['get', ['fields', 'customfield_10066']], # Field exists, may be None
'Priority': ['get', ['fields', 'priority', 'name']],
'Issue Status': ['get', ['fields', 'status', 'name']],
'Assignee': ['get', ['fields', 'assignee', 'displayName']],
'Issue Created': ['get', ['fields', 'created']],
'Due Date': ['get', ['fields', 'duedate']], # Field exists, may be None
'DELTA Comments': ['get', ['fields', 'customfield_10167']], # Field exists, may be None
'SELISE Ticket References': ['put', ['fields', 'customfield_10067']],
'SELISE Status Values': ['put', ['fields', 'customfield_10065']],
'SELISE Comments': ['put', ['fields', 'customfield_10168']],
}
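Each mapping value pairs a sync direction with a field path into the raw ticket JSON; a small stand-alone resolver shows how such a path is walked (mirroring the lookup loop in dumpTicketDataToFile further down):

```python
# Stand-alone sketch of resolving a ['get', path] entry against a raw ticket dict.
def resolve_field(ticket: dict, path: list):
    value = ticket
    for key in path:
        if isinstance(value, dict) and key in value:
            value = value[key]
        else:
            return None  # missing keys behave like optional JIRA fields
    return value

ticket = {"key": "DCS-42", "fields": {"summary": "Fix login", "priority": {"name": "High"}}}
print(resolve_field(ticket, ["key"]))                         # DCS-42
print(resolve_field(ticket, ["fields", "priority", "name"]))  # High
print(resolve_field(ticket, ["fields", "duedate"]))           # None
```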
def __init__(self, eventUser=None):
self.targetSite = None
self.services = None
self.sharepointConnection = None
self.eventUser = eventUser
self.sync_audit_log = [] # Store audit log entries in memory
try:
if not eventUser:
logger.error("Event user not found - SharePoint connection required")
self._logAuditEvent("SYNC_INIT", "FAILED", "Event user not found")
else:
self.services = getServices(eventUser, None)
# Read config values using services
self.APP_ENV_TYPE = self.services.utils.configGet("APP_ENV_TYPE", "dev")
self.JIRA_API_TOKEN = self.services.utils.configGet("Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET", "")
# Resolve SharePoint connection for the configured user id
self.sharepointConnection = self.services.chat.getUserConnectionByExternalUsername("msft", self.SHAREPOINT_USER_ID)
if not self.sharepointConnection:
logger.error(
f"No SharePoint connection found for user: {self.SHAREPOINT_USER_ID}"
)
self._logAuditEvent("SYNC_INIT", "FAILED", f"No SharePoint connection for user: {self.SHAREPOINT_USER_ID}")
else:
# Configure SharePoint service token and set connector reference
if not self.services.sharepoint.setAccessTokenFromConnection(
self.sharepointConnection
):
logger.error("Failed to set SharePoint token from UserConnection")
self._logAuditEvent("SYNC_INIT", "FAILED", "Failed to set SharePoint token")
else:
logger.info(
f"SharePoint token configured for connection: {self.sharepointConnection.id}"
)
self._logAuditEvent("SYNC_INIT", "SUCCESS", f"SharePoint token configured for connection: {self.sharepointConnection.id}")
except Exception as e:
logger.error(f"Initialization error in ManagerSyncDelta.__init__: {e}")
self._logAuditEvent("SYNC_INIT", "ERROR", f"Initialization error: {str(e)}")
def _logAuditEvent(self, action: str, status: str, details: str):
"""Log audit events for sync operations to memory."""
try:
timestamp = datetime.fromtimestamp(self.services.utils.timestampGetUtc(), UTC).strftime("%Y-%m-%d %H:%M:%S UTC")
userId = str(self.eventUser.id) if self.eventUser else "system"
logEntry = f"{timestamp} | {userId} | {action} | {status} | {details}"
self.sync_audit_log.append(logEntry)
logger.info(f"Sync Audit: {logEntry}")
except Exception as e:
logger.warning(f"Failed to log audit event: {str(e)}")
def _logSyncChanges(self, mergeDetails: dict, syncMode: str):
"""Log detailed field changes for sync operations."""
try:
# Log summary statistics
summary = f"Sync {syncMode} - Updated: {mergeDetails['updated']}, Added: {mergeDetails['added']}, Unchanged: {mergeDetails['unchanged']}"
self._logAuditEvent("SYNC_CHANGES_SUMMARY", "INFO", summary)
# Log individual field changes (limit to first 10 to avoid spam)
for change in mergeDetails['changes'][:10]:
# Truncate very long changes to avoid logging issues
if len(change) > 500:
change = change[:500] + "... [truncated]"
self._logAuditEvent("SYNC_FIELD_CHANGE", "INFO", f"{syncMode}: {change}")
# Log count if there were more changes
if len(mergeDetails['changes']) > 10:
self._logAuditEvent("SYNC_FIELD_CHANGE", "INFO", f"{syncMode}: ... and {len(mergeDetails['changes']) - 10} more changes")
except Exception as e:
logger.warning(f"Failed to log sync changes: {str(e)}")
async def _saveAuditLogToSharepoint(self):
"""Save the sync audit log to SharePoint."""
try:
if not self.sync_audit_log or not self.targetSite:
return False
# Generate log filename with current timestamp
timestamp = datetime.fromtimestamp(self.services.utils.timestampGetUtc(), UTC).strftime("%Y%m%d_%H%M%S")
log_filename = f"log_{timestamp}.log"
# Create log content
log_content = "\n".join(self.sync_audit_log)
log_bytes = log_content.encode('utf-8')
# Upload to SharePoint audit folder
await self.services.sharepoint.uploadFile(
siteId=self.targetSite['id'],
folderPath=self.SHAREPOINT_AUDIT_FOLDER,
fileName=log_filename,
content=log_bytes
)
logger.info(f"Sync audit log saved to SharePoint: {log_filename}")
self._logAuditEvent("AUDIT_LOG_SAVE", "SUCCESS", f"Audit log saved to SharePoint: {log_filename}")
return True
except Exception as e:
logger.error(f"Failed to save audit log to SharePoint: {str(e)}")
self._logAuditEvent("AUDIT_LOG_SAVE", "FAILED", f"Failed to save audit log: {str(e)}")
return False
def getSyncFileName(self) -> str:
"""Get the appropriate sync file name based on the sync mode."""
if self.SYNC_MODE == "xlsx":
return self.SYNC_FILE_XLSX
else: # Default to CSV
return self.SYNC_FILE_CSV
def setSyncMode(self, mode: str) -> bool:
"""Set the sync mode to either 'csv' or 'xlsx'.
Args:
mode: Either 'csv' or 'xlsx'
Returns:
bool: True if mode was set successfully, False if invalid mode
"""
if mode.lower() in ["csv", "xlsx"]:
self.SYNC_MODE = mode.lower()
logger.info(f"Sync mode changed to: {self.SYNC_MODE}")
return True
else:
logger.error(f"Invalid sync mode: {mode}. Must be 'csv' or 'xlsx'")
return False
async def initializeInterface(self) -> bool:
"""Initialize SharePoint connector; tickets connector is created by interface on demand."""
try:
# Validate init-prepared members
if not self.services or not self.sharepointConnection or not self.services.sharepoint:
logger.error("Service or SharePoint connection not initialized")
return False
# Resolve the site by hostname + site path to get the real site ID
logger.info(
f"Resolving site ID via hostname+path: {self.SHAREPOINT_HOSTNAME}:/sites/{self.SHAREPOINT_SITE_PATH}"
)
resolved = await self.services.sharepoint.findSiteByUrl(
hostname=self.SHAREPOINT_HOSTNAME,
sitePath=self.SHAREPOINT_SITE_PATH
)
if not resolved:
logger.error(
f"Failed to resolve site. Hostname: {self.SHAREPOINT_HOSTNAME}, Path: {self.SHAREPOINT_SITE_PATH}"
)
return False
self.targetSite = {
"id": resolved.get("id"),
"displayName": resolved.get("displayName", self.SHAREPOINT_SITE_NAME),
"name": resolved.get("name", self.SHAREPOINT_SITE_NAME)
}
# Test site access by listing root of the drive
logger.info("Testing site access using resolved site ID...")
test_result = await self.services.sharepoint.listFolderContents(
siteId=self.targetSite["id"],
folderPath=""
)
if test_result is not None:
logger.info(
f"Site access confirmed: {self.targetSite['displayName']} (ID: {self.targetSite['id']})"
)
else:
logger.error("Could not access site drive - check permissions")
return False
return True
except Exception as e:
logger.error(f"Error initializing connectors: {str(e)}")
return False
async def syncTicketsOverSharepoint(self) -> bool:
"""Perform Tickets to SharePoint synchronization using list-based interface and local CSV/XLSX handling."""
try:
logger.info(f"Starting JIRA to SharePoint synchronization (Mode: {self.SYNC_MODE})")
self._logAuditEvent("SYNC_START", "INFO", f"Starting JIRA to SharePoint sync (Mode: {self.SYNC_MODE})")
# Initialize interface
if not await self.initializeInterface():
logger.error("Failed to initialize connectors")
self._logAuditEvent("SYNC_INTERFACE", "FAILED", "Failed to initialize connectors")
return False
# Dump current Jira fields to text file for reference
try:
pass  # await self.dumpTicketFieldsToFile()
except Exception as e:
logger.warning(f"Failed to dump JIRA fields (non-blocking): {str(e)}")
# Dump actual JIRA data for debugging
try:
pass  # await self.dumpTicketDataToFile()
except Exception as e:
logger.warning(f"Failed to dump JIRA data (non-blocking): {str(e)}")
# Get the appropriate sync file name based on mode
sync_file_name = self.getSyncFileName()
logger.info(f"Using sync file: {sync_file_name}")
# Create list-based ticket interface (initialize connector by type)
sync_interface = await self.services.ticket.connectTicket(
taskSyncDefinition=self.TASK_SYNC_DEFINITION,
connectorType="Jira",
connectorParams={
"apiUsername": self.JIRA_USERNAME,
"apiToken": self.JIRA_API_TOKEN,
"apiUrl": self.JIRA_URL,
"projectCode": self.JIRA_PROJECT_CODE,
"ticketType": self.JIRA_ISSUE_TYPE,
},
)
# Perform the sophisticated sync based on mode
if self.SYNC_MODE == "xlsx":
# Export tickets to list
data_list = await sync_interface.exportTicketsAsList()
self._logAuditEvent("SYNC_EXPORT", "INFO", f"Exported {len(data_list)} tickets from JIRA")
# Read existing Excel headers/content
existing_data = []
existing_headers = {"header1": "Header 1", "header2": "Header 2"}
file_path = f"{self.SHAREPOINT_MAIN_FOLDER}/{sync_file_name}"  # defined up front; reused by the import step below
try:
excel_content = await self.services.sharepoint.downloadFileByPath(
siteId=self.targetSite['id'], filePath=file_path
)
existing_data, existing_headers = self.parseExcelContent(excel_content)
except Exception:
pass
# Merge and write
merged_data, merge_details = self.mergeJiraWithExistingDetailed(data_list, existing_data)
# Log detailed changes for Excel mode
self._logSyncChanges(merge_details, "EXCEL")
await self.backupSharepointFile(filename=sync_file_name)
excel_bytes = self.createExcelContent(merged_data, existing_headers)
await self.services.sharepoint.uploadFile(
siteId=self.targetSite['id'],
folderPath=self.SHAREPOINT_MAIN_FOLDER,
fileName=sync_file_name,
content=excel_bytes,
)
# Import back to tickets
try:
excel_content = await self.services.sharepoint.downloadFileByPath(
siteId=self.targetSite['id'], filePath=file_path
)
excel_rows, _ = self.parseExcelContent(excel_content)
self._logAuditEvent("SYNC_IMPORT", "INFO", f"Importing {len(excel_rows)} Excel rows back to tickets")
except Exception as e:
excel_rows = []
self._logAuditEvent("SYNC_IMPORT", "WARNING", f"Failed to download Excel for import: {str(e)}")
await sync_interface.importListToTickets(excel_rows)
else: # CSV mode (default)
# Export tickets to list
data_list = await sync_interface.exportTicketsAsList()
self._logAuditEvent("SYNC_EXPORT", "INFO", f"Exported {len(data_list)} tickets from JIRA")
# Prepare headers by reading existing CSV if present
existing_headers = {"header1": "Header 1", "header2": "Header 2"}
existing_data: list[dict] = []
file_path = f"{self.SHAREPOINT_MAIN_FOLDER}/{sync_file_name}"  # defined up front; reused by the import step below
try:
csv_content = await self.services.sharepoint.downloadFileByPath(
siteId=self.targetSite['id'], filePath=file_path
)
csv_lines = csv_content.decode('utf-8').split('\n')
if len(csv_lines) >= 2:
existing_headers["header1"] = csv_lines[0].rstrip('\r\n')
existing_headers["header2"] = csv_lines[1].rstrip('\r\n')
# Parse existing CSV rows after the two header lines
df_existing = pd.read_csv(io.BytesIO(csv_content), skiprows=2, quoting=1, escapechar='\\', on_bad_lines='skip', engine='python')
existing_data = df_existing.to_dict('records')
except Exception:
pass
await self.backupSharepointFile(filename=sync_file_name)
merged_data, _ = self.mergeJiraWithExistingDetailed(data_list, existing_data)
csv_bytes = self.createCsvContent(merged_data, existing_headers)
await self.services.sharepoint.uploadFile(
siteId=self.targetSite['id'],
folderPath=self.SHAREPOINT_MAIN_FOLDER,
fileName=sync_file_name,
content=csv_bytes,
)
# Import from CSV
try:
csv_content = await self.services.sharepoint.downloadFileByPath(
siteId=self.targetSite['id'], filePath=file_path
)
df = pd.read_csv(io.BytesIO(csv_content), skiprows=2, quoting=1, escapechar='\\', on_bad_lines='skip', engine='python')
csv_rows = df.to_dict('records')
self._logAuditEvent("SYNC_IMPORT", "INFO", f"Importing {len(csv_rows)} CSV rows back to tickets")
except Exception as e:
csv_rows = []
self._logAuditEvent("SYNC_IMPORT", "WARNING", f"Failed to download CSV for import: {str(e)}")
await sync_interface.importListToTickets(csv_rows)
logger.info(f"JIRA to SharePoint synchronization completed successfully (Mode: {self.SYNC_MODE})")
self._logAuditEvent("SYNC_COMPLETE", "SUCCESS", f"JIRA to SharePoint sync completed successfully (Mode: {self.SYNC_MODE})")
# Save audit log to SharePoint
await self._saveAuditLogToSharepoint()
return True
except Exception as e:
logger.error(f"Error during JIRA to SharePoint synchronization: {str(e)}")
self._logAuditEvent("SYNC_ERROR", "FAILED", f"Error during sync: {str(e)}")
# Save audit log to SharePoint even on error
await self._saveAuditLogToSharepoint()
return False
async def backupSharepointFile(self, *, filename: str) -> bool:
try:
timestamp = datetime.fromtimestamp(self.services.utils.timestampGetUtc(), UTC).strftime("%Y%m%d_%H%M%S")
backup_filename = f"backup_{timestamp}_{filename}"
await self.services.sharepoint.copyFileAsync(
siteId=self.targetSite['id'],
sourceFolder=self.SHAREPOINT_MAIN_FOLDER,
sourceFile=filename,
destFolder=self.SHAREPOINT_BACKUP_FOLDER,
destFile=backup_filename,
)
self._logAuditEvent("SYNC_BACKUP", "SUCCESS", f"Backed up file: {filename} -> {backup_filename}")
return True
except Exception as e:
if "itemNotFound" in str(e) or "404" in str(e):
self._logAuditEvent("SYNC_BACKUP", "SKIPPED", f"File not found for backup: {filename}")
return True
logger.warning(f"Backup failed: {e}")
self._logAuditEvent("SYNC_BACKUP", "FAILED", f"Backup failed for {filename}: {str(e)}")
return False
def mergeJiraWithExistingDetailed(self, jira_data: list[dict], existing_data: list[dict]) -> tuple[list[dict], dict]:
existing_lookup = {row.get("ID"): row for row in existing_data if row.get("ID")}
merged_data: list[dict] = []
changes: list[str] = []
updated_count = added_count = unchanged_count = 0
for jira_row in jira_data:
jira_id = jira_row.get("ID")
if jira_id and jira_id in existing_lookup:
existing_row = existing_lookup[jira_id].copy()
row_changes: list[str] = []
for field_name, field_config in self.TASK_SYNC_DEFINITION.items():
if field_config[0] == 'get':
old_value = "" if existing_row.get(field_name) is None else str(existing_row.get(field_name))
new_value = "" if jira_row.get(field_name) is None else str(jira_row.get(field_name))
# Convert ADF data to readable text for logging
if isinstance(new_value, dict) and new_value.get("type") == "doc":
new_value_readable = self.convertAdfToText(new_value)
if old_value != new_value_readable:
row_changes.append(f"{field_name}: '{old_value[:100]}...' -> '{new_value_readable[:100]}...'")
elif old_value != new_value:
# Truncate long values for logging
old_truncated = old_value[:100] + "..." if len(old_value) > 100 else old_value
new_truncated = new_value[:100] + "..." if len(new_value) > 100 else new_value
row_changes.append(f"{field_name}: '{old_truncated}' -> '{new_truncated}'")
existing_row[field_name] = jira_row.get(field_name)
merged_data.append(existing_row)
if row_changes:
updated_count += 1
changes.append(f"Row ID {jira_id} updated: {', '.join(row_changes)}")
else:
unchanged_count += 1
del existing_lookup[jira_id]
else:
merged_data.append(jira_row)
added_count += 1
changes.append(f"Row ID {jira_id} added as new record")
for remaining in existing_lookup.values():
merged_data.append(remaining)
unchanged_count += 1
details = {"updated": updated_count, "added": added_count, "unchanged": unchanged_count, "changes": changes}
return merged_data, details
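A toy illustration of the ID-keyed merge strategy implemented above: incoming JIRA rows overwrite shared fields, new IDs are appended, and sheet-only rows are preserved:

```python
# Simplified merge, keyed by the 'ID' column as in mergeJiraWithExistingDetailed.
jira_rows = [{"ID": "DCS-1", "Summary": "Fix login"}, {"ID": "DCS-3", "Summary": "New task"}]
sheet_rows = [{"ID": "DCS-1", "Summary": "Fix login page", "SELISE Comments": "in review"},
              {"ID": "DCS-2", "Summary": "Sheet-only row"}]

merged = {row["ID"]: dict(row) for row in sheet_rows}
for row in jira_rows:
    merged.setdefault(row["ID"], {}).update(row)  # JIRA wins on shared fields

print(list(merged.values()))
# DCS-1 keeps 'SELISE Comments' but takes the JIRA summary; DCS-2 survives; DCS-3 is added.
```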
def createCsvContent(self, data: list[dict], existing_headers: dict | None = None) -> bytes:
timestamp = datetime.fromtimestamp(self.services.utils.timestampGetUtc(), UTC).strftime("%Y-%m-%d %H:%M:%S UTC")
if existing_headers is None:
existing_headers = {"header1": "Header 1", "header2": "Header 2"}
if not data:
cols = list(self.TASK_SYNC_DEFINITION.keys())
df = pd.DataFrame(columns=cols)
else:
df = pd.DataFrame(data)
for column in df.columns:
df[column] = df[column].astype("object").fillna("")
df[column] = df[column].astype(str).str.replace('\n', '\\n', regex=False).str.replace('"', '""', regex=False)
header1_row = next(csv_module.reader([existing_headers.get("header1", "Header 1")]), [])
header2_row = next(csv_module.reader([existing_headers.get("header2", "Header 2")]), [])
if len(header2_row) > 1:
header2_row[1] = timestamp
header_row1 = pd.DataFrame([header1_row + [""] * (len(df.columns) - len(header1_row))], columns=df.columns)
header_row2 = pd.DataFrame([header2_row + [""] * (len(df.columns) - len(header2_row))], columns=df.columns)
table_headers = pd.DataFrame([df.columns.tolist()], columns=df.columns)
final_df = pd.concat([header_row1, header_row2, table_headers, df], ignore_index=True)
out = StringIO()
final_df.to_csv(out, index=False, header=False, quoting=1, escapechar='\\')
return out.getvalue().encode('utf-8')
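Both createCsvContent and the Excel variant below use the same layout trick: two free-form header rows plus the column-name row are stacked on top of the data, and everything is written with header=False. A stand-alone demo:

```python
# Demo of the header-stacking layout used by createCsvContent/createExcelContent.
from io import StringIO
import pandas as pd

df = pd.DataFrame([{"ID": "DCS-1", "Summary": "Fix login"}])
header1 = pd.DataFrame([["DELTAgroup x SELISE", ""]], columns=df.columns)
header2 = pd.DataFrame([["Last sync", "2025-12-09 22:00 UTC"]], columns=df.columns)
colnames = pd.DataFrame([df.columns.tolist()], columns=df.columns)

out = StringIO()
pd.concat([header1, header2, colnames, df], ignore_index=True).to_csv(
    out, index=False, header=False
)
print(out.getvalue())
# DELTAgroup x SELISE,
# Last sync,2025-12-09 22:00 UTC
# ID,Summary
# DCS-1,Fix login
```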
def createExcelContent(self, data: list[dict], existing_headers: dict | None = None) -> bytes:
timestamp = datetime.fromtimestamp(self.services.utils.timestampGetUtc(), UTC).strftime("%Y-%m-%d %H:%M:%S UTC")
if existing_headers is None:
existing_headers = {"header1": "Header 1", "header2": "Header 2"}
if not data:
cols = list(self.TASK_SYNC_DEFINITION.keys())
df = pd.DataFrame(columns=cols)
else:
df = pd.DataFrame(data)
for column in df.columns:
df[column] = df[column].astype("object").fillna("")
df[column] = df[column].astype(str).str.replace('\n', '\\n', regex=False).str.replace('"', '""', regex=False)
header1_row = next(csv_module.reader([existing_headers.get("header1", "Header 1")]), [])
header2_row = next(csv_module.reader([existing_headers.get("header2", "Header 2")]), [])
if len(header2_row) > 1:
header2_row[1] = timestamp
header_row1 = pd.DataFrame([header1_row + [""] * (len(df.columns) - len(header1_row))], columns=df.columns)
header_row2 = pd.DataFrame([header2_row + [""] * (len(df.columns) - len(header2_row))], columns=df.columns)
table_headers = pd.DataFrame([df.columns.tolist()], columns=df.columns)
final_df = pd.concat([header_row1, header_row2, table_headers, df], ignore_index=True)
buf = BytesIO()
final_df.to_excel(buf, index=False, header=False, engine='openpyxl')
return buf.getvalue()
def parseExcelContent(self, excel_content: bytes) -> tuple[list[dict], dict]:
df = pd.read_excel(BytesIO(excel_content), engine='openpyxl', header=None)
header_row1 = df.iloc[0:1].copy()
header_row2 = df.iloc[1:2].copy()
table_headers = df.iloc[2:3].copy()
df_data = df.iloc[3:].copy()
df_data.columns = table_headers.iloc[0]
df_data = df_data.reset_index(drop=True)
for column in df_data.columns:
df_data[column] = df_data[column].astype('object').fillna('')
data = df_data.to_dict(orient='records')
headers = {
"header1": ",".join([str(x) if pd.notna(x) else "" for x in header_row1.iloc[0].tolist()]),
"header2": ",".join([str(x) if pd.notna(x) else "" for x in header_row2.iloc[0].tolist()]),
}
return data, headers
def convertAdfToText(self, adf_data):
"""Convert Atlassian Document Format (ADF) to plain text.
Based on Atlassian Document Format specification for JIRA fields.
Handles paragraphs, lists, text formatting, and other ADF node types.
Args:
adf_data: ADF object or None
Returns:
str: Plain text content, or empty string if None/invalid
"""
if not adf_data or not isinstance(adf_data, dict):
return ""
if adf_data.get("type") != "doc":
return str(adf_data) if adf_data else ""
content = adf_data.get("content", [])
if not isinstance(content, list):
return ""
def extractTextFromContent(contentList, listLevel=0):
"""Recursively extract text from ADF content with proper formatting."""
textParts = []
listCounter = 1
for item in contentList:
if not isinstance(item, dict):
continue
itemType = item.get("type", "")
if itemType == "text":
# Extract text content, preserving formatting
text = item.get("text", "")
marks = item.get("marks", [])
# Handle text formatting (bold, italic, etc.)
if marks:
for mark in marks:
if mark.get("type") == "strong":
text = f"**{text}**"
elif mark.get("type") == "em":
text = f"*{text}*"
elif mark.get("type") == "code":
text = f"`{text}`"
elif mark.get("type") == "link":
attrs = mark.get("attrs", {})
href = attrs.get("href", "")
if href:
text = f"[{text}]({href})"
textParts.append(text)
elif itemType == "hardBreak":
textParts.append("\n")
elif itemType == "paragraph":
paragraphContent = item.get("content", [])
if paragraphContent:
paragraphText = extractTextFromContent(paragraphContent, listLevel)
if paragraphText.strip():
textParts.append(paragraphText)
elif itemType == "bulletList":
listContent = item.get("content", [])
for listItem in listContent:
if listItem.get("type") == "listItem":
listItemContent = listItem.get("content", [])
for listParagraph in listItemContent:
if listParagraph.get("type") == "paragraph":
listParagraphContent = listParagraph.get("content", [])
if listParagraphContent:
indent = " " * listLevel
bulletText = extractTextFromContent(listParagraphContent, listLevel + 1)
if bulletText.strip():
textParts.append(f"{indent}{bulletText}")
elif itemType == "orderedList":
listContent = item.get("content", [])
for listItem in listContent:
if listItem.get("type") == "listItem":
listItemContent = listItem.get("content", [])
for listParagraph in listItemContent:
if listParagraph.get("type") == "paragraph":
listParagraphContent = listParagraph.get("content", [])
if listParagraphContent:
indent = " " * listLevel
orderedText = extractTextFromContent(listParagraphContent, listLevel + 1)
if orderedText.strip():
textParts.append(f"{indent}{listCounter}. {orderedText}")
listCounter += 1
elif itemType == "listItem":
# Handle nested list items
listItemContent = item.get("content", [])
if listItemContent:
textParts.append(extractTextFromContent(listItemContent, listLevel))
elif itemType == "embedCard":
# Handle embedded content (videos, etc.)
attrs = item.get("attrs", {})
url = attrs.get("url", "")
if url:
textParts.append(f"[Embedded Content: {url}]")
elif itemType == "codeBlock":
# Handle code blocks
codeContent = item.get("content", [])
if codeContent:
codeText = extractTextFromContent(codeContent, listLevel)
if codeText.strip():
textParts.append(f"```\n{codeText}\n```")
elif itemType == "blockquote":
# Handle blockquotes
quoteContent = item.get("content", [])
if quoteContent:
quoteText = extractTextFromContent(quoteContent, listLevel)
if quoteText.strip():
textParts.append(f"> {quoteText}")
elif itemType == "heading":
# Handle headings
headingContent = item.get("content", [])
if headingContent:
headingText = extractTextFromContent(headingContent, listLevel)
if headingText.strip():
level = item.get("attrs", {}).get("level", 1)
textParts.append(f"{'#' * level} {headingText}")
elif itemType == "rule":
# Handle horizontal rules
textParts.append("---")
else:
# Handle unknown types by trying to extract content
if "content" in item:
contentText = extractTextFromContent(item.get("content", []), listLevel)
if contentText.strip():
textParts.append(contentText)
return "\n".join(textParts)
result = extractTextFromContent(content)
return result.strip()
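A worked example of the converter (assuming manager is an initialized ManagerSyncDelta); note that bullet items are emitted with indentation only, without a bullet marker:

```python
# Minimal ADF document: one bold paragraph and a single-item bullet list.
adf = {
    "type": "doc",
    "content": [
        {"type": "paragraph", "content": [
            {"type": "text", "text": "Deploy now", "marks": [{"type": "strong"}]},
        ]},
        {"type": "bulletList", "content": [
            {"type": "listItem", "content": [
                {"type": "paragraph", "content": [{"type": "text", "text": "step one"}]},
            ]},
        ]},
    ],
}
print(manager.convertAdfToText(adf))
# **Deploy now**
# step one
```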
# Utility: dump all ticket fields (name -> field id) to a text file (generic)
async def dumpTicketFieldsToFile(self,
*,
filepath: str = "ticket_sync_fields.txt",
connectorType: str = "Jira",
connectorParams: dict | None = None,
taskSyncDefinition: dict | None = None,
) -> bool:
"""Write available ticket fields (name -> field id) to a text file (generic)."""
try:
connectorParams = connectorParams or {}
taskSyncDefinition = taskSyncDefinition or self.TASK_SYNC_DEFINITION
ticket_interface = await self.services.ticket.connectTicket(
taskSyncDefinition=taskSyncDefinition,
connectorType=connectorType,
connectorParams=connectorParams,
)
attributes = await ticket_interface.connector_ticket.readAttributes()
if not attributes:
logger.warning("No ticket attributes returned; nothing to write.")
return False
dir_name = os.path.dirname(filepath)
if dir_name:
os.makedirs(dir_name, exist_ok=True)
with open(filepath, "w", encoding="utf-8") as f:
for attr in attributes:
f.write(f"'{attr.field_name}': ['get', ['fields', '{attr.field}']]\n")
logger.info(f"Wrote {len(attributes)} ticket fields to {filepath}")
return True
except Exception as e:
logger.error(f"Failed to dump ticket fields: {str(e)}")
return False
# Utility: dump actual ticket data for debugging (generic)
async def dumpTicketDataToFile(self,
*,
filepath: str = "ticket_sync_data.txt",
connectorType: str = "Jira",
connectorParams: dict | None = None,
taskSyncDefinition: dict | None = None,
sampleLimit: int = 5,
) -> bool:
"""Write actual ticket data to a text file for debugging field mapping (generic)."""
try:
connectorParams = connectorParams or {}
taskSyncDefinition = taskSyncDefinition or self.TASK_SYNC_DEFINITION
ticket_interface = await self.services.ticket.connectTicket(
taskSyncDefinition=taskSyncDefinition,
connectorType=connectorType,
connectorParams=connectorParams,
)
tickets = await ticket_interface.connector_ticket.readTasks(limit=sampleLimit)
if not tickets:
logger.warning("No tickets returned; nothing to write.")
return False
dir_name = os.path.dirname(filepath)
if dir_name:
os.makedirs(dir_name, exist_ok=True)
with open(filepath, "w", encoding="utf-8") as f:
f.write("=== TICKET DATA DEBUG ===\n\n")
for i, ticket in enumerate(tickets):
f.write(f"--- TICKET {i+1} ---\n")
f.write("Raw ticket data:\n")
f.write(f"{ticket.data}\n\n")
f.write("Field mapping analysis:\n")
for fieldName, fieldPath in taskSyncDefinition.items():
if fieldPath[0] == 'get':
try:
value = ticket.data
for key in fieldPath[1]:
if isinstance(value, dict) and key in value:
value = value[key]
else:
value = f"KEY_NOT_FOUND: {key}"
break
if isinstance(value, dict) and value.get("type") == "doc":
pass # value = self.convertAdfToText(value)
elif value is None:
value = ""
f.write(f" {fieldName}: {value}\n")
except Exception as e:
f.write(f" {fieldName}: ERROR - {str(e)}\n")
f.write("\n" + "="*50 + "\n\n")
logger.info(f"Wrote ticket data for {len(tickets)} tickets to {filepath}")
return True
except Exception as e:
logger.error(f"Failed to dump ticket data: {str(e)}")
return False
# Main part of the module
async def performSync(eventUser) -> bool:
"""Perform tickets to SharePoint synchronization
This function is called by the scheduler and can be used independently.
Args:
eventUser: Event user to use for synchronization
Returns:
bool: True if synchronization was successful, False otherwise
"""
try:
logger.info("Starting DG tickets sync...")
if not eventUser:
logger.error("Event user not provided - cannot perform sync")
return False
# Sync audit logging is handled by ManagerSyncDelta instance
syncManager = ManagerSyncDelta(eventUser)
success = await syncManager.syncTicketsOverSharepoint()
if success:
logger.info("DG tickets sync completed successfully")
else:
logger.error("DG tickets sync failed")
return success
except Exception as e:
logger.error(f"Error in performing DG tickets sync: {str(e)}")
return False
# Create a global instance of ManagerSyncDelta to use for scheduled runs
_sync_manager = None
def startSyncManager(eventUser):
"""Initialize the global sync manager with the eventUser."""
global _sync_manager
if _sync_manager is None:
_sync_manager = ManagerSyncDelta(eventUser)
logger.info("Global sync manager initialized with eventUser")
try:
# Register scheduled job based on environment using the manager's services
if _sync_manager.APP_ENV_TYPE == "pause":  # PAUSED: TODO re-enable for prod
_sync_manager.services.utils.eventRegisterCron(
job_id="syncDelta.syncTicket",
func=scheduledSync,
cron_kwargs={"minute": "0,20,40"},
replace_existing=True,
coalesce=True,
max_instances=1,
misfire_grace_time=1800,
)
logger.info("Registered DG scheduler (every 20 minutes)")
else:
logger.info(f"Skipping DG scheduler registration for ticket sync in env: {_sync_manager.APP_ENV_TYPE}")
except Exception as e:
logger.error(f"Failed to register scheduler for DG sync: {str(e)}")
return _sync_manager
async def scheduledSync():
"""Scheduled sync function that uses the global sync manager."""
try:
global _sync_manager
if _sync_manager and _sync_manager.eventUser:
return await performSync(_sync_manager.eventUser)
else:
logger.error("Sync manager not properly initialized - no eventUser")
return False
except Exception as e:
logger.error(f"Error in scheduled sync: {str(e)}")
return False
# Scheduler registration and initialization are triggered by startSyncManager(eventUser)

View file

@@ -0,0 +1,12 @@
"""
Workflow feature - handles workflow execution, scheduling, and chat playground operations.
Combines functionality from:
- automation: Automation workflow execution and scheduling
- chatPlayground: Chat playground workflow start/stop operations
"""
from .mainWorkflow import chatStart, chatStop, executeAutomation, syncAutomationEvents, createAutomationEventHandler
__all__ = ['chatStart', 'chatStop', 'executeAutomation', 'syncAutomationEvents', 'createAutomationEventHandler']
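A sketch of the combined entry points from a caller's perspective; currentUser and userInput would come from the request context, and WORKFLOW_DYNAMIC matches the docstring example in mainWorkflow below:

```python
from modules.features.workflow import chatStart, chatStop
from modules.datamodels.datamodelChat import WorkflowModeEnum

async def runDynamicChat(currentUser, userInput):
    # Start a dynamic-mode workflow, then stop it again (sketch only).
    workflow = await chatStart(currentUser, userInput, workflowMode=WorkflowModeEnum.WORKFLOW_DYNAMIC)
    return await chatStop(currentUser, workflow.id)
```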

View file

@@ -1,29 +1,65 @@
"""
Main automation service - handles automation workflow execution and scheduling.
Main workflow service - handles workflow execution, scheduling, and chat playground operations.
Moved from interfaces/interfaceDbChatObjects.py to follow proper architectural separation.
Combines functionality from:
- mainAutomation.py: Automation workflow execution and scheduling
- mainChatPlayground.py: Chat playground workflow start/stop operations
"""
import logging
import json
from typing import Dict, Any
from typing import Dict, Any, Optional
from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, WorkflowModeEnum, AutomationDefinition
from modules.datamodels.datamodelUam import User
from modules.shared.timeUtils import getUtcTimestamp
from modules.shared.eventManagement import eventManager
from modules.services import getInterface as getServices
from modules.features.chatPlayground.mainChatPlayground import chatStart
from modules.workflows.workflowManager import WorkflowManager
from .subAutomationUtils import parseScheduleToCron, planToPrompt, replacePlaceholders
logger = logging.getLogger(__name__)
async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
async def chatStart(currentUser: User, userInput: UserInputRequest, workflowMode: WorkflowModeEnum, workflowId: Optional[str] = None) -> ChatWorkflow:
"""
Starts a new chat or continues an existing one, then launches processing asynchronously.
Args:
currentUser: Current user
userInput: User input request
workflowMode: "Dynamic" for iterative dynamic-style processing, "Automation" for automated workflow execution
workflowId: Optional workflow ID to continue an existing workflow
Example usage for Dynamic mode:
workflow = await chatStart(currentUser, userInput, workflowMode=WorkflowModeEnum.WORKFLOW_DYNAMIC)
"""
try:
services = getServices(currentUser, None)
workflowManager = WorkflowManager(services)
workflow = await workflowManager.workflowStart(userInput, workflowMode, workflowId)
return workflow
except Exception as e:
logger.error(f"Error starting chat: {str(e)}")
raise
async def chatStop(currentUser: User, workflowId: str) -> ChatWorkflow:
"""Stops a running chat."""
try:
services = getServices(currentUser, None)
workflowManager = WorkflowManager(services)
return await workflowManager.workflowStop(workflowId)
except Exception as e:
logger.error(f"Error stopping chat: {str(e)}")
raise
async def executeAutomation(automationId: str, services) -> ChatWorkflow:
"""Execute automation workflow immediately (test mode) with placeholder replacement.
Args:
automationId: ID of automation to execute
chatInterface: ChatObjects interface instance for data access
services: Services instance for data access
Returns:
ChatWorkflow instance created by automation execution
@@ -38,7 +74,7 @@ async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
try:
# 1. Load automation definition
automation = chatInterface.getAutomationDefinition(automationId)
automation = services.interfaceDbChat.getAutomationDefinition(automationId)
if not automation:
raise ValueError(f"Automation {automationId} not found")
@@ -74,7 +110,7 @@ async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
raise ValueError(errorMsg)
# Get user from database using services
services = getServices(chatInterface.currentUser, None)
creatorUser = services.interfaceDbApp.getUser(creatorUserId)
if not creatorUser:
raise ValueError(f"Creator user {creatorUserId} not found")
@@ -112,7 +147,7 @@ async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
# Set workflow name with "automated" prefix
automationLabel = automation.get("label", "Unknown Automation")
workflowName = f"automated: {automationLabel}"
workflow = chatInterface.updateWorkflow(workflow.id, {"name": workflowName})
workflow = services.interfaceDbChat.updateWorkflow(workflow.id, {"name": workflowName})
logger.info(f"Set workflow {workflow.id} name to: {workflowName}")
# Update automation with execution log
@@ -122,8 +157,7 @@ async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
if len(executionLogs) > 50:
executionLogs = executionLogs[-50:]
chatInterface.db.recordModify(
AutomationDefinition,
services.interfaceDbChat.updateAutomationDefinition(
automationId,
{"executionLogs": executionLogs}
)
@@ -136,14 +170,13 @@ async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
# Update automation with execution log even on error
try:
automation = chatInterface.getAutomationDefinition(automationId)
automation = services.interfaceDbChat.getAutomationDefinition(automationId)
if automation:
executionLogs = automation.get("executionLogs", [])
executionLogs.append(executionLog)
if len(executionLogs) > 50:
executionLogs = executionLogs[-50:]
chatInterface.db.recordModify(
AutomationDefinition,
services.interfaceDbChat.updateAutomationDefinition(
automationId,
{"executionLogs": executionLogs}
)
@@ -153,21 +186,18 @@ async def executeAutomation(automationId: str, chatInterface) -> ChatWorkflow:
raise
async def syncAutomationEvents(chatInterface, eventUser) -> Dict[str, Any]:
async def syncAutomationEvents(services, eventUser) -> Dict[str, Any]:
"""Automation event handler - syncs scheduler with all active automations.
Args:
chatInterface: ChatObjects interface instance for data access
services: Services instance for data access
eventUser: System-level event user for accessing automations
Returns:
Dictionary with sync results (synced count and event IDs)
"""
# Get all automation definitions filtered by RBAC (for current mandate)
filtered = chatInterface.db.getRecordsetWithRBAC(
AutomationDefinition,
eventUser
)
filtered = services.interfaceDbChat.getAllAutomationDefinitionsWithRBAC(eventUser)
registeredEvents = {}
@@ -209,8 +239,7 @@ async def syncAutomationEvents(chatInterface, eventUser) -> Dict[str, Any]:
# Update automation with new eventId
if currentEventId != newEventId:
chatInterface.db.recordModify(
AutomationDefinition,
services.interfaceDbChat.updateAutomationDefinition(
automationId,
{"eventId": newEventId}
)
@@ -221,8 +250,7 @@ async def syncAutomationEvents(chatInterface, eventUser) -> Dict[str, Any]:
if currentEventId:
try:
eventManager.remove(currentEventId)
chatInterface.db.recordModify(
AutomationDefinition,
services.interfaceDbChat.updateAutomationDefinition(
automationId,
{"eventId": None}
)
@@ -280,7 +308,7 @@ def createAutomationEventHandler(automationId: str, eventUser):
# Execute automation with creator user's context
# executeAutomation is in same module, so we can call it directly
await executeAutomation(automationId, creatorServices.interfaceDbChat)
await executeAutomation(automationId, creatorServices)
logger.info(f"Successfully executed automation {automationId} as user {creatorUserId}")
except Exception as e:
logger.error(f"Error executing automation {automationId}: {str(e)}")

View file

@@ -0,0 +1,383 @@
"""
Automation templates for workflow definitions.
Contains predefined workflow templates that can be used to create automation definitions.
"""
from typing import Dict, Any, List
# Automation templates structure
AUTOMATION_TEMPLATES: Dict[str, Any] = {
"sets": [
{
"template": {
"overview": "SharePoint Themen Zusammenfassung",
"tasks": [
{
"id": "Task01",
"title": "SharePoint Themen Zusammenfassung",
"description": "Erstellt eine Zusammenfassung aller SharePoint Sites und deren Inhalte",
"objective": "Erstelle eine Zusammenfassung aller SharePoint Themen (Sites) und deren Inhalte als Word-Dokument",
"actionList": [
{
"execMethod": "sharepoint",
"execAction": "findDocumentPath",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"searchQuery": "*",
"maxResults": 100
},
"execResultLabel": "sharepoint_sites_found"
},
{
"execMethod": "sharepoint",
"execAction": "listDocuments",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"pathQuery": "{{KEY:sharepointBasePath}}",
"includeSubfolders": True
},
"execResultLabel": "sharepoint_structure"
},
{
"execMethod": "ai",
"execAction": "process",
"execParameters": {
"aiPrompt": "{{KEY:summaryPrompt}}",
"documentList": ["sharepoint_sites_found", "sharepoint_structure"],
"resultType": "docx"
},
"execResultLabel": "sharepoint_summary"
},
{
"execMethod": "sharepoint",
"execAction": "uploadDocument",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"documentList": ["sharepoint_summary"],
"pathQuery": "{{KEY:sharepointFolderNameDestination}}"
},
"execResultLabel": "sharepoint_upload_result"
}
]
}
]
},
"parameters": {
"connectionName": "connection:msft:p.motsch@valueon.ch",
"sharepointBasePath": "/sites/company-share",
"sharepointFolderNameDestination": "/sites/company-share/Freigegebene Dokumente/15. Persoenliche Ordner/Patrick Motsch/output",
"summaryPrompt": "Erstelle eine umfassende Zusammenfassung aller SharePoint Sites und deren Inhalte. Strukturiere das Dokument nach Sites und fasse für jede Site die wichtigsten Themen, Ordnerstrukturen und Dokumente zusammen. Erstelle ein professionelles Word-Dokument mit Überschriften, Abschnitten und einer klaren Gliederung. Berücksichtige alle gefundenen Sites, deren Ordnerstrukturen und dokumentiere die wichtigsten Inhalte pro Site."
}
},
{
"template": {
"overview": "Immobilienrecherche Zürich",
"tasks": [
{
"id": "Task02",
"title": "Immobilienrecherche Zürich",
"description": "Webrecherche nach Immobilien im Kanton Zürich und Speicherung in Excel",
"objective": "Immobilienrecherche im Kanton Zürich zum Verkauf (5-20 Mio. CHF) und speichere Ergebnisse in Excel-Liste auf SharePoint",
"actionList": [
{
"execMethod": "ai",
"execAction": "webResearch",
"execParameters": {
"prompt": "{{KEY:immobilienResearchPrompt}}",
"urlList": ["{{KEY:immobilienResearchUrl}}"]
},
"execResultLabel": "immobilien_research_results"
},
{
"execMethod": "ai",
"execAction": "process",
"execParameters": {
"aiPrompt": "{{KEY:excelFormatPrompt}}",
"documentList": ["immobilien_research_results"],
"resultType": "xlsx"
},
"execResultLabel": "immobilien_excel_list"
},
{
"execMethod": "sharepoint",
"execAction": "uploadDocument",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"documentList": ["immobilien_excel_list"],
"pathQuery": "{{KEY:sharepointFolderNameDestination}}"
},
"execResultLabel": "immobilien_upload_result"
}
]
}
]
},
"parameters": {
"connectionName": "connection:msft:p.motsch@valueon.ch",
"sharepointFolderNameDestination": "/sites/company-share/Freigegebene Dokumente/15. Persoenliche Ordner/Patrick Motsch/output",
"immobilienResearchUrl": ["https://www.homegate.ch", "https://www.immoscout24.ch", "https://www.immowelt.ch"],
"immobilienResearchPrompt": "Suche nach Immobilien zum Verkauf im Kanton Zürich, Schweiz, im Preisbereich von 5-20 Millionen CHF. Sammle Informationen zu: Ort, Preis, Beschreibung, URL zu Bildern, Verkäufer/Kontaktinformationen.",
"excelFormatPrompt": "Erstelle eine Excel-Datei mit den recherchierten Immobilien. Jede Immobilie soll eine Zeile sein mit den folgenden Spalten: Ort, Preis (in CHF), Beschreibung, URL zu Bild, Verkäufer. Verwende die Daten aus der Webrecherche."
}
},
{
"template": {
"overview": "Spesenbelege Zusammenfassung",
"tasks": [
{
"id": "Task03",
"title": "Spesenbelege CSV Zusammenfassung",
"description": "Liest PDF-Spesenbelege aus SharePoint-Ordner und erstellt CSV-Zusammenfassung",
"objective": "Extrahiere alle PDF-Spesenbelege aus einem SharePoint-Ordner und erstelle eine CSV-Datei mit allen Spesendaten im selben Ordner",
"actionList": [
{
"execMethod": "sharepoint",
"execAction": "findDocumentPath",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"searchQuery": "{{KEY:sharepointFolderNameSource}}:files:.pdf",
"maxResults": 100
},
"execResultLabel": "sharepoint_pdf_files"
},
{
"execMethod": "sharepoint",
"execAction": "readDocuments",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"pathObject": "sharepoint_pdf_files"
},
"execResultLabel": "spesenbelege_documents"
},
{
"execMethod": "ai",
"execAction": "process",
"execParameters": {
"aiPrompt": "{{KEY:expenseExtractionPrompt}}",
"documentList": ["spesenbelege_documents"],
"resultType": "csv"
},
"execResultLabel": "spesenbelege_csv"
},
{
"execMethod": "sharepoint",
"execAction": "uploadDocument",
"execParameters": {
"connectionReference": "{{KEY:connectionName}}",
"documentList": ["spesenbelege_csv"],
"pathQuery": "{{KEY:sharepointFolderNameDestination}}"
},
"execResultLabel": "spesenbelege_upload_result"
}
]
}
]
},
"parameters": {
"connectionName": "connection:msft:p.motsch@valueon.ch",
"sharepointFolderNameSource": "/sites/company-share/Freigegebene Dokumente/15. Persoenliche Ordner/Patrick Motsch/expenses",
"sharepointFolderNameDestination": "/sites/company-share/Freigegebene Dokumente/15. Persoenliche Ordner/Patrick Motsch/output",
"expenseExtractionPrompt": "Verarbeite alle bereitgestellten Dokumente, aber extrahiere nur Daten aus PDF-Spesenbelegen (ignoriere andere Dateitypen). Für jeden gefundenen PDF-Spesenbeleg extrahiere als separaten Datensatz: Datum, Betrag, MWST %, Währung, Kategorie, Beschreibung, Rechnungsnummer, Händler/Verkäufer, Steuerbetrag. Erstelle eine CSV-Datei mit einer Zeile pro Spesenbeleg. Verwende die folgenden Spaltenüberschriften: Datum, Betrag, Währung, Kategorie, Beschreibung, Rechnungsnummer, Händler, Steuerbetrag. Stelle sicher, dass alle Beträge numerisch sind und Datumswerte im Format YYYY-MM-DD vorliegen. Wenn ein Dokument kein Spesenbeleg ist, ignoriere es."
}
},
{
"template": {
"overview": "Preprocessing Server Data Update",
"tasks": [
{
"id": "Task04",
"title": "Trigger Preprocessing Server",
"description": "Triggers the preprocessing server at customer tenant to update database with configuration",
"objective": "Call preprocessing server endpoint to update database with provided configuration JSON",
"actionList": [
{
"execMethod": "context",
"execAction": "triggerPreprocessingServer",
"execParameters": {
"endpoint": "{{KEY:endpoint}}",
"configJson": "{{KEY:configJson}}",
"authSecretConfigKey": "{{KEY:authSecretConfigKey}}"
},
"execResultLabel": "preprocessing_server_result"
}
]
}
]
},
"parameters": {
"endpoint": "https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataprocessor/update-db-with-config",
"authSecretConfigKey": "PREPROCESS_ALTHAUS_CHAT_SECRET",
"configJson": "{\"tables\":[{\"name\":\"Artikel\",\"powerbi_table_name\":\"Artikel\",\"steps\":[{\"keep\":{\"columns\":[\"I_ID\",\"Artikelbeschrieb\",\"Artikelbezeichnung\",\"Artikelgruppe\",\"Artikelkategorie\",\"Artikelkürzel\",\"Artikelnummer\",\"Einheit\",\"Gesperrt\",\"Keywords\",\"Lieferant\",\"Warengruppe\"]}},{\"fillna\":{\"column\":\"Lieferant\",\"value\":\"Unbekannt\"}}]},{\"name\":\"Einkaufspreis\",\"powerbi_table_name\":\"Einkaufspreis\",\"steps\":[{\"to_numeric\":{\"column\":\"EP_CHF\",\"errors\":\"coerce\"}},{\"dropna\":{\"subset\":[\"EP_CHF\"]}}]}]}"
}
},
{
"template": {
"overview": "JIRA to SharePoint Ticket Synchronization",
"tasks": [
{
"id": "Task01",
"title": "Sync JIRA Tickets to SharePoint",
"description": "Export JIRA tickets, merge with SharePoint file, upload back, and import changes to JIRA",
"objective": "Synchronize JIRA tickets with SharePoint file (bidirectional sync)",
"actionList": [
{
"execMethod": "sharepoint",
"execAction": "findSiteByUrl",
"execParameters": {
"connectionReference": "{{KEY:sharepointConnection}}",
"hostname": "{{KEY:sharepointHostname}}",
"sitePath": "{{KEY:sharepointSitePath}}"
},
"execResultLabel": "sharepoint_site"
},
{
"execMethod": "jira",
"execAction": "connectJira",
"execParameters": {
"apiUsername": "{{KEY:jiraUsername}}",
"apiTokenConfigKey": "{{KEY:jiraTokenConfigKey}}",
"apiUrl": "{{KEY:jiraUrl}}",
"projectCode": "{{KEY:jiraProjectCode}}",
"issueType": "{{KEY:jiraIssueType}}",
"taskSyncDefinition": "{{KEY:taskSyncDefinition}}"
},
"execResultLabel": "jira_connection"
},
{
"execMethod": "jira",
"execAction": "exportTicketsAsJson",
"execParameters": {
"connectionId": "jira_connection",
"taskSyncDefinition": "{{KEY:taskSyncDefinition}}"
},
"execResultLabel": "jira_exported_tickets"
},
{
"execMethod": "sharepoint",
"execAction": "downloadFileByPath",
"execParameters": {
"connectionReference": "{{KEY:sharepointConnection}}",
"siteId": "sharepoint_site",
"filePath": "{{KEY:sharepointMainFolder}}/{{KEY:syncFileName}}"
},
"execResultLabel": "existing_file_content"
},
{
"execMethod": "jira",
"execAction": "parseExcelContent",
"execParameters": {
"excelContent": "existing_file_content",
"skipRows": 3,
"hasCustomHeaders": True
},
"execResultLabel": "existing_parsed_data"
},
{
"execMethod": "jira",
"execAction": "mergeTicketData",
"execParameters": {
"jiraData": "jira_exported_tickets",
"existingData": "existing_parsed_data",
"taskSyncDefinition": "{{KEY:taskSyncDefinition}}",
"idField": "ID"
},
"execResultLabel": "merged_ticket_data"
},
{
"execMethod": "sharepoint",
"execAction": "copyFile",
"execParameters": {
"connectionReference": "{{KEY:sharepointConnection}}",
"siteId": "sharepoint_site",
"sourceFolder": "{{KEY:sharepointMainFolder}}",
"sourceFile": "{{KEY:syncFileName}}",
"destFolder": "{{KEY:sharepointBackupFolder}}",
"destFile": "backup_{{TIMESTAMP}}_{{KEY:syncFileName}}"
},
"execResultLabel": "file_backup"
},
{
"execMethod": "jira",
"execAction": "createExcelContent",
"execParameters": {
"data": "merged_ticket_data",
"headers": "existing_parsed_data",
"taskSyncDefinition": "{{KEY:taskSyncDefinition}}"
},
"execResultLabel": "new_file_content"
},
{
"execMethod": "sharepoint",
"execAction": "uploadFile",
"execParameters": {
"connectionReference": "{{KEY:sharepointConnection}}",
"siteId": "sharepoint_site",
"folderPath": "{{KEY:sharepointMainFolder}}",
"fileName": "{{KEY:syncFileName}}",
"content": "new_file_content"
},
"execResultLabel": "uploaded_file"
},
{
"execMethod": "sharepoint",
"execAction": "downloadFileByPath",
"execParameters": {
"connectionReference": "{{KEY:sharepointConnection}}",
"siteId": "sharepoint_site",
"filePath": "{{KEY:sharepointMainFolder}}/{{KEY:syncFileName}}"
},
"execResultLabel": "uploaded_file_content"
},
{
"execMethod": "jira",
"execAction": "parseExcelContent",
"execParameters": {
"excelContent": "uploaded_file_content",
"skipRows": 3,
"hasCustomHeaders": True
},
"execResultLabel": "import_data"
},
{
"execMethod": "jira",
"execAction": "importTicketsFromJson",
"execParameters": {
"connectionId": "jira_connection",
"ticketData": "import_data",
"taskSyncDefinition": "{{KEY:taskSyncDefinition}}"
},
"execResultLabel": "import_result"
}
]
}
]
},
"parameters": {
"sharepointConnection": "connection:msft:patrick.motsch@delta.ch",
"sharepointHostname": "deltasecurityag.sharepoint.com",
"sharepointSitePath": "SteeringBPM",
"sharepointMainFolder": "/General/50 Docs hosted by SELISE",
"sharepointBackupFolder": "/General/50 Docs hosted by SELISE/SyncHistory",
"syncFileName": "DELTAgroup x SELISE Ticket Exchange List.xlsx",
"jiraUsername": "p.motsch@valueon.ch",
"jiraTokenConfigKey": "Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET",
"jiraUrl": "https://deltasecurity.atlassian.net",
"jiraProjectCode": "DCS",
"jiraIssueType": "Task",
"taskSyncDefinition": "{\"ID\":[\"get\",[\"key\"]],\"Module Category\":[\"get\",[\"fields\",\"customfield_10058\",\"value\"]],\"Summary\":[\"get\",[\"fields\",\"summary\"]],\"Description\":[\"get\",[\"fields\",\"description\"]],\"References\":[\"get\",[\"fields\",\"customfield_10066\"]],\"Priority\":[\"get\",[\"fields\",\"priority\",\"name\"]],\"Issue Status\":[\"get\",[\"fields\",\"status\",\"name\"]],\"Assignee\":[\"get\",[\"fields\",\"assignee\",\"displayName\"]],\"Issue Created\":[\"get\",[\"fields\",\"created\"]],\"Due Date\":[\"get\",[\"fields\",\"duedate\"]],\"DELTA Comments\":[\"get\",[\"fields\",\"customfield_10167\"]],\"SELISE Ticket References\":[\"put\",[\"fields\",\"customfield_10067\"]],\"SELISE Status Values\":[\"put\",[\"fields\",\"customfield_10065\"]],\"SELISE Comments\":[\"put\",[\"fields\",\"customfield_10168\"]]}"
}
}
]
}
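# Note on taskSyncDefinition above (assumed semantics, not stated in this diff):
# entries tagged "get" are read from JIRA fields during export to the sheet, while
# entries tagged "put" are written back to JIRA custom fields during import.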
def getAutomationTemplates() -> Dict[str, Any]:
"""
Get automation templates.
Returns:
Dict containing the automation templates structure with a 'sets' key.
"""
return AUTOMATION_TEMPLATES
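The Task04 `configJson` above encodes per-table cleaning steps (`keep`, `fillna`, `to_numeric`, `dropna`). The preprocessing server's implementation is not part of this diff; purely for orientation, a minimal sketch of how such steps could map onto pandas operations:

```python
import pandas as pd

def applySteps(df: pd.DataFrame, steps: list) -> pd.DataFrame:
    """Apply config-driven cleaning steps to a DataFrame (illustrative only)."""
    for step in steps:
        if "keep" in step:  # project to the listed columns
            df = df[step["keep"]["columns"]]
        elif "fillna" in step:  # replace missing values in one column
            col = step["fillna"]["column"]
            df[col] = df[col].fillna(step["fillna"]["value"])
        elif "to_numeric" in step:  # coerce to numeric; invalid values become NaN
            col = step["to_numeric"]["column"]
            df[col] = pd.to_numeric(df[col], errors=step["to_numeric"]["errors"])
        elif "dropna" in step:  # drop rows with NaN in the given subset
            df = df.dropna(subset=step["dropna"]["subset"])
    return df
```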

View file

@ -6,6 +6,7 @@ Moved from interfaces/interfaceDbChatObjects.py.
import json
from typing import Dict, Any
from datetime import datetime, UTC
def parseScheduleToCron(schedule: str) -> Dict[str, Any]:
@ -29,8 +30,15 @@ def planToPrompt(plan: Dict) -> str:
def replacePlaceholders(template: str, placeholders: Dict[str, str]) -> str:
"""Replace placeholders in template with actual values. Placeholder format: {{KEY:PLACEHOLDER_NAME}}"""
"""Replace placeholders in template with actual values. Placeholder format: {{KEY:PLACEHOLDER_NAME}} or {{TIMESTAMP}}"""
result = template
# Replace TIMESTAMP placeholder first (calculated placeholder, not from parameters)
timestampPattern = "{{TIMESTAMP}}"
if timestampPattern in result:
timestamp = datetime.now(UTC).strftime("%Y%m%d_%H%M%S")
result = result.replace(timestampPattern, timestamp)
for placeholderName, value in placeholders.items():
pattern = f"{{{{KEY:{placeholderName}}}}}"

View file

@ -13,6 +13,7 @@ from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
from modules.interfaces.interfaceBootstrap import initBootstrap
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.security.rbac import RbacClass
from modules.datamodels.datamodelUam import (
User,
@ -420,7 +421,8 @@ class AppObjects:
If pagination is provided: PaginatedResult with items and metadata
"""
# Use RBAC filtering
users = self.db.getRecordsetWithRBAC(
users = getRecordsetWithRBAC(
self.db,
UserInDB,
self.currentUser,
recordFilter={"mandateId": mandateId} if mandateId else None
@ -474,7 +476,7 @@ class AppObjects:
"""Returns a user by username."""
try:
# Use RBAC filtering
users = self.db.getRecordsetWithRBAC(
users = getRecordsetWithRBAC(self.db,
UserInDB,
self.currentUser,
recordFilter={"username": username}
@ -501,7 +503,7 @@ class AppObjects:
"""Returns a user by ID if user has access."""
try:
# Get users filtered by RBAC
users = self.db.getRecordsetWithRBAC(
users = getRecordsetWithRBAC(self.db,
UserInDB,
self.currentUser,
recordFilter={"id": userId}
@ -754,7 +756,7 @@ class AppObjects:
if not initialUserId:
return None
users = self.db.getRecordsetWithRBAC(
users = getRecordsetWithRBAC(self.db,
UserInDB,
self.currentUser,
recordFilter={"id": initialUserId}
@ -913,7 +915,7 @@ class AppObjects:
If pagination is provided: PaginatedResult with items and metadata
"""
# Use RBAC filtering
allMandates = self.db.getRecordsetWithRBAC(Mandate, self.currentUser)
allMandates = getRecordsetWithRBAC(self.db, Mandate, self.currentUser)
# Filter out database-specific fields
filteredMandates = []
@ -954,7 +956,7 @@ class AppObjects:
def getMandate(self, mandateId: str) -> Optional[Mandate]:
"""Returns a mandate by ID if user has access."""
# Use RBAC filtering
mandates = self.db.getRecordsetWithRBAC(
mandates = getRecordsetWithRBAC(self.db,
Mandate,
self.currentUser,
recordFilter={"id": mandateId}
@ -1387,7 +1389,7 @@ class AppObjects:
"""Get the data neutralization configuration for the current user's mandate"""
try:
# Use RBAC filtering
filtered_configs = self.db.getRecordsetWithRBAC(
filtered_configs = getRecordsetWithRBAC(self.db,
DataNeutraliserConfig,
self.currentUser,
recordFilter={"mandateId": self.mandateId}
@ -1449,7 +1451,7 @@ class AppObjects:
filter_dict["fileId"] = file_id
# Use RBAC filtering
filtered_attributes = self.db.getRecordsetWithRBAC(
filtered_attributes = getRecordsetWithRBAC(self.db,
DataNeutralizerAttributes,
self.currentUser,
recordFilter=filter_dict
@ -1805,28 +1807,18 @@ def getRootInterface() -> AppObjects:
"""
Returns an AppObjects instance with root privileges.
This is used for initial setup and user creation.
Note: This function uses security.rootAccess internally to avoid circular dependencies.
Routes can continue using this function, but connectors/interfaces should use
security.rootAccess.getRootDbAppConnector() or security.rootAccess.getRootUser() directly.
"""
global _rootAppObjects
if _rootAppObjects is None:
try:
# Create a temporary interface without user context to get root user
tempInterface = AppObjects()
# Get the initial user directly
initialUserId = tempInterface.getInitialId(UserInDB)
if not initialUserId:
raise ValueError("No initial user ID found in database")
users = tempInterface.db.getRecordset(
UserInDB, recordFilter={"id": initialUserId}
)
if not users:
raise ValueError("Initial user not found in database")
# Convert to User model (use helper compatible with our models)
user_data = users[0]
rootUser = User(**user_data)
# Use security.rootAccess to get root user (avoids circular dependencies)
from modules.security.rootAccess import getRootUser
rootUser = getRootUser()
# Create root interface with the root user
_rootAppObjects = AppObjects(rootUser)

View file

@ -31,6 +31,7 @@ from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResult
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
# Basic Configurations
from modules.shared.configuration import APP_CONFIG
@ -269,8 +270,8 @@ class ChatObjects:
if not self.currentUser:
raise ValueError("User context is required for RBAC")
# Get DbApp connection for RBAC AccessRule queries
from modules.interfaces.interfaceDbAppObjects import getRootInterface
dbApp = getRootInterface().db
from modules.security.rootAccess import getRootDbAppConnector
dbApp = getRootDbAppConnector()
self.rbac = RbacClass(self.db, dbApp=dbApp)
# Update database context
@ -584,7 +585,7 @@ class ChatObjects:
If pagination is provided: PaginatedResult with items and metadata
"""
# Use RBAC filtering
filteredWorkflows = self.db.getRecordsetWithRBAC(
filteredWorkflows = getRecordsetWithRBAC(self.db,
ChatWorkflow,
self.currentUser
)
@ -619,7 +620,7 @@ class ChatObjects:
def getWorkflow(self, workflowId: str) -> Optional[ChatWorkflow]:
"""Returns a workflow by ID if user has access."""
# Use RBAC filtering
workflows = self.db.getRecordsetWithRBAC(
workflows = getRecordsetWithRBAC(self.db,
ChatWorkflow,
self.currentUser,
recordFilter={"id": workflowId}
@ -760,12 +761,12 @@ class ChatObjects:
messageId = message.id
if messageId:
# Delete message stats
existing_stats = self.db.getRecordsetWithRBAC(ChatStat, self.currentUser, recordFilter={"messageId": messageId})
existing_stats = getRecordsetWithRBAC(self.db, ChatStat, self.currentUser, recordFilter={"messageId": messageId})
for stat in existing_stats:
self.db.recordDelete(ChatStat, stat["id"])
# Delete message documents (but NOT the files!)
existing_docs = self.db.getRecordsetWithRBAC(ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
existing_docs = getRecordsetWithRBAC(self.db, ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
for doc in existing_docs:
self.db.recordDelete(ChatDocument, doc["id"])
@ -773,12 +774,12 @@ class ChatObjects:
self.db.recordDelete(ChatMessage, messageId)
# 2. Delete workflow stats
existing_stats = self.db.getRecordsetWithRBAC(ChatStat, self.currentUser, recordFilter={"workflowId": workflowId})
existing_stats = getRecordsetWithRBAC(self.db, ChatStat, self.currentUser, recordFilter={"workflowId": workflowId})
for stat in existing_stats:
self.db.recordDelete(ChatStat, stat["id"])
# 3. Delete workflow logs
existing_logs = self.db.getRecordsetWithRBAC(ChatLog, self.currentUser, recordFilter={"workflowId": workflowId})
existing_logs = getRecordsetWithRBAC(self.db, ChatLog, self.currentUser, recordFilter={"workflowId": workflowId})
for log in existing_logs:
self.db.recordDelete(ChatLog, log["id"])
@ -809,7 +810,7 @@ class ChatObjects:
"""
# Check workflow access first (without calling getWorkflow to avoid circular reference)
# Use RBAC filtering
workflows = self.db.getRecordsetWithRBAC(
workflows = getRecordsetWithRBAC(self.db,
ChatWorkflow,
self.currentUser,
recordFilter={"id": workflowId}
@ -821,7 +822,7 @@ class ChatObjects:
return PaginatedResult(items=[], totalItems=0, totalPages=0)
# Get messages for this workflow from normalized table
messages = self.db.getRecordsetWithRBAC(ChatMessage, self.currentUser, recordFilter={"workflowId": workflowId})
messages = getRecordsetWithRBAC(self.db, ChatMessage, self.currentUser, recordFilter={"workflowId": workflowId})
# Convert raw messages to dict format for sorting/filtering
messageDicts = []
@ -1062,7 +1063,7 @@ class ChatObjects:
raise ValueError("messageId cannot be empty")
# Check if message exists in database
messages = self.db.getRecordsetWithRBAC(ChatMessage, self.currentUser, recordFilter={"id": messageId})
messages = getRecordsetWithRBAC(self.db, ChatMessage, self.currentUser, recordFilter={"id": messageId})
if not messages:
logger.warning(f"Message with ID {messageId} does not exist in database")
@ -1167,12 +1168,12 @@ class ChatObjects:
# CASCADE DELETE: Delete all related data first
# 1. Delete message stats
existing_stats = self.db.getRecordsetWithRBAC(ChatStat, self.currentUser, recordFilter={"messageId": messageId})
existing_stats = getRecordsetWithRBAC(self.db, ChatStat, self.currentUser, recordFilter={"messageId": messageId})
for stat in existing_stats:
self.db.recordDelete(ChatStat, stat["id"])
# 2. Delete message documents (but NOT the files!)
existing_docs = self.db.getRecordsetWithRBAC(ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
existing_docs = getRecordsetWithRBAC(self.db, ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
for doc in existing_docs:
self.db.recordDelete(ChatDocument, doc["id"])
@ -1199,7 +1200,7 @@ class ChatObjects:
# Get documents for this message from normalized table
documents = self.db.getRecordsetWithRBAC(ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
documents = getRecordsetWithRBAC(self.db, ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
if not documents:
logger.warning(f"No documents found for message {messageId}")
@ -1242,7 +1243,7 @@ class ChatObjects:
def getDocuments(self, messageId: str) -> List[ChatDocument]:
"""Returns documents for a message from normalized table."""
try:
documents = self.db.getRecordsetWithRBAC(ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
documents = getRecordsetWithRBAC(self.db, ChatDocument, self.currentUser, recordFilter={"messageId": messageId})
return [ChatDocument(**doc) for doc in documents]
except Exception as e:
logger.error(f"Error getting message documents: {str(e)}")
@ -1279,7 +1280,7 @@ class ChatObjects:
"""
# Check workflow access first (without calling getWorkflow to avoid circular reference)
# Use RBAC filtering
workflows = self.db.getRecordsetWithRBAC(
workflows = getRecordsetWithRBAC(self.db,
ChatWorkflow,
self.currentUser,
recordFilter={"id": workflowId}
@ -1291,7 +1292,7 @@ class ChatObjects:
return PaginatedResult(items=[], totalItems=0, totalPages=0)
# Get logs for this workflow from normalized table
logs = self.db.getRecordsetWithRBAC(ChatLog, self.currentUser, recordFilter={"workflowId": workflowId})
logs = getRecordsetWithRBAC(self.db, ChatLog, self.currentUser, recordFilter={"workflowId": workflowId})
# Convert raw logs to dict format for sorting/filtering
logDicts = []
@ -1400,7 +1401,7 @@ class ChatObjects:
"""Returns list of statistics for a workflow if user has access."""
# Check workflow access first (without calling getWorkflow to avoid circular reference)
# Use RBAC filtering
workflows = self.db.getRecordsetWithRBAC(
workflows = getRecordsetWithRBAC(self.db,
ChatWorkflow,
self.currentUser,
recordFilter={"id": workflowId}
@ -1410,7 +1411,7 @@ class ChatObjects:
return []
# Get stats for this workflow from normalized table
stats = self.db.getRecordsetWithRBAC(ChatStat, self.currentUser, recordFilter={"workflowId": workflowId})
stats = getRecordsetWithRBAC(self.db, ChatStat, self.currentUser, recordFilter={"workflowId": workflowId})
if not stats:
return []
@ -1447,7 +1448,7 @@ class ChatObjects:
"""
# Check workflow access first
# Use RBAC filtering
workflows = self.db.getRecordsetWithRBAC(
workflows = getRecordsetWithRBAC(self.db,
ChatWorkflow,
self.currentUser,
recordFilter={"id": workflowId}
@ -1460,7 +1461,7 @@ class ChatObjects:
items = []
# Get messages
messages = self.db.getRecordsetWithRBAC(ChatMessage, self.currentUser, recordFilter={"workflowId": workflowId})
messages = getRecordsetWithRBAC(self.db, ChatMessage, self.currentUser, recordFilter={"workflowId": workflowId})
for msg in messages:
# Apply timestamp filtering in Python
msgTimestamp = parseTimestamp(msg.get("publishedAt"), default=getUtcTimestamp())
@ -1501,7 +1502,7 @@ class ChatObjects:
})
# Get logs
logs = self.db.getRecordsetWithRBAC(ChatLog, self.currentUser, recordFilter={"workflowId": workflowId})
logs = getRecordsetWithRBAC(self.db, ChatLog, self.currentUser, recordFilter={"workflowId": workflowId})
for log in logs:
# Apply timestamp filtering in Python
logTimestamp = parseTimestamp(log.get("timestamp"), default=getUtcTimestamp())
@ -1611,7 +1612,7 @@ class ChatObjects:
Computes status field for each automation.
"""
# Use RBAC filtering
filteredAutomations = self.db.getRecordsetWithRBAC(
filteredAutomations = getRecordsetWithRBAC(self.db,
AutomationDefinition,
self.currentUser
)
@ -1657,7 +1658,7 @@ class ChatObjects:
"""Returns an automation definition by ID if user has access, with computed status."""
try:
# Use RBAC filtering
filtered = self.db.getRecordsetWithRBAC(
filtered = getRecordsetWithRBAC(self.db,
AutomationDefinition,
self.currentUser,
recordFilter={"id": automationId}
@ -1780,6 +1781,23 @@ class ChatObjects:
logger.error(f"Error deleting automation definition: {str(e)}")
raise
def getAllAutomationDefinitionsWithRBAC(self, user: User) -> List[Dict[str, Any]]:
"""
Get all automation definitions filtered by RBAC for a specific user.
This method encapsulates getRecordsetWithRBAC() to avoid exposing the connector.
Args:
user: User object for RBAC filtering
Returns:
List of automation definition dictionaries filtered by RBAC
"""
return getRecordsetWithRBAC(
self.db,
AutomationDefinition,
user
)
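# Usage sketch (illustrative caller): feature code can fetch definitions without
# touching the connector directly, e.g.
#   definitions = chatInterface.getAllAutomationDefinitionsWithRBAC(eventUser)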
async def _notifyAutomationChanged(self):
"""Notify registered callbacks about automation changes (decoupled from features)."""
try:

View file

@ -11,12 +11,19 @@ import math
from typing import Dict, Any, List, Optional, Union
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.security.rbac import RbacClass
from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import AccessLevel
from modules.datamodels.datamodelFiles import FilePreview, FileItem, FileData
from modules.datamodels.datamodelUtils import Prompt
from modules.datamodels.datamodelVoice import VoiceSettings
from modules.datamodels.datamodelMessaging import (
MessagingSubscription,
MessagingSubscriptionRegistration,
MessagingDelivery,
MessagingChannel
)
from modules.datamodels.datamodelUam import User, Mandate
from modules.shared.configuration import APP_CONFIG
from modules.shared.timeUtils import getUtcTimestamp
@ -86,8 +93,8 @@ class ComponentObjects:
if not self.currentUser:
raise ValueError("User context is required for RBAC")
# Get DbApp connection for RBAC AccessRule queries
from modules.interfaces.interfaceDbAppObjects import getRootInterface
dbApp = getRootInterface().db
from modules.security.rootAccess import getRootDbAppConnector
dbApp = getRootDbAppConnector()
self.rbac = RbacClass(self.db, dbApp=dbApp)
# Update database context
@ -155,8 +162,10 @@ class ComponentObjects:
return
# Get the root interface to access the initial mandate ID
from modules.interfaces.interfaceDbAppObjects import getRootInterface
rootInterface = getRootInterface()
from modules.security.rootAccess import getRootUser
from modules.interfaces.interfaceDbAppObjects import getInterface
rootUser = getRootUser()
rootInterface = getInterface(rootUser)
# Get initial mandate ID through the root interface
mandateId = rootInterface.getInitialId(Mandate)
@ -501,7 +510,7 @@ class ComponentObjects:
"""
try:
# Use RBAC filtering
filteredPrompts = self.db.getRecordsetWithRBAC(
filteredPrompts = getRecordsetWithRBAC(self.db,
Prompt,
self.currentUser
)
@ -545,7 +554,7 @@ class ComponentObjects:
def getPrompt(self, promptId: str) -> Optional[Prompt]:
"""Returns a prompt by ID if user has access."""
# Use RBAC filtering
filteredPrompts = self.db.getRecordsetWithRBAC(
filteredPrompts = getRecordsetWithRBAC(self.db,
Prompt,
self.currentUser,
recordFilter={"id": promptId}
@ -612,7 +621,7 @@ class ComponentObjects:
If fileName is provided, also checks for exact name+hash match.
Only returns files the current user has access to."""
# Get files with the hash, filtered by RBAC
accessibleFiles = self.db.getRecordsetWithRBAC(
accessibleFiles = getRecordsetWithRBAC(self.db,
FileItem,
self.currentUser,
recordFilter={"fileHash": fileHash}
@ -742,7 +751,7 @@ class ComponentObjects:
If pagination is provided: PaginatedResult with items and metadata
"""
# Use RBAC filtering
filteredFiles = self.db.getRecordsetWithRBAC(
filteredFiles = getRecordsetWithRBAC(self.db,
FileItem,
self.currentUser
)
@ -809,7 +818,7 @@ class ComponentObjects:
def getFile(self, fileId: str) -> Optional[FileItem]:
"""Returns a file by ID if user has access."""
# Use RBAC filtering
filteredFiles = self.db.getRecordsetWithRBAC(
filteredFiles = getRecordsetWithRBAC(self.db,
FileItem,
self.currentUser,
recordFilter={"id": fileId}
@ -842,7 +851,7 @@ class ComponentObjects:
def _isfileNameUnique(self, fileName: str, excludeFileId: Optional[str] = None) -> bool:
"""Checks if a fileName is unique for the current user."""
# Get all files filtered by RBAC (will be filtered by user's access level)
files = self.db.getRecordsetWithRBAC(
files = getRecordsetWithRBAC(self.db,
FileItem,
self.currentUser
)
@ -937,7 +946,7 @@ class ComponentObjects:
# Check for other references to this file (by hash) - use RBAC to only check files user has access to
fileHash = file.fileHash
if fileHash:
allReferences = self.db.getRecordsetWithRBAC(
allReferences = getRecordsetWithRBAC(self.db,
FileItem,
self.currentUser,
recordFilter={"fileHash": fileHash}
@ -947,7 +956,7 @@ class ComponentObjects:
# Only delete associated fileData if no other references exist
if not otherReferences:
try:
fileDataEntries = self.db.getRecordsetWithRBAC(FileData, self.currentUser, recordFilter={"id": fileId})
fileDataEntries = getRecordsetWithRBAC(self.db, FileData, self.currentUser, recordFilter={"id": fileId})
if fileDataEntries:
self.db.recordDelete(FileData, fileId)
logger.debug(f"FileData for file {fileId} deleted")
@ -1032,7 +1041,7 @@ class ComponentObjects:
logger.warning(f"No access to file ID {fileId}")
return None
fileDataEntries = self.db.getRecordsetWithRBAC(FileData, self.currentUser, recordFilter={"id": fileId})
fileDataEntries = getRecordsetWithRBAC(self.db, FileData, self.currentUser, recordFilter={"id": fileId})
if not fileDataEntries:
logger.warning(f"No data found for file ID {fileId}")
return None
@ -1192,7 +1201,7 @@ class ComponentObjects:
return None
# Get voice settings for the user, filtered by RBAC
filteredSettings = self.db.getRecordsetWithRBAC(
filteredSettings = getRecordsetWithRBAC(self.db,
VoiceSettings,
self.currentUser,
recordFilter={"userId": targetUserId}
@ -1326,6 +1335,325 @@ class ComponentObjects:
logger.error(f"Error getting or creating voice settings: {str(e)}")
raise
# Messaging Subscription methods
def getAllSubscriptions(self, pagination: Optional[PaginationParams] = None) -> Union[List[MessagingSubscription], PaginatedResult]:
"""
Returns subscriptions based on user access level.
Supports optional pagination, sorting, and filtering.
"""
try:
filteredSubscriptions = getRecordsetWithRBAC(self.db,
MessagingSubscription,
self.currentUser
)
if pagination is None:
return [MessagingSubscription(**sub) for sub in filteredSubscriptions]
if pagination.filters:
filteredSubscriptions = self._applyFilters(filteredSubscriptions, pagination.filters)
if pagination.sort:
filteredSubscriptions = self._applySorting(filteredSubscriptions, pagination.sort)
totalItems = len(filteredSubscriptions)
totalPages = math.ceil(totalItems / pagination.pageSize) if totalItems > 0 else 0
startIdx = (pagination.page - 1) * pagination.pageSize
endIdx = startIdx + pagination.pageSize
pagedSubscriptions = filteredSubscriptions[startIdx:endIdx]
items = [MessagingSubscription(**sub) for sub in pagedSubscriptions]
return PaginatedResult(
items=items,
totalItems=totalItems,
totalPages=totalPages
)
except Exception as e:
logger.error(f"Error getting subscriptions: {str(e)}")
if pagination is None:
return []
return PaginatedResult(items=[], totalItems=0, totalPages=0)
def getSubscription(self, subscriptionId: str) -> Optional[MessagingSubscription]:
"""Returns a subscription by subscriptionId if user has access."""
filteredSubscriptions = getRecordsetWithRBAC(self.db,
MessagingSubscription,
self.currentUser,
recordFilter={"subscriptionId": subscriptionId}
)
return MessagingSubscription(**filteredSubscriptions[0]) if filteredSubscriptions else None
def getSubscriptionById(self, id: str) -> Optional[MessagingSubscription]:
"""Returns a subscription by UUID if user has access."""
filteredSubscriptions = getRecordsetWithRBAC(self.db,
MessagingSubscription,
self.currentUser,
recordFilter={"id": id}
)
return MessagingSubscription(**filteredSubscriptions[0]) if filteredSubscriptions else None
def createSubscription(self, subscriptionData: Dict[str, Any]) -> Dict[str, Any]:
"""Creates a new subscription if user has permission."""
if not self.checkRbacPermission(MessagingSubscription, "create"):
raise PermissionError("No permission to create subscriptions")
# Validate subscriptionId (only letters and underscores)
subscriptionId = subscriptionData.get("subscriptionId", "")
if not subscriptionId:
raise ValueError("subscriptionId is required")
# Check that subscriptionId contains only letters and underscores (no numbers)
if not all(c.isalpha() or c == "_" for c in subscriptionId):
raise ValueError("subscriptionId must contain only letters and underscores")
# Set mandateId if not provided
if "mandateId" not in subscriptionData:
subscriptionData["mandateId"] = self.currentUser.mandateId if self.currentUser else "default"
createdRecord = self.db.recordCreate(MessagingSubscription, subscriptionData)
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create subscription record")
return createdRecord
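# Illustrative examples of the subscriptionId rule above:
#   "workflow_completed" -> accepted (letters and underscores only)
#   "workflow-completed" -> rejected (hyphen)
#   "workflow2"          -> rejected (digits fail isalpha())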
def updateSubscription(self, subscriptionId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
"""Updates a subscription if user has access."""
subscription = self.getSubscription(subscriptionId)
if not subscription:
raise ValueError(f"Subscription {subscriptionId} not found")
self.db.recordModify(MessagingSubscription, subscription.id, updateData)
updatedSubscription = self.getSubscription(subscriptionId)
if not updatedSubscription:
raise ValueError("Failed to retrieve updated subscription")
return updatedSubscription.model_dump()
def deleteSubscription(self, subscriptionId: str) -> bool:
"""Deletes a subscription if user has access."""
subscription = self.getSubscription(subscriptionId)
if not subscription:
return False
if not self.checkRbacPermission(MessagingSubscription, "update", subscription.id):
raise PermissionError(f"No permission to delete subscription {subscriptionId}")
return self.db.recordDelete(MessagingSubscription, subscription.id)
# Messaging Registration methods
def getAllRegistrations(
self,
subscriptionId: Optional[str] = None,
userId: Optional[str] = None,
pagination: Optional[PaginationParams] = None
) -> Union[List[MessagingSubscriptionRegistration], PaginatedResult]:
"""Returns registrations based on user access level."""
try:
recordFilter = {}
if subscriptionId:
recordFilter["subscriptionId"] = subscriptionId
if userId:
recordFilter["userId"] = userId
filteredRegistrations = getRecordsetWithRBAC(self.db,
MessagingSubscriptionRegistration,
self.currentUser,
recordFilter=recordFilter if recordFilter else None
)
if pagination is None:
return [MessagingSubscriptionRegistration(**reg) for reg in filteredRegistrations]
if pagination.filters:
filteredRegistrations = self._applyFilters(filteredRegistrations, pagination.filters)
if pagination.sort:
filteredRegistrations = self._applySorting(filteredRegistrations, pagination.sort)
totalItems = len(filteredRegistrations)
totalPages = math.ceil(totalItems / pagination.pageSize) if totalItems > 0 else 0
startIdx = (pagination.page - 1) * pagination.pageSize
endIdx = startIdx + pagination.pageSize
pagedRegistrations = filteredRegistrations[startIdx:endIdx]
items = [MessagingSubscriptionRegistration(**reg) for reg in pagedRegistrations]
return PaginatedResult(
items=items,
totalItems=totalItems,
totalPages=totalPages
)
except Exception as e:
logger.error(f"Error getting registrations: {str(e)}")
if pagination is None:
return []
return PaginatedResult(items=[], totalItems=0, totalPages=0)
def getRegistration(self, registrationId: str) -> Optional[MessagingSubscriptionRegistration]:
"""Returns a registration by ID if user has access."""
filteredRegistrations = getRecordsetWithRBAC(self.db,
MessagingSubscriptionRegistration,
self.currentUser,
recordFilter={"id": registrationId}
)
return MessagingSubscriptionRegistration(**filteredRegistrations[0]) if filteredRegistrations else None
def createRegistration(self, registrationData: Dict[str, Any]) -> Dict[str, Any]:
"""Creates a new registration if user has permission."""
if not self.checkRbacPermission(MessagingSubscriptionRegistration, "create"):
raise PermissionError("No permission to create registrations")
# Set userId if not provided
if "userId" not in registrationData:
registrationData["userId"] = self.userId
createdRecord = self.db.recordCreate(MessagingSubscriptionRegistration, registrationData)
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create registration record")
return createdRecord
def updateRegistration(self, registrationId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
"""Updates a registration if user has access."""
registration = self.getRegistration(registrationId)
if not registration:
raise ValueError(f"Registration {registrationId} not found")
self.db.recordModify(MessagingSubscriptionRegistration, registrationId, updateData)
updatedRegistration = self.getRegistration(registrationId)
if not updatedRegistration:
raise ValueError("Failed to retrieve updated registration")
return updatedRegistration.model_dump()
def deleteRegistration(self, registrationId: str) -> bool:
"""Deletes a registration if user has access."""
registration = self.getRegistration(registrationId)
if not registration:
return False
if not self.checkRbacPermission(MessagingSubscriptionRegistration, "update", registrationId):
raise PermissionError(f"No permission to delete registration {registrationId}")
return self.db.recordDelete(MessagingSubscriptionRegistration, registrationId)
def subscribeUser(
self,
subscriptionId: str,
userId: str,
channel: MessagingChannel,
channelConfig: str
) -> Dict[str, Any]:
"""Subscribes a user to a subscription with a specific channel."""
# Check if subscription exists
subscription = self.getSubscription(subscriptionId)
if not subscription:
raise ValueError(f"Subscription {subscriptionId} not found")
# Check if registration already exists
existingRegistrations = self.getAllRegistrations(subscriptionId=subscriptionId, userId=userId)
for reg in existingRegistrations:
if reg.channel == channel:
# Update existing registration
return self.updateRegistration(reg.id, {"enabled": True, "channelConfig": channelConfig})
# Create new registration
registrationData = {
"subscriptionId": subscriptionId,
"userId": userId,
"channel": channel.value,
"channelConfig": channelConfig,
"enabled": True
}
return self.createRegistration(registrationData)
def unsubscribeUser(self, subscriptionId: str, userId: str, channel: MessagingChannel) -> bool:
"""Unsubscribes a user from a subscription for a specific channel."""
registrations = self.getAllRegistrations(subscriptionId=subscriptionId, userId=userId)
for reg in registrations:
if reg.channel == channel:
return self.deleteRegistration(reg.id)
return False
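# Minimal usage sketch (values are hypothetical):
#   components = getInterface(currentUser)
#   components.subscribeUser("workflow_completed", user.id, MessagingChannel.EMAIL, "user@example.com")
#   components.unsubscribeUser("workflow_completed", user.id, MessagingChannel.EMAIL)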
# Messaging Delivery methods
def createDelivery(self, delivery: MessagingDelivery) -> Dict[str, Any]:
"""Creates a new delivery record."""
deliveryData = delivery.model_dump() if isinstance(delivery, MessagingDelivery) else delivery
createdRecord = self.db.recordCreate(MessagingDelivery, deliveryData)
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create delivery record")
return createdRecord
def updateDelivery(self, deliveryId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
"""Updates a delivery record."""
self.db.recordModify(MessagingDelivery, deliveryId, updateData)
return updateData
def getDeliveries(
self,
subscriptionId: Optional[str] = None,
userId: Optional[str] = None,
pagination: Optional[PaginationParams] = None
) -> Union[List[MessagingDelivery], PaginatedResult]:
"""Returns deliveries based on user access level."""
try:
recordFilter = {}
if subscriptionId:
recordFilter["subscriptionId"] = subscriptionId
if userId:
recordFilter["userId"] = userId
filteredDeliveries = getRecordsetWithRBAC(self.db,
MessagingDelivery,
self.currentUser,
recordFilter=recordFilter if recordFilter else None
)
if pagination is None:
return [MessagingDelivery(**delivery) for delivery in filteredDeliveries]
if pagination.filters:
filteredDeliveries = self._applyFilters(filteredDeliveries, pagination.filters)
if pagination.sort:
filteredDeliveries = self._applySorting(filteredDeliveries, pagination.sort)
totalItems = len(filteredDeliveries)
totalPages = math.ceil(totalItems / pagination.pageSize) if totalItems > 0 else 0
startIdx = (pagination.page - 1) * pagination.pageSize
endIdx = startIdx + pagination.pageSize
pagedDeliveries = filteredDeliveries[startIdx:endIdx]
items = [MessagingDelivery(**delivery) for delivery in pagedDeliveries]
return PaginatedResult(
items=items,
totalItems=totalItems,
totalPages=totalPages
)
except Exception as e:
logger.error(f"Error getting deliveries: {str(e)}")
if pagination is None:
return []
return PaginatedResult(items=[], totalItems=0, totalPages=0)
def getDelivery(self, deliveryId: str) -> Optional[MessagingDelivery]:
"""Returns a delivery by ID if user has access."""
filteredDeliveries = getRecordsetWithRBAC(self.db,
MessagingDelivery,
self.currentUser,
recordFilter={"id": deliveryId}
)
return MessagingDelivery(**filteredDeliveries[0]) if filteredDeliveries else None
def getInterface(currentUser: Optional[User] = None) -> 'ComponentObjects':
"""

View file

@ -0,0 +1,85 @@
"""
Interface for Messaging Services
Provides a unified interface for sending messages across different channels (Email, SMS, etc.)
"""
import logging
from typing import Optional
from modules.connectors.connectorMessagingEmail import ConnectorMessagingEmail
from modules.connectors.connectorMessagingSms import ConnectorMessagingSms
from modules.datamodels.datamodelMessaging import MessagingChannel
logger = logging.getLogger(__name__)
# Singleton factory for Messaging instances
_instancesMessaging = {}
class MessagingInterface:
"""
Interface for Messaging Services.
Provides a unified interface for sending messages across different channels.
"""
def __init__(self):
"""Initialize the Messaging Interface."""
self._emailConnector: Optional[ConnectorMessagingEmail] = None
self._smsConnector: Optional[ConnectorMessagingSms] = None
def send(self, channel: MessagingChannel, recipient: str, subject: str, message: str) -> bool:
"""
Send a message via the specified channel.
Args:
channel: MessagingChannel Enum (EMAIL, SMS, etc.)
recipient: Recipient address (email address, phone number, etc.)
subject: Message subject (for email, ignored for SMS)
message: Message content
Returns:
bool: True if successful, False otherwise
"""
try:
if channel == MessagingChannel.EMAIL:
return self._sendEmail(recipient, subject, message)
elif channel == MessagingChannel.SMS:
return self._sendSms(recipient, message)
else:
logger.error(f"Unknown channel: {channel}")
return False
except Exception as e:
logger.error(f"Error sending message via {channel}: {e}")
return False
def _sendEmail(self, recipient: str, subject: str, message: str) -> bool:
"""Send email via Email connector."""
if self._emailConnector is None:
try:
self._emailConnector = ConnectorMessagingEmail()
except Exception as e:
logger.error(f"Failed to initialize Email connector: {e}")
return False
return self._emailConnector.send(recipient, subject, message)
def _sendSms(self, recipient: str, message: str) -> bool:
"""Send SMS via SMS connector."""
if self._smsConnector is None:
try:
self._smsConnector = ConnectorMessagingSms()
except Exception as e:
logger.error(f"Failed to initialize SMS connector: {e}")
return False
return self._smsConnector.send(recipient, "", message) # SMS has no subject
def getInterface() -> MessagingInterface:
"""
Returns a MessagingInterface instance (singleton pattern).
"""
if "default" not in _instancesMessaging:
_instancesMessaging["default"] = MessagingInterface()
return _instancesMessaging["default"]
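A minimal usage sketch of the interface above (recipient and message text are placeholders):

```python
from modules.interfaces.interfaceMessaging import getInterface
from modules.datamodels.datamodelMessaging import MessagingChannel

messaging = getInterface()
ok = messaging.send(MessagingChannel.EMAIL, "user@example.com",
                    "Workflow finished", "Task01 completed successfully.")
# send() returns False on connector-initialization or delivery failure;
# errors are logged, not raised.
```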

View file

@ -0,0 +1,227 @@
"""
RBAC helper functions for interfaces.
Provides RBAC filtering for database queries without connectors importing security.
"""
import logging
import json
from typing import List, Dict, Any, Optional, Type
from pydantic import BaseModel
from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.security.rbac import RbacClass
from modules.security.rootAccess import getRootDbAppConnector
logger = logging.getLogger(__name__)
def getRecordsetWithRBAC(
connector, # DatabaseConnector instance
modelClass: Type[BaseModel],
currentUser: User,
recordFilter: Optional[Dict[str, Any]] = None,
orderBy: Optional[str] = None,
limit: Optional[int] = None,
) -> List[Dict[str, Any]]:
"""
Get records with RBAC filtering applied at database level.
This function wraps connector.getRecordset() with RBAC logic.
Args:
connector: DatabaseConnector instance
modelClass: Pydantic model class for the table
currentUser: User object with roleLabels
recordFilter: Additional record filters
orderBy: Field to order by (defaults to "id")
limit: Maximum number of records to return
Returns:
List of filtered records
"""
table = modelClass.__name__
try:
if not connector._ensureTableExists(modelClass):
return []
# Get RBAC permissions for this table
# AccessRule table is always in DbApp database
dbApp = getRootDbAppConnector()
rbacInstance = RbacClass(connector, dbApp=dbApp)
permissions = rbacInstance.getUserPermissions(
currentUser,
AccessRuleContext.DATA,
table
)
# Check view permission first
if not permissions.view:
logger.debug(f"User {currentUser.id} has no view permission for table {table}")
return []
# Build WHERE clause with RBAC filtering
whereConditions = []
whereValues = []
# Add RBAC WHERE clause based on read permission
rbacWhereClause = buildRbacWhereClause(permissions, currentUser, table, connector)
if rbacWhereClause:
whereConditions.append(rbacWhereClause["condition"])
whereValues.extend(rbacWhereClause["values"])
# Add additional record filters
if recordFilter:
for field, value in recordFilter.items():
whereConditions.append(f'"{field}" = %s')
whereValues.append(value)
# Build the query
whereClause = ""
if whereConditions:
whereClause = " WHERE " + " AND ".join(whereConditions)
orderByClause = f' ORDER BY "{orderBy}"' if orderBy else ' ORDER BY "id"'
limitClause = f" LIMIT {limit}" if limit else ""
query = f'SELECT * FROM "{table}"{whereClause}{orderByClause}{limitClause}'
with connector.connection.cursor() as cursor:
cursor.execute(query, whereValues)
records = [dict(row) for row in cursor.fetchall()]
# Handle JSONB fields and ensure numeric types are correct
# Import the helper function from connector module
from modules.connectors.connectorDbPostgre import _get_model_fields
fields = _get_model_fields(modelClass)
for record in records:
for fieldName, fieldType in fields.items():
# Ensure numeric fields are properly typed
if fieldType in ("DOUBLE PRECISION", "INTEGER") and fieldName in record:
value = record[fieldName]
if value is not None:
try:
if fieldType == "DOUBLE PRECISION":
record[fieldName] = float(value)
elif fieldType == "INTEGER":
record[fieldName] = int(value)
except (ValueError, TypeError):
logger.warning(
f"Could not convert {fieldName} to {fieldType} for record {record.get('id', 'unknown')}: {value}"
)
elif fieldType == "JSONB" and fieldName in record:
if record[fieldName] is None:
if fieldName in ["logs", "messages", "tasks", "expectedDocumentFormats", "resultDocuments"]:
record[fieldName] = []
elif fieldName in ["execParameters", "stats"]:
record[fieldName] = {}
else:
record[fieldName] = None
else:
try:
if isinstance(record[fieldName], str):
record[fieldName] = json.loads(record[fieldName])
elif isinstance(record[fieldName], (dict, list)):
pass
else:
record[fieldName] = json.loads(str(record[fieldName]))
except (json.JSONDecodeError, TypeError, ValueError):
logger.warning(
f"Could not parse JSONB field {fieldName}, keeping as string: {record[fieldName]}"
)
return records
except Exception as e:
logger.error(f"Error loading records with RBAC from table {table}: {e}")
return []
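# Usage sketch (illustrative): interfaces call this helper instead of a connector method, e.g.
#   users = getRecordsetWithRBAC(self.db, UserInDB, self.currentUser, recordFilter={"mandateId": mandateId})
# so the RBAC WHERE clause is applied at query time rather than in Python.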
def buildRbacWhereClause(
permissions: UserPermissions,
currentUser: User,
table: str,
connector # DatabaseConnector instance for connection access
) -> Optional[Dict[str, Any]]:
"""
Build RBAC WHERE clause based on permissions and access level.
Moved from connector to interfaces.
Args:
permissions: UserPermissions object
currentUser: User object
table: Table name
connector: DatabaseConnector instance (needed for GROUP queries)
Returns:
Dictionary with "condition" and "values" keys, or None if no filtering needed
"""
if not permissions or not hasattr(permissions, "read"):
return None
readLevel = permissions.read
# No access - return empty result condition
if readLevel == AccessLevel.NONE:
return {"condition": "1 = 0", "values": []}
# All records - no filtering needed
if readLevel == AccessLevel.ALL:
return None
# My records - filter by _createdBy or userId field
if readLevel == AccessLevel.MY:
# Try common field names for creator
userIdField = None
if table == "UserInDB":
userIdField = "id"
elif table == "UserConnection":
userIdField = "userId"
else:
userIdField = "_createdBy"
return {
"condition": f'"{userIdField}" = %s',
"values": [currentUser.id]
}
# Group records - filter by mandateId
if readLevel == AccessLevel.GROUP:
if not currentUser.mandateId:
logger.warning(f"User {currentUser.id} has no mandateId for GROUP access")
return {"condition": "1 = 0", "values": []}
# For UserInDB, filter by mandateId directly
if table == "UserInDB":
return {
"condition": '"mandateId" = %s',
"values": [currentUser.mandateId]
}
# For UserConnection, need to join with UserInDB or filter by mandateId in user
elif table == "UserConnection":
# Get all user IDs in the same mandate using direct SQL query
try:
with connector.connection.cursor() as cursor:
cursor.execute(
'SELECT "id" FROM "UserInDB" WHERE "mandateId" = %s',
(currentUser.mandateId,)
)
users = cursor.fetchall()
userIds = [u["id"] for u in users]
if not userIds:
return {"condition": "1 = 0", "values": []}
placeholders = ",".join(["%s"] * len(userIds))
return {
"condition": f'"userId" IN ({placeholders})',
"values": userIds
}
except Exception as e:
logger.error(f"Error building GROUP filter for UserConnection: {e}")
return {"condition": "1 = 0", "values": []}
# For other tables, filter by mandateId
else:
return {
"condition": '"mandateId" = %s',
"values": [currentUser.mandateId]
}
return None
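For orientation, the clauses this helper yields per read level (ids are illustrative):

```python
# buildRbacWhereClause results by read level (user id "u1", mandate "m1"):
# AccessLevel.NONE  -> {"condition": "1 = 0", "values": []}                  # matches nothing
# AccessLevel.ALL   -> None                                                  # no filtering
# AccessLevel.MY    -> {"condition": '"_createdBy" = %s', "values": ["u1"]}  # default field
# AccessLevel.GROUP -> {"condition": '"mandateId" = %s', "values": ["m1"]}   # non-user tables
```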

View file

@ -8,7 +8,7 @@ from typing import Dict, Any, List
from fastapi import HTTPException, status
from modules.shared.configuration import APP_CONFIG
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.interfaces.interfaceDbAppObjects import getRootInterface

View file

@ -10,7 +10,7 @@ import logging
# Import interfaces and models
import modules.interfaces.interfaceDbChatObjects as interfaceDbChatObjects
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelUam import User
# Configure logger
@ -88,7 +88,7 @@ async def sync_all_automation_events(
try:
from modules.interfaces.interfaceDbChatObjects import getInterface as getChatInterface
from modules.interfaces.interfaceDbAppObjects import getRootInterface
from modules.features.automation import syncAutomationEvents
from modules.features.workflow import syncAutomationEvents
chatInterface = getChatInterface(currentUser)
# Get event user for sync operation (routes can import from interfaces)
@ -100,7 +100,9 @@ async def sync_all_automation_events(
detail="Event user not available"
)
result = await syncAutomationEvents(chatInterface, eventUser)
from modules.services import getInterface as getServices
services = getServices(currentUser, None)
result = await syncAutomationEvents(services, eventUser)
return {
"success": True,
"synced": result.get("synced", 0),

View file

@ -7,7 +7,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, Body, Path, Reques
from typing import List, Dict, Any, Optional
import logging
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelUam import User, UserInDB
from modules.datamodels.datamodelRbac import Role
from modules.interfaces.interfaceDbAppObjects import getInterface

View file

@ -3,7 +3,7 @@ from fastapi import status
import logging
# Import auth module
from modules.security.auth import limiter
from modules.auth import limiter
# Import the attribute definition and helper functions
from modules.shared.attributeUtils import getModelClasses, getModelAttributeDefinitions, AttributeResponse, AttributeDefinition

View file

@ -8,7 +8,7 @@ from typing import Optional, Dict, Any
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Request
# Import auth modules
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbChatObjects as interfaceDbChatObjects
@ -18,7 +18,7 @@ from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, Wor
from modules.datamodels.datamodelUam import User
# Import workflow control functions
from modules.features.chatPlayground.mainChatPlayground import chatStart, chatStop
from modules.features.workflow import chatStart, chatStop
# Configure logger
logger = logging.getLogger(__name__)

View file

@ -6,16 +6,18 @@ Implements the endpoints for automation definition management.
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response, Query
from typing import List, Dict, Any, Optional
from fastapi import status
from fastapi.responses import JSONResponse
import logging
import json
# Import interfaces and models
from modules.interfaces.interfaceDbChatObjects import getInterface as getChatInterface
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelChat import AutomationDefinition, ChatWorkflow
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata
from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.features.automation import executeAutomation
from modules.features.workflow import executeAutomation
from modules.features.workflow.subAutomationTemplates import getAutomationTemplates
# Configure logger
logger = logging.getLogger(__name__)
@ -121,6 +123,33 @@ async def create_automation(
detail=f"Error creating automation: {str(e)}"
)
@router.get("/templates")
@limiter.limit("30/minute")
async def get_automation_templates(
request: Request,
currentUser = Depends(getCurrentUser)
) -> JSONResponse:
"""
Get automation templates from backend module.
The UI should fetch these templates regularly to get the latest versions.
"""
try:
templatesData = getAutomationTemplates()
return JSONResponse(content=templatesData)
except Exception as e:
logger.error(f"Error getting automation templates: {str(e)}")
raise HTTPException(
status_code=500,
detail=f"Error getting automation templates: {str(e)}"
)
@router.get("/attributes", response_model=Dict[str, Any])
async def get_automation_attributes(
request: Request
) -> Dict[str, Any]:
"""Get attribute definitions for AutomationDefinition model"""
return {"attributes": automationAttributes}
@router.get("/{automationId}", response_model=AutomationDefinition)
@limiter.limit("30/minute")
async def get_automation(
@ -217,8 +246,9 @@ async def execute_automation(
) -> ChatWorkflow:
"""Execute an automation immediately (test mode)"""
try:
chatInterface = getChatInterface(currentUser)
workflow = await executeAutomation(automationId, chatInterface)
from modules.services import getInterface as getServices
services = getServices(currentUser, None)
workflow = await executeAutomation(automationId, services)
return workflow
except HTTPException:
raise
@ -234,10 +264,4 @@ async def execute_automation(
detail=f"Error executing automation: {str(e)}"
)
@router.get("/attributes", response_model=Dict[str, Any])
async def get_automation_attributes(
request: Request
) -> Dict[str, Any]:
"""Get attribute definitions for AutomationDefinition model"""
return {"attributes": automationAttributes}

View file

@ -16,7 +16,7 @@ import json
from modules.datamodels.datamodelUam import User, UserConnection, AuthAuthority, ConnectionStatus
from modules.datamodels.datamodelSecurity import Token
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.interfaces.interfaceDbAppObjects import getInterface
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
@ -107,7 +107,7 @@ async def get_connections(
# Perform silent token refresh for expired OAuth connections
try:
from modules.security.tokenRefreshService import token_refresh_service
from modules.auth import token_refresh_service
refresh_result = await token_refresh_service.refresh_expired_tokens(currentUser.id)
if refresh_result.get("refreshed", 0) > 0:
logger.info(f"Silently refreshed {refresh_result['refreshed']} tokens for user {currentUser.id}")

View file

@ -5,7 +5,7 @@ import logging
import json
# Import auth module
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbComponentObjects as interfaceDbComponentObjects

View file

@ -10,7 +10,7 @@ import logging
import json
# Import auth module
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbAppObjects as interfaceDbAppObjects

View file

@ -3,7 +3,7 @@ from typing import List, Dict, Any, Optional
import logging
# Import auth module
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
# Import interfaces
from modules.datamodels.datamodelUam import User

View file

@ -5,7 +5,7 @@ import logging
import json
# Import auth module
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbComponentObjects as interfaceDbComponentObjects

View file

@ -11,7 +11,7 @@ import json
# Import interfaces and models
import modules.interfaces.interfaceDbAppObjects as interfaceDbAppObjects
from modules.security.auth import getCurrentUser, limiter, getCurrentUser
from modules.auth import getCurrentUser, limiter
# Import the attribute definition and helper functions
from modules.datamodels.datamodelUam import User

View file

@ -0,0 +1,464 @@
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Query
from typing import List, Dict, Any, Optional
from fastapi import status
import logging
import json
# Import auth module
from modules.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbComponentObjects as interfaceDbComponentObjects
from modules.datamodels.datamodelMessaging import (
MessagingSubscription,
MessagingSubscriptionRegistration,
MessagingDelivery,
MessagingChannel,
MessagingEventParameters,
MessagingSubscriptionExecutionResult
)
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata
# Configure logger
logger = logging.getLogger(__name__)
# Create router for messaging endpoints
router = APIRouter(
prefix="/api/messaging",
tags=["Messaging"],
responses={404: {"description": "Not found"}}
)
# Subscription Endpoints
@router.get("/subscriptions", response_model=PaginatedResponse[MessagingSubscription])
@limiter.limit("60/minute")
async def getSubscriptions(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingSubscription]:
"""Get subscriptions with optional pagination, sorting, and filtering."""
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
paginationParams = PaginationParams(**paginationDict) if paginationDict else None
except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(
status_code=400,
detail=f"Invalid pagination parameter: {str(e)}"
)
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
result = managementInterface.getAllSubscriptions(pagination=paginationParams)
if paginationParams:
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
else:
return PaginatedResponse(
items=result,
pagination=None
)
@router.post("/subscriptions", response_model=MessagingSubscription)
@limiter.limit("60/minute")
async def createSubscription(
request: Request,
subscription: MessagingSubscription,
currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscription:
"""Create a new subscription"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
subscriptionData = subscription.model_dump(exclude={"id"})
newSubscription = managementInterface.createSubscription(subscriptionData)
return MessagingSubscription(**newSubscription)
@router.get("/subscriptions/{subscriptionId}", response_model=MessagingSubscription)
@limiter.limit("60/minute")
async def getSubscription(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription"),
currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscription:
"""Get a specific subscription"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
subscription = managementInterface.getSubscription(subscriptionId)
if not subscription:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Subscription with ID {subscriptionId} not found"
)
return subscription
@router.put("/subscriptions/{subscriptionId}", response_model=MessagingSubscription)
@limiter.limit("60/minute")
async def updateSubscription(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription to update"),
subscriptionData: MessagingSubscription = Body(...),
currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscription:
"""Update an existing subscription"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
existingSubscription = managementInterface.getSubscription(subscriptionId)
if not existingSubscription:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Subscription with ID {subscriptionId} not found"
)
updateData = subscriptionData.model_dump(exclude={"id", "subscriptionId"})
updatedSubscription = managementInterface.updateSubscription(subscriptionId, updateData)
if not updatedSubscription:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error updating the subscription"
)
return MessagingSubscription(**updatedSubscription)
@router.delete("/subscriptions/{subscriptionId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def deleteSubscription(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription to delete"),
currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
"""Delete a subscription"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
existingSubscription = managementInterface.getSubscription(subscriptionId)
if not existingSubscription:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Subscription with ID {subscriptionId} not found"
)
success = managementInterface.deleteSubscription(subscriptionId)
if not success:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error deleting the subscription"
)
return {"message": f"Subscription with ID {subscriptionId} successfully deleted"}
# Registration Endpoints
@router.get("/subscriptions/{subscriptionId}/registrations", response_model=PaginatedResponse[MessagingSubscriptionRegistration])
@limiter.limit("60/minute")
async def getSubscriptionRegistrations(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingSubscriptionRegistration]:
"""Get registrations for a subscription"""
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
paginationParams = PaginationParams(**paginationDict) if paginationDict else None
except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(
status_code=400,
detail=f"Invalid pagination parameter: {str(e)}"
)
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
result = managementInterface.getAllRegistrations(
subscriptionId=subscriptionId,
pagination=paginationParams
)
if paginationParams:
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
else:
return PaginatedResponse(
items=result,
pagination=None
)
@router.post("/subscriptions/{subscriptionId}/subscribe", response_model=MessagingSubscriptionRegistration)
@limiter.limit("60/minute")
async def subscribeUser(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription"),
channel: MessagingChannel = Body(..., embed=True),
channelConfig: str = Body(..., embed=True),
currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscriptionRegistration:
"""Subscribe user to a subscription with a specific channel"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
registration = managementInterface.subscribeUser(
subscriptionId=subscriptionId,
userId=currentUser.id,
channel=channel,
channelConfig=channelConfig
)
return MessagingSubscriptionRegistration(**registration)
@router.delete("/subscriptions/{subscriptionId}/unsubscribe", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def unsubscribeUser(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription"),
channel: MessagingChannel = Body(..., embed=True),
currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
"""Unsubscribe user from a subscription for a specific channel"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
success = managementInterface.unsubscribeUser(
subscriptionId=subscriptionId,
userId=currentUser.id,
channel=channel
)
if not success:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Registration not found"
)
return {"message": f"Successfully unsubscribed from {subscriptionId} for channel {channel.value}"}
@router.get("/registrations", response_model=PaginatedResponse[MessagingSubscriptionRegistration])
@limiter.limit("60/minute")
async def getMyRegistrations(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingSubscriptionRegistration]:
"""Get own registrations"""
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
paginationParams = PaginationParams(**paginationDict) if paginationDict else None
except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(
status_code=400,
detail=f"Invalid pagination parameter: {str(e)}"
)
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
result = managementInterface.getAllRegistrations(
userId=currentUser.id,
pagination=paginationParams
)
if paginationParams:
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
else:
return PaginatedResponse(
items=result,
pagination=None
)
@router.put("/registrations/{registrationId}", response_model=MessagingSubscriptionRegistration)
@limiter.limit("60/minute")
async def updateRegistration(
request: Request,
registrationId: str = Path(..., description="ID of the registration to update"),
registrationData: MessagingSubscriptionRegistration = Body(...),
currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscriptionRegistration:
"""Update a registration"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
existingRegistration = managementInterface.getRegistration(registrationId)
if not existingRegistration:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Registration with ID {registrationId} not found"
)
updateData = registrationData.model_dump(exclude={"id", "subscriptionId", "userId"})
updatedRegistration = managementInterface.updateRegistration(registrationId, updateData)
if not updatedRegistration:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error updating the registration"
)
return MessagingSubscriptionRegistration(**updatedRegistration)
@router.delete("/registrations/{registrationId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def deleteRegistration(
request: Request,
registrationId: str = Path(..., description="ID of the registration to delete"),
currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
"""Delete a registration"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
existingRegistration = managementInterface.getRegistration(registrationId)
if not existingRegistration:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Registration with ID {registrationId} not found"
)
success = managementInterface.deleteRegistration(registrationId)
if not success:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error deleting the registration"
)
return {"message": f"Registration with ID {registrationId} successfully deleted"}
# Trigger Endpoints
def _getTriggerKey(request: Request) -> str:
"""Custom key function for trigger rate limiting per subscriptionId"""
subscriptionId = request.path_params.get("subscriptionId", "unknown")
return f"{request.client.host}:{subscriptionId}"
@router.post("/trigger/{subscriptionId}", response_model=MessagingSubscriptionExecutionResult)
@limiter.limit("60/minute", key_func=_getTriggerKey)
async def triggerSubscription(
request: Request,
subscriptionId: str = Path(..., description="ID of the subscription to trigger"),
eventParameters: Dict[str, Any] = Body(...),
currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscriptionExecutionResult:
"""Trigger a subscription with event parameters"""
# RBAC check: only admins/mandate admins may trigger
# TODO: Add proper RBAC check here
# Get messaging service from request app state
# We need to access services through the request
from modules.services import getInterface as getServicesInterface
services = getServicesInterface(currentUser, None)
# Convert the dict to a Pydantic model
eventParams = MessagingEventParameters(triggerData=eventParameters)
executionResult = services.messaging.executeSubscription(subscriptionId, eventParams)
return executionResult
# Delivery Endpoints
@router.get("/deliveries", response_model=PaginatedResponse[MessagingDelivery])
@limiter.limit("60/minute")
async def getDeliveries(
request: Request,
subscriptionId: Optional[str] = Query(None, description="Filter by subscription ID"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingDelivery]:
"""Get delivery history"""
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
paginationParams = PaginationParams(**paginationDict) if paginationDict else None
except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(
status_code=400,
detail=f"Invalid pagination parameter: {str(e)}"
)
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
result = managementInterface.getDeliveries(
subscriptionId=subscriptionId,
userId=currentUser.id, # Users can only see their own deliveries
pagination=paginationParams
)
if paginationParams:
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
else:
return PaginatedResponse(
items=result,
pagination=None
)
@router.get("/deliveries/{deliveryId}", response_model=MessagingDelivery)
@limiter.limit("60/minute")
async def getDelivery(
request: Request,
deliveryId: str = Path(..., description="ID of the delivery"),
currentUser: User = Depends(getCurrentUser)
) -> MessagingDelivery:
"""Get a specific delivery"""
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
delivery = managementInterface.getDelivery(deliveryId)
if not delivery:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Delivery with ID {deliveryId} not found"
)
return delivery

View file

@ -7,9 +7,10 @@ from fastapi import APIRouter, HTTPException, Depends, Query, Request
from typing import List, Dict, Any
import logging
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelUam import User
from modules.features.options.mainOptions import getOptions, getAvailableOptionsNames
from modules.features.dynamicOptions.mainDynamicOptions import getOptions, getAvailableOptionsNames
from modules.services import getInterface as getServices
# Configure logger
logger = logging.getLogger(__name__)
@ -44,7 +45,8 @@ async def getOptionsEndpoint(
- GET /api/options/connection.status
"""
try:
options = getOptions(optionsName, currentUser)
services = getServices(currentUser, None)
options = getOptions(optionsName, services, currentUser)
return options
except ValueError as e:
raise HTTPException(

View file

@ -7,7 +7,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, Body, Path, Reques
from typing import Optional, List, Dict, Any
import logging
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.datamodels.datamodelRbac import AccessRuleContext, AccessRule, Role
from modules.interfaces.interfaceDbAppObjects import getInterface

View file

@ -4,7 +4,7 @@ from typing import Optional, Dict, Any, List
import os
import logging
from modules.security.auth import getCurrentUser, limiter
from modules.auth import getCurrentUser, limiter
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority
from modules.datamodels.datamodelSecurity import Token

View file

@ -13,8 +13,8 @@ import httpx
from modules.shared.configuration import APP_CONFIG
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.datamodels.datamodelUam import AuthAuthority, User, ConnectionStatus, UserConnection
from modules.security.auth import getCurrentUser, limiter
from modules.security.jwtService import createAccessToken, setAccessTokenCookie, createRefreshToken, setRefreshTokenCookie
from modules.auth import getCurrentUser, limiter
from modules.auth import createAccessToken, setAccessTokenCookie, createRefreshToken, setRefreshTokenCookie
from modules.shared.timeUtils import createExpirationTimestamp, getUtcTimestamp, parseTimestamp
# Configure logger
@ -353,7 +353,7 @@ async def auth_callback(code: str, state: str, request: Request, response: Respo
# Decode token to get jti for database record
from jose import jwt
from modules.security.auth import SECRET_KEY, ALGORITHM
from modules.auth import SECRET_KEY, ALGORITHM
payload = jwt.decode(jwt_token, SECRET_KEY, algorithms=[ALGORITHM])
jti = payload.get("jti")
@ -659,7 +659,7 @@ async def verify_token(
)
# Get a fresh token via TokenManager convenience method
from modules.security.tokenManager import TokenManager
from modules.auth import TokenManager
current_token = TokenManager().getFreshToken(google_connection.id)
if not current_token:
@ -733,7 +733,7 @@ async def refresh_token(
logger.debug(f"Found Google connection: {google_connection.id}, status={google_connection.status}")
# Get the token for this specific connection (fresh if expiring soon)
from modules.security.tokenManager import TokenManager
from modules.auth import TokenManager
current_token = TokenManager().getFreshToken(google_connection.id)
if not current_token:

View file

@ -12,8 +12,8 @@ import uuid
from jose import jwt
# Import auth modules
from modules.security.auth import getCurrentUser, limiter, SECRET_KEY, ALGORITHM
from modules.security.jwtService import createAccessToken, createRefreshToken, setAccessTokenCookie, setRefreshTokenCookie, clearAccessTokenCookie, clearRefreshTokenCookie
from modules.auth import getCurrentUser, limiter, SECRET_KEY, ALGORITHM
from modules.auth import createAccessToken, createRefreshToken, setAccessTokenCookie, setRefreshTokenCookie, clearAccessTokenCookie, clearRefreshTokenCookie
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority
from modules.datamodels.datamodelSecurity import Token

View file

@ -14,8 +14,8 @@ from modules.shared.configuration import APP_CONFIG
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.datamodels.datamodelUam import AuthAuthority, User, ConnectionStatus, UserConnection
from modules.datamodels.datamodelSecurity import Token
from modules.security.auth import getCurrentUser, limiter
from modules.security.jwtService import createAccessToken, setAccessTokenCookie, createRefreshToken, setRefreshTokenCookie
from modules.auth import getCurrentUser, limiter
from modules.auth import createAccessToken, setAccessTokenCookie, createRefreshToken, setRefreshTokenCookie
from modules.shared.timeUtils import createExpirationTimestamp, getUtcTimestamp, parseTimestamp
# Configure logger
@ -335,7 +335,7 @@ async def auth_callback(code: str, state: str, request: Request, response: Respo
# Decode token to get jti for database record
from jose import jwt
from modules.security.auth import SECRET_KEY, ALGORITHM
from modules.auth import SECRET_KEY, ALGORITHM
payload = jwt.decode(jwt_token, SECRET_KEY, algorithms=[ALGORITHM])
jti = payload.get("jti")
@ -692,7 +692,7 @@ async def refresh_token(
logger.debug(f"Found Microsoft connection: {msft_connection.id}, status={msft_connection.status}")
# Get a fresh token via TokenManager convenience method
from modules.security.tokenManager import TokenManager
from modules.auth import TokenManager
current_token = TokenManager().getFreshToken(msft_connection.id)
if not current_token:
@ -704,7 +704,7 @@ async def refresh_token(
# Always attempt refresh (as per your requirement)
from modules.security.tokenManager import TokenManager
from modules.auth import TokenManager
token_manager = TokenManager()
refreshedToken = token_manager.refreshToken(current_token)

View file

@ -7,7 +7,7 @@ import logging
from typing import List, Dict, Any, Optional
from fastapi import APIRouter, HTTPException, Depends, Path, Query, Request, status
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
from modules.datamodels.datamodelUam import User, UserConnection
from modules.interfaces.interfaceDbAppObjects import getInterface
from modules.services import getInterface as getServices

View file

@ -10,7 +10,7 @@ import base64
from fastapi import APIRouter, File, Form, UploadFile, Depends, HTTPException, Body, WebSocket, WebSocketDisconnect
from fastapi.responses import Response
from typing import Optional, Dict, Any, List
from modules.security.auth import getCurrentUser
from modules.auth import getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface, VoiceObjects

View file

@ -9,11 +9,12 @@ from typing import List, Dict, Any, Optional
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Response, status, Request
# Import auth modules
from modules.security.auth import limiter, getCurrentUser
from modules.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbChatObjects as interfaceDbChatObjects
from modules.interfaces.interfaceDbChatObjects import getInterface
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
# Import models
from modules.datamodels.datamodelChat import (
@ -428,7 +429,8 @@ async def delete_workflow(
interfaceDbChat = getServiceChat(currentUser)
# Check workflow access and permission using RBAC
workflows = interfaceDbChat.db.getRecordsetWithRBAC(
workflows = getRecordsetWithRBAC(
interfaceDbChat.db,
ChatWorkflow,
currentUser,
recordFilter={"id": workflowId}

View file

@ -0,0 +1,21 @@
"""
Security core modules for low-level security operations.
Used by connectors and interfaces. Does not depend on FastAPI or interfaces.
"""
from .rbac import RbacClass
from .rbacHelpers import (
checkResourceAccess,
checkUiAccess,
checkDataAccess,
getResourcePermissions,
getUiPermissions,
)
from .rootAccess import getRootDbAppConnector, getRootUser
__all__ = [
"RbacClass",
"checkResourceAccess",
"checkUiAccess",
"checkDataAccess",
"getResourcePermissions",
"getUiPermissions",
"getRootDbAppConnector",
"getRootUser",
]

View file

@ -8,7 +8,6 @@ from typing import Optional
from modules.datamodels.datamodelUam import User, AccessLevel
from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.security.rbac import RbacClass
from modules.connectors.connectorDbPostgre import DatabaseConnector
logger = logging.getLogger(__name__)
@ -176,3 +175,4 @@ def getUiPermissions(
"view": False,
"hasAccess": False
}

View file

@ -0,0 +1,57 @@
"""
Root access management for system-level operations.
Provides secure access to root user and DbApp database connector.
"""
import logging
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.datamodels.datamodelUam import User, UserInDB
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
_rootDbAppConnector = None
_rootUser = None
def getRootDbAppConnector() -> DatabaseConnector:
"""
Returns a DatabaseConnector instance for the DbApp database.
This is used for accessing system tables like AccessRule.
"""
global _rootDbAppConnector
if _rootDbAppConnector is None:
_rootDbAppConnector = DatabaseConnector(
dbHost=APP_CONFIG.get("DB_APP_HOST"),
dbDatabase=APP_CONFIG.get("DB_APP_DATABASE", "app"),
dbUser=APP_CONFIG.get("DB_APP_USER"),
dbPassword=APP_CONFIG.get("DB_APP_PASSWORD_SECRET"),
dbPort=int(APP_CONFIG.get("DB_APP_PORT", 5432)),
userId=None # No user context for root connector
)
_rootDbAppConnector.initDbSystem()
return _rootDbAppConnector
def getRootUser() -> User:
"""
Returns the root user (initial user from database).
Used for system-level operations that require root privileges.
"""
global _rootUser
if _rootUser is None:
dbApp = getRootDbAppConnector()
initialUserId = dbApp.getInitialId(UserInDB)
if not initialUserId:
raise ValueError("No initial user ID found in database")
users = dbApp.getRecordset(UserInDB, recordFilter={"id": initialUserId})
if not users:
raise ValueError("Initial user not found in database")
user_data = users[0]
_rootUser = User(**user_data)
return _rootUser

View file

@ -87,6 +87,9 @@ class Services:
from .serviceSecurity.mainServiceSecurity import SecurityService
self.security = PublicService(SecurityService(self))
from .serviceMessaging.mainServiceMessaging import MessagingService
self.messaging = PublicService(MessagingService(self))
def getInterface(user: User, workflow: ChatWorkflow) -> Services:
return Services(user, workflow)

View file

@ -0,0 +1,674 @@
# Messaging Service Concept
## Overview
The messaging system makes it possible to send messages over various channels (e-mail, SMS, WhatsApp, Teams chat, etc.) to registered users, based on subscriptions. It is mandate-based (multi-tenant) and supports multiple channels per subscription.
## Architecture Considerations
### Core Concept
The system consists of two main components:
1. **Subscription management**: Users can register for subscriptions and choose their preferred channels
2. **Subscription functions**: Each subscription has its own function (`subSubscriptionXxxx.py`) that is completely flexible and prepares the messages
### Setup Architecture
**Important**: The database holding the subscriptions is the **stable base reference** and the foundation of the system.
Two sides dock onto the database:
1. **User side**: Users can register for subscriptions (subscribe)
- This is independent of whether a subscription function already exists
- Users can subscribe before a function is implemented
2. **Function side**: Subscription functions can be added later
- A subscription function is **optional** and can be implemented retroactively
- If a subscription function is missing, an error is logged on trigger, but the system remains stable
- The database structure is independent of the existence of the functions
**Workflow**:
1. An admin creates a subscription in the database (e.g. "SystemErrors")
2. Users can register for this subscription immediately
3. The subscription function (`subSubscriptionSystemErrors.py`) can be added later
4. Only then can triggers execute the subscription
### Trigger Mechanism
Subscriptions can be executed via various triggers:
- **Trigger route endpoint**: API endpoint that triggers a subscription
- **Workflow action**: Automated workflow that fires a subscription as an event
- **Scheduled job**: Time-based execution
- **Event-based**: System events (e.g. audit log events)
**Important**: A subscription can be executed via **all** trigger types. There is no restriction per subscription.
### Subscription Functions
Each subscription has its own function named `subSubscriptionXxxx.py` (e.g. `subSubscriptionSystemErrors.py`, `subSubscriptionAuditLogin.py`).
**Naming rule**: The `subscriptionId` may contain only letters and underscores (`_`). It is used verbatim as the file name:
- `subscriptionId` "SystemErrors" → file: `subSubscriptionSystemErrors.py`
- `subscriptionId` "audit_login" → file: `subSubscriptionaudit_login.py` (the ID is used as-is; there is no case conversion)
**Validation**: When a subscription is created, the system checks that `subscriptionId` contains only letters and `_`.
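A minimal sketch of this check (hypothetical helper name; the concept only fixes the rule, not where it is enforced):
```python
import re

def validateSubscriptionId(subscriptionId: str) -> None:
    # Only letters and underscores are allowed, so the ID can double as a module name
    if not re.fullmatch(r"[A-Za-z_]+", subscriptionId):
        raise ValueError(
            f"subscriptionId '{subscriptionId}' may only contain letters and underscores"
        )

validateSubscriptionId("SystemErrors")  # passes
validateSubscriptionId("audit_login")   # passes
```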
These functions:
- Receive event parameters from the trigger (as a Pydantic model)
- Receive the registrations already fetched (they are retrieved before the function call)
- Prepare the messages (which may differ per channel)
- Call `sendMessage` for each channel
- Have full flexibility in composing the messages
**Example structure:**
```python
# modules/services/serviceMessaging/subscriptions/subSubscriptionSystemErrors.py
from typing import List
from modules.datamodels.datamodelMessaging import (
MessagingEventParameters,
MessagingSubscriptionExecutionResult,
MessagingSubscriptionRegistration,
MessagingChannel
)
def execute(
eventParameters: MessagingEventParameters,
registrations: List[MessagingSubscriptionRegistration],
messagingService
) -> MessagingSubscriptionExecutionResult:
"""
Subscription-Funktion für System-Errors.
Erhält eventParameters vom Trigger und registrations bereits geholt.
"""
# Gruppiere nach Channel
emailRegistrations = [r for r in registrations if r.channel == MessagingChannel.EMAIL]
smsRegistrations = [r for r in registrations if r.channel == MessagingChannel.SMS]
# Bereite Nachrichten vor (können pro Channel unterschiedlich sein)
emailSubject = "System Error Report"
errors = eventParameters.triggerData.get('errors', [])
emailMessage = f"System errors detected: {errors}"
smsMessage = f"System Error: {len(errors)} errors detected"
messagesSent = 0
# Send the messages via sendMessage
for reg in emailRegistrations:
sendResult = messagingService.sendMessage(
subject=emailSubject,
message=emailMessage,
registration=reg
)
if sendResult.success:
messagesSent += 1
for reg in smsRegistrations:
sendResult = messagingService.sendMessage(
subject="", # SMS hat kein Subject
message=smsMessage,
registration=reg
)
if sendResult.success:
messagesSent += 1
return MessagingSubscriptionExecutionResult(
success=True,
messagesSent=messagesSent
)
```
## Data Model
### 1. MessagingChannel (Enum)
```python
class MessagingChannel(str, Enum):
    EMAIL = "email"
    SMS = "sms"
    WHATSAPP = "whatsapp"
    TEAMS_CHAT = "teams_chat"
    # Additional channels can be added here
```
### 2. MessagingSubscription
- `id`: UUID
- `subscriptionId`: String (unique identifier, e.g. "SystemErrors", "audit_login")
  - **Validation**: Only letters and `_` allowed
- `subscriptionLabel`: String (display name)
- `mandateId`: String (mandate ID, set automatically by the interface)
- `description`: Optional[String]
- `isSystemSubscription`: Boolean (only admins can create system subscriptions)
- `enabled`: Boolean
- System fields: `creationDate`, `lastModified`, `createdBy`, `modifiedBy`
### 3. MessagingSubscriptionRegistration
- `id`: UUID
- `subscriptionId`: String (reference to the subscription)
- `userId`: String (reference to the user)
- `channel`: MessagingChannel
- `channelConfig`: String (e.g. e-mail address, phone number, Teams user ID)
- `enabled`: Boolean (users can temporarily deactivate themselves)
- System fields: `creationDate`, `lastModified`
### 4. MessagingDelivery
- `id`: UUID
- `subscriptionId`: String (reference to the subscription)
- `userId`: String (reference to the user)
- `channel`: MessagingChannel
- `status`: Enum (PENDING, SENT, FAILED)
- `errorMessage`: Optional[String]
- `sentAt`: Optional[Float] (timestamp when sent)
- System fields: `creationDate`
### 5. MessagingEventParameters (Pydantic Model)
- `triggerData`: dict - event data from the trigger as a dictionary/JSON
### 6. MessagingSendResult (Pydantic Model)
- `success`: Boolean
- `deliveryId`: Optional[String] (ID of the MessagingDelivery record)
- `errorMessage`: Optional[String]
### 7. MessagingSubscriptionExecutionResult (Pydantic Model)
- `success`: Boolean
- `messagesSent`: Integer
- `errorMessage`: Optional[String]
- `extra="allow"` for additional fields
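As a sketch, the status enum and the two result models above might look as follows in Pydantic v2 (enum values and field defaults are assumptions; the authoritative definitions live in `datamodelMessaging.py`):
```python
from enum import Enum
from typing import Optional

from pydantic import BaseModel, ConfigDict

class DeliveryStatus(str, Enum):
    PENDING = "pending"
    SENT = "sent"
    FAILED = "failed"

class MessagingSendResult(BaseModel):
    success: bool
    deliveryId: Optional[str] = None
    errorMessage: Optional[str] = None

class MessagingSubscriptionExecutionResult(BaseModel):
    model_config = ConfigDict(extra="allow")  # additional fields allowed, as specified above
    success: bool
    messagesSent: int = 0
    errorMessage: Optional[str] = None
```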
## RBAC Permission Model
### Access Rules for MessagingSubscription
- **Context**: `DATA`
- **Item**: `MessagingSubscription`
- **Permissions**:
  - **Admin**: All CRUD operations on all subscriptions
  - **Mandate admin**: CRUD on subscriptions of their own mandate (except system subscriptions)
  - **User**: Read on subscriptions of their own mandate; create/update/delete only on their own registrations
### Access Rules for MessagingSubscriptionRegistration
- **Context**: `DATA`
- **Item**: `MessagingSubscriptionRegistration`
- **Permissions**:
  - **User**: CRUD on their own registrations
  - **Admin/mandate admin**: Read on all registrations of the mandate
### Access Rules for MessagingDelivery
- **Context**: `DATA`
- **Item**: `MessagingDelivery`
- **Permissions**:
  - **Admin/mandate admin**: Read on all deliveries
  - **User**: Read on their own deliveries
## Service Architecture
### serviceMessaging/mainServiceMessaging.py
```python
class MessagingService:
def __init__(self, services):
self.services = services
self._messagingInterface = None # interfaceMessaging
# Core Messaging
def sendMessage(
self,
subject: str,
message: str,
registration: MessagingSubscriptionRegistration
) -> MessagingSendResult:
"""
Sendet eine Nachricht über einen Channel an einen User.
Erstellt MessagingDelivery Record.
Args:
subject: Subject der Nachricht (für E-Mail, leer für SMS)
message: Nachrichtentext
registration: MessagingSubscriptionRegistration mit Channel-Info und userId
Returns:
MessagingSendResult mit Status und Delivery-ID
"""
# Erstelle Delivery Record
delivery = MessagingDelivery(
subscriptionId=registration.subscriptionId,
userId=registration.userId,
channel=registration.channel,
status=DeliveryStatus.PENDING
)
# Persist the delivery record
deliveryRecord = self.services.interfaceDbComponent.createDelivery(delivery)
try:
# Send via interfaceMessaging
success = self._getMessagingInterface().send(
channel=registration.channel,
recipient=registration.channelConfig,
subject=subject,
message=message
)
if success:
# Update the delivery record
self.services.interfaceDbComponent.updateDelivery(
deliveryRecord["id"],
{
"status": DeliveryStatus.SENT,
"sentAt": getUtcTimestamp()
}
)
return MessagingSendResult(
success=True,
deliveryId=deliveryRecord["id"]
)
else:
# Update the delivery record with the error
self.services.interfaceDbComponent.updateDelivery(
deliveryRecord["id"],
{
"status": DeliveryStatus.FAILED,
"errorMessage": "Failed to send message"
}
)
return MessagingSendResult(
success=False,
deliveryId=deliveryRecord["id"],
errorMessage="Failed to send message"
)
except Exception as e:
logger.error(f"Error sending message: {str(e)}")
# Update the delivery record with the error
self.services.interfaceDbComponent.updateDelivery(
deliveryRecord["id"],
{
"status": DeliveryStatus.FAILED,
"errorMessage": str(e)
}
)
return MessagingSendResult(
success=False,
deliveryId=deliveryRecord["id"],
errorMessage=str(e)
)
def executeSubscription(
self,
subscriptionId: str,
eventParameters: MessagingEventParameters
) -> MessagingSubscriptionExecutionResult:
"""
Führt eine Subscription-Funktion aus.
Args:
subscriptionId: ID der Subscription
eventParameters: Parameter vom Trigger (als Pydantic Model)
Returns:
MessagingSubscriptionExecutionResult
Raises:
ValueError: Wenn Subscription nicht existiert oder nicht enabled ist
FileNotFoundError: Wenn Subscription-Funktion nicht gefunden wird
"""
# Prüfe ob Subscription existiert und enabled ist
subscription = self.services.interfaceDbComponent.getSubscription(subscriptionId)
if not subscription:
raise ValueError(f"Subscription {subscriptionId} not found")
if not subscription.enabled:
logger.warning(f"Subscription {subscriptionId} is disabled, skipping execution")
return MessagingSubscriptionExecutionResult(
success=False,
messagesSent=0,
errorMessage="Subscription is disabled"
)
# Fetch all active registrations for this subscription
registrations = self._getSubscribers(subscriptionId)
if not registrations:
logger.info(f"No active registrations for subscription {subscriptionId}")
return MessagingSubscriptionExecutionResult(
success=True,
messagesSent=0
)
# Load the subscription function dynamically
subscriptionFunction = self._loadSubscriptionFunction(subscriptionId)
if not subscriptionFunction:
errorMsg = f"Subscription function not found for {subscriptionId}"
logger.error(errorMsg)
raise FileNotFoundError(errorMsg)
# Execute the function with the registrations
try:
return subscriptionFunction.execute(eventParameters, registrations, self)
except Exception as e:
logger.error(f"Error executing subscription {subscriptionId}: {str(e)}", exc_info=True)
return MessagingSubscriptionExecutionResult(
success=False,
messagesSent=0,
errorMessage=str(e)
)
# Helper Methods
def _getSubscribers(
self,
subscriptionId: str,
channel: Optional[MessagingChannel] = None
) -> List[MessagingSubscriptionRegistration]:
"""Holt alle aktiven Subscriber einer Subscription"""
return self.services.interfaceDbComponent.getAllRegistrations(
subscriptionId=subscriptionId,
filters={"enabled": True} if not channel else {"enabled": True, "channel": channel.value}
)
def _loadSubscriptionFunction(self, subscriptionId: str) -> Optional[Callable]:
"""
Lädt die Subscription-Funktion dynamisch.
Returns:
Callable mit execute-Methode oder None wenn nicht gefunden
Note:
subscriptionId wird direkt als Dateiname verwendet (z.B. "SystemErrors" -> subSubscriptionSystemErrors.py)
"""
# Format: subSubscription{subscriptionId}.py
functionName = f"subSubscription{subscriptionId}"
moduleName = f"modules.services.serviceMessaging.subscriptions.{functionName}"
try:
# Dynamic import
import importlib
subscriptionModule = importlib.import_module(moduleName)
return subscriptionModule
except ImportError:
# The function does not exist yet - that is fine
logger.debug(f"Subscription function {moduleName} not found (this is OK if not yet implemented)")
return None
def _getMessagingInterface(self):
"""Holt das Messaging-Interface (interfaceMessaging)"""
if not self._messagingInterface:
from modules.interfaces.interfaceMessaging import getInterface
self._messagingInterface = getInterface()
return self._messagingInterface
```
## Connector Architecture
### Overview
There is a separate connector for each channel in `modules/connectors/`:
- `connectorMessagingEmail.py` - Azure Communication Services
- `connectorMessagingSms.py` - Twilio
- `connectorMessagingWhatsapp.py` - WhatsApp API (future)
- `connectorMessagingTeams.py` - Microsoft Teams API (future)
### Interface: interfaceMessaging.py
The interface `modules/interfaces/interfaceMessaging.py` provides a uniform entry point that invokes every connector with the same schema:
```python
class MessagingInterface:
def send(
self,
channel: MessagingChannel,
recipient: str,
subject: str,
message: str
) -> bool:
"""
Sendet eine Nachricht über den angegebenen Channel.
Args:
channel: MessagingChannel Enum
recipient: Empfänger-Adresse (E-Mail, Telefonnummer, etc.)
subject: Betreff (für E-Mail, leer für SMS)
message: Nachrichtentext
Returns:
bool: True wenn erfolgreich, False bei Fehler
"""
# Wähle Connector basierend auf Channel
if channel == MessagingChannel.EMAIL:
connector = ConnectorMessagingEmail()
elif channel == MessagingChannel.SMS:
connector = ConnectorMessagingSms()
elif channel == MessagingChannel.WHATSAPP:
connector = ConnectorMessagingWhatsapp()
elif channel == MessagingChannel.TEAMS_CHAT:
connector = ConnectorMessagingTeams()
else:
logger.error(f"Unknown channel: {channel}")
return False
# Invoke the connector with the uniform schema
return connector.send(recipient=recipient, subject=subject, message=message)
```
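The if/elif dispatch above could also be written as a small registry, so adding a channel only touches the mapping; a sketch, assuming the connector modules named in this concept:
```python
import logging

from modules.datamodels.datamodelMessaging import MessagingChannel
from modules.connectors.connectorMessagingEmail import ConnectorMessagingEmail
from modules.connectors.connectorMessagingSms import ConnectorMessagingSms

logger = logging.getLogger(__name__)

# Channel -> connector class; new channels register here instead of extending an if/elif chain
_CONNECTOR_REGISTRY = {
    MessagingChannel.EMAIL: ConnectorMessagingEmail,
    MessagingChannel.SMS: ConnectorMessagingSms,
}

def send(channel: MessagingChannel, recipient: str, subject: str, message: str) -> bool:
    connectorClass = _CONNECTOR_REGISTRY.get(channel)
    if connectorClass is None:
        logger.error(f"Unknown channel: {channel}")
        return False
    return connectorClass().send(recipient=recipient, subject=subject, message=message)
```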
### Connector Structure
Every connector implements the same interface:
```python
# modules/connectors/connectorMessagingEmail.py
class ConnectorMessagingEmail:
def __init__(self):
# Initialize the Azure Communication Services client
pass
def send(self, recipient: str, subject: str, message: str) -> bool:
"""
Sendet E-Mail über Azure Communication Services.
Args:
recipient: E-Mail-Adresse
subject: Betreff
message: Nachrichtentext (kann HTML enthalten)
Returns:
bool: True wenn erfolgreich
"""
# Implementierung hier
pass
```
```python
# modules/connectors/connectorMessagingSms.py
class ConnectorMessagingSms:
def __init__(self):
# Initialize the Twilio client
pass
def send(self, recipient: str, subject: str, message: str) -> bool:
"""
Sends an SMS via Twilio.
Args:
recipient: Phone number (with country code)
subject: Ignored (SMS has no subject)
message: Message text
Returns:
bool: True on success
"""
# Implementation goes here
pass
```
**Benefits**:
- Uniform schema for all channels
- Easy to extend with new channels
- Clear separation between service logic and channel implementation
- Connectors can be tested independently
## Interface Methods (interfaceDbComponentObjects.py)
```python
# Subscription Management
def getAllSubscriptions(self, pagination: Optional[PaginationParams] = None) -> Union[List[MessagingSubscription], PaginatedResult]
def getSubscription(self, subscriptionId: str) -> Optional[MessagingSubscription]
def getSubscriptionById(self, id: str) -> Optional[MessagingSubscription] # By UUID
def createSubscription(self, subscriptionData: Dict[str, Any]) -> Dict[str, Any]
def updateSubscription(self, subscriptionId: str, updateData: Dict[str, Any]) -> Dict[str, Any]
def deleteSubscription(self, subscriptionId: str) -> bool
# Registration Management
def getAllRegistrations(self, subscriptionId: Optional[str] = None, userId: Optional[str] = None,
pagination: Optional[PaginationParams] = None) -> Union[List[MessagingSubscriptionRegistration], PaginatedResult]
def getRegistration(self, registrationId: str) -> Optional[MessagingSubscriptionRegistration]
def createRegistration(self, registrationData: Dict[str, Any]) -> Dict[str, Any]
def updateRegistration(self, registrationId: str, updateData: Dict[str, Any]) -> Dict[str, Any]
def deleteRegistration(self, registrationId: str) -> bool
def subscribeUser(self, subscriptionId: str, userId: str, channel: MessagingChannel, channelConfig: str) -> Dict[str, Any]
def unsubscribeUser(self, subscriptionId: str, userId: str, channel: MessagingChannel) -> bool
# Delivery Management
def createDelivery(self, delivery: MessagingDelivery) -> Dict[str, Any]
def updateDelivery(self, deliveryId: str, updateData: Dict[str, Any]) -> Dict[str, Any]
def getDeliveries(self, subscriptionId: Optional[str] = None, userId: Optional[str] = None,
pagination: Optional[PaginationParams] = None) -> Union[List[MessagingDelivery], PaginatedResult]
def getDelivery(self, deliveryId: str) -> Optional[MessagingDelivery]
```
## Route Structure (routeMessaging.py)
### Rate Limits
- **Subscription endpoints**: 60 requests/minute per session
- **Registration endpoints**: 60 requests/minute per session
- **Trigger endpoints**: 60 requests/minute per `subscriptionId`
### Subscription Endpoints
- `GET /api/messaging/subscriptions` - List all subscriptions
- `POST /api/messaging/subscriptions` - Create a new subscription
- `GET /api/messaging/subscriptions/{subscriptionId}` - Fetch a subscription
- `PUT /api/messaging/subscriptions/{subscriptionId}` - Update a subscription
- `DELETE /api/messaging/subscriptions/{subscriptionId}` - Delete a subscription
### Registration Endpoints
- `GET /api/messaging/subscriptions/{subscriptionId}/registrations` - Registrations of a subscription
- `POST /api/messaging/subscriptions/{subscriptionId}/subscribe` - Add a user to a subscription
- `DELETE /api/messaging/subscriptions/{subscriptionId}/unsubscribe` - Remove a user from a subscription
- `GET /api/messaging/registrations` - The user's own registrations
- `PUT /api/messaging/registrations/{registrationId}` - Update a registration (e.g. enabled/disabled)
- `DELETE /api/messaging/registrations/{registrationId}` - Delete a registration
### Trigger Endpoints
- `POST /api/messaging/trigger/{subscriptionId}` - Trigger endpoint for external systems/workflows
  - Body: `{"eventParameters": {...}}`
  - Executes `executeSubscription`
  - Rate limit: 60 requests/minute per `subscriptionId`
### Delivery Endpoints
- `GET /api/messaging/deliveries` - Delivery history
- `GET /api/messaging/deliveries/{deliveryId}` - Fetch a delivery
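For illustration, a client-side subscribe call might look like this (hypothetical host and token; the body shape follows the embedded `Body(..., embed=True)` parameters of the subscribe endpoint):
```python
import httpx

response = httpx.post(
    "https://gateway.example.com/api/messaging/subscriptions/SystemErrors/subscribe",
    json={"channel": "email", "channelConfig": "admin@example.com"},
    cookies={"access_token": "<token>"},  # hypothetical session cookie
)
response.raise_for_status()
registration = response.json()
```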
## Use Cases
### 1. Trigger Route Endpoint
```python
@router.post("/api/messaging/trigger/{subscriptionId}")
@limiter.limit("60/minute", key_func=lambda: f"{request.path_params['subscriptionId']}")
async def trigger_subscription(
request: Request,
subscriptionId: str,
eventParameters: Dict[str, Any] = Body(...),
currentUser: User = Depends(getCurrentUser)
):
"""Trigger-Endpunkt für externe Systeme"""
# RBAC-Check: Nur Admin/Mandate-Admin kann triggern
messagingService = request.app.state.services.messaging
# Konvertiere Dict zu Pydantic Model
eventParams = MessagingEventParameters(triggerData=eventParameters)
executionResult = messagingService.executeSubscription(subscriptionId, eventParams)
return executionResult
```
### 2. Workflow Action
A workflow can invoke the `messaging.executeSubscription` action with:
- `subscriptionId`: String
- `eventParameters`: Dict (converted to MessagingEventParameters); see the sketch below
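A hypothetical sketch of such an action, following the `@action` pattern used by the workflow methods in this PR (method and parameter names are illustrative, not part of the implemented API):
```python
@action
async def executeMessagingSubscription(self, parameters: Dict[str, Any]) -> ActionResult:
    """Hypothetical workflow action that fires a messaging subscription."""
    subscriptionId = parameters.get("subscriptionId")
    if not subscriptionId:
        return ActionResult.isFailure(error="subscriptionId parameter is required")
    eventParams = MessagingEventParameters(triggerData=parameters.get("eventParameters", {}))
    result = self.services.messaging.executeSubscription(subscriptionId, eventParams)
    if not result.success:
        return ActionResult.isFailure(error=result.errorMessage or "Subscription execution failed")
    return ActionResult.isSuccess(documents=[])
```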
### 3. Scheduled Job (System Errors)
```python
def _sendSystemErrorsJob(self):
"""Tägliches Mail an Admin mit Log-Errors"""
# Sammle Errors aus Log
errors = self._collectLogErrors()
if errors:
messagingService = self.services.messaging
eventParams = MessagingEventParameters(triggerData={"errors": errors, "timestamp": getUtcTimestamp()})
messagingService.executeSubscription(
subscriptionId="SystemErrors",
eventParameters=eventParams
)
```
### 4. Audit Log Events
```python
# In audit_logger.py
def logAuditEvent(eventType: str, userId: str, details: Dict):
# ... existing audit logging ...
# Trigger messaging if subscription exists
if eventType == "login":
messagingService = getMessagingService(getAdminUser())
eventParams = MessagingEventParameters(
triggerData={
"eventType": eventType,
"userId": userId,
"details": details,
"timestamp": getUtcTimestamp()
}
)
messagingService.executeSubscription(
subscriptionId="audit_login",
eventParameters=eventParams
)
```
## Error Handling
Errors are handled the same way as in other modules:
- Normal logger output with `logger.error()`, `logger.warning()`, `logger.info()`
- Exceptions are logged with `exc_info=True` for stack traces
- No special error handlers; standard Python exception handling
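A minimal sketch of that convention (`processDeliveries` is a hypothetical stand-in for any messaging step):
```python
import logging

logger = logging.getLogger(__name__)

def processDeliveries():
    # Hypothetical operation standing in for any messaging step
    raise RuntimeError("simulated failure")

try:
    processDeliveries()
except Exception as e:
    # exc_info=True attaches the stack trace, per the convention above
    logger.error(f"Error processing deliveries: {str(e)}", exc_info=True)
```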
## Configuration
### Environment Variables
```env
# Email (Azure Communication Services)
MESSAGING_ACS_CONNECTION_STRING=...
MESSAGING_ACS_SENDER_EMAIL=...
# SMS (Twilio)
MESSAGING_TWILIO_ACCOUNT_SID=...
MESSAGING_TWILIO_AUTH_TOKEN=...
MESSAGING_TWILIO_FROM_NUMBER=...
# WhatsApp (future)
MESSAGING_WHATSAPP_API_KEY=...
# Teams Chat (future)
MESSAGING_TEAMS_APP_ID=...
MESSAGING_TEAMS_APP_SECRET=...
```
## Implementation Order
1. **Data models** (`datamodelMessaging.py`) ✅
2. **Connectors** (`connectorMessagingEmail.py`, `connectorMessagingSms.py`)
3. **Interface** (`interfaceMessaging.py`)
4. **Interface methods** (`interfaceDbComponentObjects.py`)
5. **Service implementation** (`serviceMessaging/mainServiceMessaging.py`)
6. **Routes** (`routeMessaging.py`)
7. **Integration** (register the service in `__init__.py`, register the routes)
8. **Subscription functions** (`serviceMessaging/subscriptions/`)
   - Can be added afterwards
   - The system also works without functions (users can still subscribe)
9. **Tests** (unit tests for the service, integration tests for the routes)
**Important**: The database structure is the foundation. Subscription functions are optional and can be added later.

View file

@ -0,0 +1,241 @@
"""
Messaging service for sending messages across different channels.
Provides subscription-based messaging functionality.
"""
import logging
import re
from typing import List, Optional, Callable
from modules.datamodels.datamodelMessaging import (
MessagingSubscription,
MessagingSubscriptionRegistration,
MessagingDelivery,
MessagingChannel,
MessagingEventParameters,
MessagingSendResult,
MessagingSubscriptionExecutionResult,
DeliveryStatus
)
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
from modules.shared.timeUtils import getUtcTimestamp
logger = logging.getLogger(__name__)
class MessagingService:
"""
Messaging service providing subscription-based messaging functionality.
"""
def __init__(self, services):
"""Initialize messaging service with service center access.
Args:
services: Service center instance providing access to interfaces
"""
self.services = services
self._messagingInterface = None
def sendMessage(
self,
subject: str,
message: str,
registration: MessagingSubscriptionRegistration
) -> MessagingSendResult:
"""
Sends a message over one channel to one user.
Creates a MessagingDelivery record.
Args:
subject: Subject of the message (for e-mail, empty for SMS)
message: Message text
registration: MessagingSubscriptionRegistration with channel info and userId
Returns:
MessagingSendResult with status and delivery ID
"""
# Create delivery record
delivery = MessagingDelivery(
subscriptionId=registration.subscriptionId,
userId=registration.userId,
channel=registration.channel,
status=DeliveryStatus.PENDING
)
# Persist the delivery record
try:
deliveryRecord = self.services.interfaceDbComponent.createDelivery(delivery)
except Exception as e:
logger.error(f"Failed to create delivery record: {str(e)}")
return MessagingSendResult(
success=False,
errorMessage=f"Failed to create delivery record: {str(e)}"
)
try:
# Send via interfaceMessaging
success = self._getMessagingInterface().send(
channel=registration.channel,
recipient=registration.channelConfig,
subject=subject,
message=message
)
if success:
# Update the delivery record
self.services.interfaceDbComponent.updateDelivery(
deliveryRecord["id"],
{
"status": DeliveryStatus.SENT,
"sentAt": getUtcTimestamp()
}
)
return MessagingSendResult(
success=True,
deliveryId=deliveryRecord["id"]
)
else:
# Update the delivery record with the error
self.services.interfaceDbComponent.updateDelivery(
deliveryRecord["id"],
{
"status": DeliveryStatus.FAILED,
"errorMessage": "Failed to send message"
}
)
return MessagingSendResult(
success=False,
deliveryId=deliveryRecord["id"],
errorMessage="Failed to send message"
)
except Exception as e:
logger.error(f"Error sending message: {str(e)}")
# Update the delivery record with the error
try:
self.services.interfaceDbComponent.updateDelivery(
deliveryRecord["id"],
{
"status": DeliveryStatus.FAILED,
"errorMessage": str(e)
}
)
except Exception as updateError:
logger.error(f"Failed to update delivery record: {str(updateError)}")
return MessagingSendResult(
success=False,
deliveryId=deliveryRecord["id"],
errorMessage=str(e)
)
def executeSubscription(
self,
subscriptionId: str,
eventParameters: MessagingEventParameters
) -> MessagingSubscriptionExecutionResult:
"""
Executes a subscription function.
Args:
subscriptionId: ID of the subscription
eventParameters: Parameters from the trigger (as a Pydantic model)
Returns:
MessagingSubscriptionExecutionResult
Raises:
ValueError: If the subscription does not exist or is not enabled
FileNotFoundError: If the subscription function cannot be found
"""
# Check that the subscription exists and is enabled
subscription = self.services.interfaceDbComponent.getSubscription(subscriptionId)
if not subscription:
raise ValueError(f"Subscription {subscriptionId} not found")
if not subscription.enabled:
logger.warning(f"Subscription {subscriptionId} is disabled, skipping execution")
return MessagingSubscriptionExecutionResult(
success=False,
messagesSent=0,
errorMessage="Subscription is disabled"
)
# Fetch all active registrations for this subscription
registrations = self._getSubscribers(subscriptionId)
if not registrations:
logger.info(f"No active registrations for subscription {subscriptionId}")
return MessagingSubscriptionExecutionResult(
success=True,
messagesSent=0
)
# Load the subscription function dynamically
subscriptionFunction = self._loadSubscriptionFunction(subscriptionId)
if not subscriptionFunction:
errorMsg = f"Subscription function not found for {subscriptionId}"
logger.error(errorMsg)
raise FileNotFoundError(errorMsg)
# Execute the function with the registrations
try:
return subscriptionFunction.execute(eventParameters, registrations, self)
except Exception as e:
logger.error(f"Error executing subscription {subscriptionId}: {str(e)}", exc_info=True)
return MessagingSubscriptionExecutionResult(
success=False,
messagesSent=0,
errorMessage=str(e)
)
def _getSubscribers(
self,
subscriptionId: str,
channel: Optional[MessagingChannel] = None
) -> List[MessagingSubscriptionRegistration]:
"""Holt alle aktiven Subscriber einer Subscription"""
filters = {"enabled": True}
if channel:
filters["channel"] = channel.value
registrations = self.services.interfaceDbComponent.getAllRegistrations(
subscriptionId=subscriptionId
)
# Filter nach enabled und channel
filteredRegistrations = []
for reg in registrations:
if reg.enabled and (not channel or reg.channel == channel):
filteredRegistrations.append(reg)
return filteredRegistrations
def _loadSubscriptionFunction(self, subscriptionId: str) -> Optional[Callable]:
"""
Loads the subscription function dynamically.
Returns:
Callable with an execute method, or None if not found
Note:
The subscriptionId is used verbatim as the file name (e.g. "SystemErrors" -> subSubscriptionSystemErrors.py)
"""
# Format: subSubscription{subscriptionId}.py
functionName = f"subSubscription{subscriptionId}"
moduleName = f"modules.services.serviceMessaging.subscriptions.{functionName}"
try:
# Dynamic import
import importlib
subscriptionModule = importlib.import_module(moduleName)
return subscriptionModule
except ImportError:
# The function does not exist yet - that is fine
logger.debug(f"Subscription function {moduleName} not found (this is OK if not yet implemented)")
return None
def _getMessagingInterface(self):
"""Holt das Messaging-Interface (interfaceMessaging)"""
if not self._messagingInterface:
self._messagingInterface = getMessagingInterface()
return self._messagingInterface

View file

@ -0,0 +1,71 @@
"""
Example subscription function for System Errors.
This is a template that can be used as a reference for creating other subscription functions.
"""
from typing import List
from modules.datamodels.datamodelMessaging import (
MessagingEventParameters,
MessagingSubscriptionExecutionResult,
MessagingSubscriptionRegistration,
MessagingChannel
)
def execute(
eventParameters: MessagingEventParameters,
registrations: List[MessagingSubscriptionRegistration],
messagingService
) -> MessagingSubscriptionExecutionResult:
"""
Subscription function for system errors.
Receives eventParameters from the trigger and the registrations already fetched.
Args:
eventParameters: Event parameters from the trigger
registrations: List of active registrations for this subscription
messagingService: MessagingService instance
Returns:
MessagingSubscriptionExecutionResult with status and the number of messages sent
"""
# Group registrations by channel
emailRegistrations = [r for r in registrations if r.channel == MessagingChannel.EMAIL]
smsRegistrations = [r for r in registrations if r.channel == MessagingChannel.SMS]
# Prepare messages (they may differ per channel)
triggerData = eventParameters.triggerData
errors = triggerData.get('errors', [])
timestamp = triggerData.get('timestamp', 'Unknown')
emailSubject = "System Error Report"
emailMessage = f"System errors detected at {timestamp}:\n\n{errors}"
smsMessage = f"System Error: {len(errors)} errors detected at {timestamp}"
messagesSent = 0
# Send the messages via sendMessage
for reg in emailRegistrations:
sendResult = messagingService.sendMessage(
subject=emailSubject,
message=emailMessage,
registration=reg
)
if sendResult.success:
messagesSent += 1
for reg in smsRegistrations:
sendResult = messagingService.sendMessage(
subject="", # SMS hat kein Subject
message=smsMessage,
registration=reg
)
if sendResult.success:
messagesSent += 1
return MessagingSubscriptionExecutionResult(
success=True,
messagesSent=messagesSent
)

View file

@ -7,7 +7,7 @@ import logging
from typing import Optional, Callable
from modules.datamodels.datamodelSecurity import Token
from modules.security.tokenManager import TokenManager
from modules.auth import TokenManager
logger = logging.getLogger(__name__)

View file

@ -6,12 +6,14 @@ Handles workflow context queries and document indexing.
import time
import json
import logging
import aiohttp
from typing import Dict, Any, List
from datetime import datetime, UTC
from modules.workflows.methods.methodBase import MethodBase, action
from modules.datamodels.datamodelChat import ActionResult, ActionDocument
from modules.datamodels.datamodelExtraction import ExtractionOptions, MergeStrategy
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
@ -351,3 +353,106 @@ class MethodContext(MethodBase):
return ActionResult.isFailure(error=str(e))
@action
async def triggerPreprocessingServer(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Trigger preprocessing server at customer tenant to update database with configuration.
This action makes a POST request to the preprocessing server endpoint with the provided
configuration JSON. The authorization secret is retrieved from APP_CONFIG using the provided config key.
Parameters:
- endpoint (str, required): The full URL endpoint for the preprocessing server API.
- configJson (dict or str, required): Configuration JSON object to send to the preprocessing server. Can be provided as a dict or as a JSON string that will be parsed.
- authSecretConfigKey (str, required): The APP_CONFIG key name to retrieve the authorization secret from.
Returns:
- ActionResult with ActionDocument containing "ok" on success, or error message on failure.
"""
try:
endpoint = parameters.get("endpoint")
if not endpoint:
return ActionResult.isFailure(error="endpoint parameter is required")
configJsonParam = parameters.get("configJson")
if not configJsonParam:
return ActionResult.isFailure(error="configJson parameter is required")
authSecretConfigKey = parameters.get("authSecretConfigKey")
if not authSecretConfigKey:
return ActionResult.isFailure(error="authSecretConfigKey parameter is required")
# Handle configJson as either dict or JSON string
if isinstance(configJsonParam, str):
try:
configJson = json.loads(configJsonParam)
except json.JSONDecodeError as e:
return ActionResult.isFailure(error=f"configJson is not valid JSON: {str(e)}")
elif isinstance(configJsonParam, dict):
configJson = configJsonParam
else:
return ActionResult.isFailure(error=f"configJson must be a dict or JSON string, got {type(configJsonParam)}")
# Get authorization secret from APP_CONFIG using the provided config key
authSecret = APP_CONFIG.get(authSecretConfigKey)
if not authSecret:
errorMsg = f"{authSecretConfigKey} not found in APP_CONFIG"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
# Prepare headers with authorization (default headers as in original function)
headers = {
"X-PP-API-Key": authSecret,
"Content-Type": "application/json"
}
# Make POST request
timeout = aiohttp.ClientTimeout(total=60)
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.post(
endpoint,
headers=headers,
json=configJson
) as response:
if response.status in [200, 201]:
responseText = await response.text()
logger.info(f"Preprocessing server trigger successful: {response.status}")
logger.debug(f"Response: {responseText}")
# Generate meaningful filename
workflowContext = self.services.chat.getWorkflowContext() if hasattr(self.services, 'chat') else None
filename = self._generateMeaningfulFileName(
"preprocessing_result",
"txt",
workflowContext,
"triggerPreprocessingServer"
)
# Create validation metadata
validationMetadata = self._createValidationMetadata(
"triggerPreprocessingServer",
endpoint=endpoint,
statusCode=response.status,
responseText=responseText
)
# Return success with "ok" document
document = ActionDocument(
documentName=filename,
documentData="ok",
mimeType="text/plain",
validationMetadata=validationMetadata
)
return ActionResult.isSuccess(documents=[document])
else:
errorText = await response.text()
errorMsg = f"Preprocessing server trigger failed: {response.status} - {errorText}"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
except Exception as e:
errorMsg = f"Error triggering preprocessing server: {str(e)}"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
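The dict-or-JSON-string handling for configJson is easy to exercise on its own; this is a minimal restatement of that branch (the function name is ours, the logic mirrors the action above):

```python
import json

def coerceConfigJson(configJsonParam):
    """Accept a dict as-is, parse a JSON string, reject anything else."""
    if isinstance(configJsonParam, str):
        return json.loads(configJsonParam)  # raises json.JSONDecodeError on invalid input
    if isinstance(configJsonParam, dict):
        return configJsonParam
    raise TypeError(f"configJson must be a dict or JSON string, got {type(configJsonParam)}")

assert coerceConfigJson('{"tenant": "acme"}') == {"tenant": "acme"}
assert coerceConfigJson({"tenant": "acme"}) == {"tenant": "acme"}
```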

File diff suppressed because it is too large

View file

@@ -2387,3 +2387,452 @@ class MethodSharepoint(MethodBase):
success=False,
error=str(e)
)
@action
async def findSiteByUrl(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Find SharePoint site by hostname and site path.
Parameters:
- connectionReference (str, required): Microsoft connection label.
- hostname (str, required): SharePoint hostname (e.g., "example.sharepoint.com")
- sitePath (str, required): Site path (e.g., "SteeringBPM" or "/sites/SteeringBPM")
Returns:
- ActionResult with ActionDocument containing site information (id, displayName, name, webUrl)
"""
try:
connectionReference = parameters.get("connectionReference")
if not connectionReference:
return ActionResult.isFailure(error="connectionReference parameter is required")
hostname = parameters.get("hostname")
if not hostname:
return ActionResult.isFailure(error="hostname parameter is required")
sitePath = parameters.get("sitePath")
if not sitePath:
return ActionResult.isFailure(error="sitePath parameter is required")
# Get Microsoft connection
connection = self._getMicrosoftConnection(connectionReference)
if not connection:
return ActionResult.isFailure(error="No valid Microsoft connection found for the provided connection reference")
# Find site by URL
siteInfo = await self.services.sharepoint.findSiteByUrl(
hostname=hostname,
sitePath=sitePath
)
if not siteInfo:
return ActionResult.isFailure(error=f"Site not found: {hostname}:/sites/{sitePath}")
logger.info(f"Found SharePoint site: {siteInfo.get('displayName')} (ID: {siteInfo.get('id')})")
# Generate filename
workflowContext = self.services.chat.getWorkflowContext() if hasattr(self.services, 'chat') else None
filename = self._generateMeaningfulFileName(
"sharepoint_site",
"json",
workflowContext,
"findSiteByUrl"
)
validationMetadata = self._createValidationMetadata(
"findSiteByUrl",
hostname=hostname,
sitePath=sitePath,
siteId=siteInfo.get("id")
)
document = ActionDocument(
documentName=filename,
documentData=json.dumps(siteInfo, indent=2),
mimeType="application/json",
validationMetadata=validationMetadata
)
return ActionResult.isSuccess(documents=[document])
except Exception as e:
errorMsg = f"Error finding SharePoint site: {str(e)}"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
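A hedged invocation sketch for the action above; the connection label, hostname, and site path are placeholders, and the ActionResult field names are assumptions read off the diff:

```python
import json

async def resolveSiteId(methodSharepoint) -> str:
    # Resolve a SharePoint site and return its ID for use in later actions.
    result = await methodSharepoint.findSiteByUrl({
        "connectionReference": "microsoft-default",   # placeholder connection label
        "hostname": "example.sharepoint.com",
        "sitePath": "SteeringBPM",
    })
    siteInfo = json.loads(result.documents[0].documentData)
    return siteInfo["id"]
```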
@action
async def downloadFileByPath(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Download file from SharePoint by exact file path.
Parameters:
- connectionReference (str, required): Microsoft connection label.
- siteId (str, required): SharePoint site ID (from findSiteByUrl result) or document reference containing site info
- filePath (str, required): Full file path relative to site root (e.g., "/General/50 Docs hosted by SELISE/file.xlsx")
Returns:
- ActionResult with ActionDocument containing file content as base64-encoded bytes
"""
try:
connectionReference = parameters.get("connectionReference")
if not connectionReference:
return ActionResult.isFailure(error="connectionReference parameter is required")
siteIdParam = parameters.get("siteId")
if not siteIdParam:
return ActionResult.isFailure(error="siteId parameter is required")
filePath = parameters.get("filePath")
if not filePath:
return ActionResult.isFailure(error="filePath parameter is required")
# Extract siteId from document if it's a reference
siteId = None
if isinstance(siteIdParam, str):
# Try to parse from document reference
from modules.datamodels.datamodelDocref import DocumentReferenceList
try:
docList = DocumentReferenceList.from_string_list([siteIdParam])
chatDocuments = self.services.chat.getChatDocumentsFromDocumentList(docList)
if chatDocuments and len(chatDocuments) > 0:
siteInfoJson = json.loads(chatDocuments[0].documentData)
siteId = siteInfoJson.get("id")
except Exception:
pass  # not a document reference; fall back to treating the value as a raw site ID
if not siteId:
# Assume it's the site ID directly
siteId = siteIdParam
else:
siteId = siteIdParam
if not siteId:
return ActionResult.isFailure(error="Could not extract siteId from parameter")
# Get Microsoft connection
connection = self._getMicrosoftConnection(connectionReference)
if not connection:
return ActionResult.isFailure(error="No valid Microsoft connection found for the provided connection reference")
# Download file
fileContent = await self.services.sharepoint.downloadFileByPath(
siteId=siteId,
filePath=filePath
)
if fileContent is None:
return ActionResult.isFailure(error=f"File not found or could not be downloaded: {filePath}")
logger.info(f"Downloaded file from SharePoint: {filePath} ({len(fileContent)} bytes)")
# Generate filename from filePath
import os
fileName = os.path.basename(filePath) or "downloaded_file"
workflowContext = self.services.chat.getWorkflowContext() if hasattr(self.services, 'chat') else None
filename = self._generateMeaningfulFileName(
fileName.split('.')[0] if '.' in fileName else fileName,
fileName.split('.')[-1] if '.' in fileName else "bin",
workflowContext,
"downloadFileByPath"
)
# Encode as base64
import base64
fileBase64 = base64.b64encode(fileContent).decode('utf-8')
validationMetadata = self._createValidationMetadata(
"downloadFileByPath",
siteId=siteId,
filePath=filePath,
fileSize=len(fileContent)
)
document = ActionDocument(
documentName=filename,
documentData=fileBase64,
mimeType="application/octet-stream",
validationMetadata=validationMetadata
)
return ActionResult.isSuccess(documents=[document])
except Exception as e:
errorMsg = f"Error downloading file from SharePoint: {str(e)}"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
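Since the action returns the file as base64 inside an ActionDocument, a caller has to decode it; a hedged sketch, with placeholders and assumed result fields as before:

```python
import base64

async def downloadBytes(methodSharepoint, siteId: str) -> bytes:
    result = await methodSharepoint.downloadFileByPath({
        "connectionReference": "microsoft-default",
        "siteId": siteId,
        "filePath": "/General/reports/summary.xlsx",  # placeholder path
    })
    if not result.success:
        raise RuntimeError(result.error)
    # documentData holds the file content base64-encoded
    return base64.b64decode(result.documents[0].documentData)
```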
@action
async def copyFile(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Copy file within SharePoint.
Parameters:
- connectionReference (str, required): Microsoft connection label.
- siteId (str, required): SharePoint site ID (from findSiteByUrl result) or document reference containing site info
- sourceFolder (str, required): Source folder path relative to site root
- sourceFile (str, required): Source file name
- destFolder (str, required): Destination folder path relative to site root
- destFile (str, required): Destination file name
Returns:
- ActionResult with ActionDocument containing copy result
"""
try:
connectionReference = parameters.get("connectionReference")
if not connectionReference:
return ActionResult.isFailure(error="connectionReference parameter is required")
siteIdParam = parameters.get("siteId")
if not siteIdParam:
return ActionResult.isFailure(error="siteId parameter is required")
sourceFolder = parameters.get("sourceFolder")
if not sourceFolder:
return ActionResult.isFailure(error="sourceFolder parameter is required")
sourceFile = parameters.get("sourceFile")
if not sourceFile:
return ActionResult.isFailure(error="sourceFile parameter is required")
destFolder = parameters.get("destFolder")
if not destFolder:
return ActionResult.isFailure(error="destFolder parameter is required")
destFile = parameters.get("destFile")
if not destFile:
return ActionResult.isFailure(error="destFile parameter is required")
# Extract siteId from document if it's a reference
siteId = None
if isinstance(siteIdParam, str):
from modules.datamodels.datamodelDocref import DocumentReferenceList
try:
docList = DocumentReferenceList.from_string_list([siteIdParam])
chatDocuments = self.services.chat.getChatDocumentsFromDocumentList(docList)
if chatDocuments and len(chatDocuments) > 0:
siteInfoJson = json.loads(chatDocuments[0].documentData)
siteId = siteInfoJson.get("id")
except Exception:
pass  # not a document reference; fall back to treating the value as a raw site ID
if not siteId:
siteId = siteIdParam
else:
siteId = siteIdParam
if not siteId:
return ActionResult.isFailure(error="Could not extract siteId from parameter")
# Get Microsoft connection
connection = self._getMicrosoftConnection(connectionReference)
if not connection:
return ActionResult.isFailure(error="No valid Microsoft connection found for the provided connection reference")
# Copy file
await self.services.sharepoint.copyFileAsync(
siteId=siteId,
sourceFolder=sourceFolder,
sourceFile=sourceFile,
destFolder=destFolder,
destFile=destFile
)
logger.info(f"Copied file in SharePoint: {sourceFolder}/{sourceFile} -> {destFolder}/{destFile}")
# Generate filename
workflowContext = self.services.chat.getWorkflowContext() if hasattr(self.services, 'chat') else None
filename = self._generateMeaningfulFileName(
"file_copy_result",
"json",
workflowContext,
"copyFile"
)
result = {
"success": True,
"siteId": siteId,
"sourcePath": f"{sourceFolder}/{sourceFile}",
"destPath": f"{destFolder}/{destFile}"
}
validationMetadata = self._createValidationMetadata(
"copyFile",
siteId=siteId,
sourcePath=f"{sourceFolder}/{sourceFile}",
destPath=f"{destFolder}/{destFile}"
)
document = ActionDocument(
documentName=filename,
documentData=json.dumps(result, indent=2),
mimeType="application/json",
validationMetadata=validationMetadata
)
return ActionResult.isSuccess(documents=[document])
except Exception as e:
# Handle file not found gracefully
if "itemNotFound" in str(e) or "404" in str(e):
logger.warning(f"File not found for copy: {parameters.get('sourceFolder')}/{parameters.get('sourceFile')}")
# Return success with skipped status
workflowContext = self.services.chat.getWorkflowContext() if hasattr(self.services, 'chat') else None
filename = self._generateMeaningfulFileName(
"file_copy_result",
"json",
workflowContext,
"copyFile"
)
result = {
"success": True,
"skipped": True,
"reason": "File not found (may not exist yet)"
}
validationMetadata = self._createValidationMetadata(
"copyFile",
skipped=True
)
document = ActionDocument(
documentName=filename,
documentData=json.dumps(result, indent=2),
mimeType="application/json",
validationMetadata=validationMetadata
)
return ActionResult.isSuccess(documents=[document])
errorMsg = f"Error copying file in SharePoint: {str(e)}"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
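Note the deliberate design choice in the except branch: a missing source file ("itemNotFound"/404) is reported as success with a skipped flag rather than failing the workflow, so copy steps can safely run before the source exists. A hedged sketch of the call itself, all values placeholders:

```python
async def copyReportTemplate(methodSharepoint, siteId: str):
    # If the source file is absent, the result document contains
    # {"success": true, "skipped": true, ...} instead of an error.
    return await methodSharepoint.copyFile({
        "connectionReference": "microsoft-default",
        "siteId": siteId,
        "sourceFolder": "/General/templates",
        "sourceFile": "report_template.xlsx",
        "destFolder": "/General/reports",
        "destFile": "report_2025.xlsx",
    })
```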
@action
async def uploadFile(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Upload raw file content (bytes) to SharePoint.
Parameters:
- connectionReference (str, required): Microsoft connection label.
- siteId (str, required): SharePoint site ID (from findSiteByUrl result) or document reference containing site info
- folderPath (str, required): Folder path relative to site root
- fileName (str, required): File name
- content (str, required): Document reference containing file content as base64-encoded bytes
Returns:
- ActionResult with ActionDocument containing upload result
"""
try:
connectionReference = parameters.get("connectionReference")
if not connectionReference:
return ActionResult.isFailure(error="connectionReference parameter is required")
siteIdParam = parameters.get("siteId")
if not siteIdParam:
return ActionResult.isFailure(error="siteId parameter is required")
folderPath = parameters.get("folderPath")
if not folderPath:
return ActionResult.isFailure(error="folderPath parameter is required")
fileName = parameters.get("fileName")
if not fileName:
return ActionResult.isFailure(error="fileName parameter is required")
contentParam = parameters.get("content")
if not contentParam:
return ActionResult.isFailure(error="content parameter is required")
# Extract siteId from document if it's a reference
siteId = None
if isinstance(siteIdParam, str):
from modules.datamodels.datamodelDocref import DocumentReferenceList
try:
docList = DocumentReferenceList.from_string_list([siteIdParam])
chatDocuments = self.services.chat.getChatDocumentsFromDocumentList(docList)
if chatDocuments and len(chatDocuments) > 0:
siteInfoJson = json.loads(chatDocuments[0].documentData)
siteId = siteInfoJson.get("id")
except Exception:
pass  # not a document reference; fall back to treating the value as a raw site ID
if not siteId:
siteId = siteIdParam
else:
siteId = siteIdParam
if not siteId:
return ActionResult.isFailure(error="Could not extract siteId from parameter")
# Get file content from document
from modules.datamodels.datamodelDocref import DocumentReferenceList
docList = DocumentReferenceList.from_string_list([contentParam] if isinstance(contentParam, str) else contentParam)
chatDocuments = self.services.chat.getChatDocumentsFromDocumentList(docList)
if not chatDocuments or len(chatDocuments) == 0:
return ActionResult.isFailure(error="Could not get file content from document reference")
fileContentBase64 = chatDocuments[0].documentData
# Decode base64
import base64
try:
fileContent = base64.b64decode(fileContentBase64)
except Exception as e:
return ActionResult.isFailure(error=f"Could not decode base64 file content: {str(e)}")
# Get Microsoft connection
connection = self._getMicrosoftConnection(connectionReference)
if not connection:
return ActionResult.isFailure(error="No valid Microsoft connection found for the provided connection reference")
# Upload file
uploadResult = await self.services.sharepoint.uploadFile(
siteId=siteId,
folderPath=folderPath,
fileName=fileName,
content=fileContent
)
if "error" in uploadResult:
return ActionResult.isFailure(error=f"Upload failed: {uploadResult['error']}")
logger.info(f"Uploaded file to SharePoint: {folderPath}/{fileName} ({len(fileContent)} bytes)")
# Generate filename
workflowContext = self.services.chat.getWorkflowContext() if hasattr(self.services, 'chat') else None
filename = self._generateMeaningfulFileName(
"file_upload_result",
"json",
workflowContext,
"uploadFile"
)
result = {
"success": True,
"siteId": siteId,
"filePath": f"{folderPath}/{fileName}",
"fileSize": len(fileContent),
"uploadResult": uploadResult
}
validationMetadata = self._createValidationMetadata(
"uploadFile",
siteId=siteId,
filePath=f"{folderPath}/{fileName}",
fileSize=len(fileContent)
)
document = ActionDocument(
documentName=filename,
documentData=json.dumps(result, indent=2),
mimeType="application/json",
validationMetadata=validationMetadata
)
return ActionResult.isSuccess(documents=[document])
except Exception as e:
errorMsg = f"Error uploading file to SharePoint: {str(e)}"
logger.error(errorMsg)
return ActionResult.isFailure(error=errorMsg)
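uploadFile expects its content parameter to be a document reference whose data is base64-encoded, which is exactly what downloadFileByPath produces, so a download result can be fed straight back in. The encoding contract in isolation:

```python
import base64

raw = b"%PDF-1.7 example payload"
encoded = base64.b64encode(raw).decode("utf-8")  # what the content document must hold
assert base64.b64decode(encoded) == raw          # what uploadFile reconstructs before upload
```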

View file

@@ -81,53 +81,6 @@ class ContentValidator:
})
return summaries
def _calculateAvailablePromptSpace(self, basePromptSizeBytes: int) -> int:
"""Calculate available space for document summaries based on model context length."""
try:
from modules.aicore.aicoreModelRegistry import modelRegistry
from modules.aicore.aicoreModelSelector import modelSelector
from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum
# Get available models
availableModels = modelRegistry.getAvailableModels()
# Create options for PLAN operation (what validation uses)
# Use default values for priority and processingMode (will use defaults from AiCallOptions)
from modules.datamodels.datamodelAi import PriorityEnum, ProcessingModeEnum
options = AiCallOptions(
operationType=OperationTypeEnum.PLAN,
priority=PriorityEnum.BALANCED,
processingMode=ProcessingModeEnum.BASIC
)
# Get failover model list to find the model that will be used
failoverModels = modelSelector.getFailoverModelList("", "", options, availableModels)
if not failoverModels:
# Fallback: assume 16K tokens context (conservative)
logger.warning("No models available for space calculation, using fallback: 16K tokens")
maxBytes = 16 * 1024 * 4 # 16K tokens * 4 bytes per token
else:
# Use the first (best) model
model = failoverModels[0]
# Calculate 80% of context length in bytes (tokens * 4 bytes per token)
maxBytes = int(model.contextLength * 0.8 * 4)
# Available space = max - base prompt - safety margin (10%)
availableBytes = int((maxBytes - basePromptSizeBytes) * 0.9)
# Ensure minimum available space (at least 1KB)
availableBytes = max(availableBytes, 1024)
logger.debug(f"Prompt space calculation: base={basePromptSizeBytes} bytes, max={maxBytes} bytes, available={availableBytes} bytes")
return availableBytes
except Exception as e:
logger.warning(f"Error calculating available prompt space: {str(e)}, using fallback: 8KB")
# Fallback: assume 8KB available
return 8 * 1024
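For reference, the arithmetic the removed helper performed, restated as a standalone sketch with a worked example:

```python
def availablePromptSpace(contextLengthTokens: int, basePromptSizeBytes: int) -> int:
    # 80% of the model context, ~4 bytes per token, minus the base prompt,
    # with a 10% safety margin and a 1 KB floor.
    maxBytes = int(contextLengthTokens * 0.8 * 4)
    return max(int((maxBytes - basePromptSizeBytes) * 0.9), 1024)

# A 16K-token context with a 10 KB base prompt leaves
# int((52428 - 10240) * 0.9) = 37969 bytes for document summaries.
print(availablePromptSpace(16 * 1024, 10 * 1024))  # 37969
```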
def _summarizeJsonStructure(self, jsonData: Any) -> Dict[str, Any]:
"""Summarize JSON document structure for validation - extracts main objects, statistics, captions, and IDs."""
try:
@@ -251,7 +204,7 @@ class ContentValidator:
logger.warning(f"Error summarizing JSON structure: {str(e)}")
return {"error": str(e), "type": "error"}
def _analyzeDocumentsWithSizeLimit(self, documents: List[Any], maxTotalBytes: int) -> List[Dict[str, Any]]:
def _analyzeDocuments(self, documents: List[Any]) -> List[Dict[str, Any]]:
"""
Analyze documents for validation - includes metadata AND JSON structure summary.
JSON summary provides structure information (sections, tables with captions, IDs) without full content.
@@ -582,13 +535,8 @@ SUCCESS CRITERIA TO VALIDATE in criteriaMapping array:
DELIVERED DOCUMENTS ({len(documents)} items):
"""
# Calculate available space for document summaries
# Get the model that will be used for validation
basePromptSize = len(promptBase.encode('utf-8'))
availableBytes = self._calculateAvailablePromptSpace(basePromptSize)
# Analyze documents with size constraints
documentSummaries = self._analyzeDocumentsWithSizeLimit(documents, availableBytes)
# Analyze documents
documentSummaries = self._analyzeDocuments(documents)
# Build final prompt with summaries at the end
documentsJson = json.dumps(documentSummaries, indent=2, ensure_ascii=False)

View file

@@ -17,7 +17,7 @@ _gateway_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".
if _gateway_path not in sys.path:
sys.path.insert(0, _gateway_path)
from modules.features.chatPlayground.mainChatPlayground import getServices
from modules.services import getInterface as getServices
from modules.datamodels.datamodelAi import (
AiCallOptions,
AiCallRequest,

View file

@@ -30,7 +30,7 @@ if _gateway_path not in sys.path:
sys.path.insert(0, _gateway_path)
# Import the service initialization
from modules.features.chatPlayground.mainChatPlayground import getServices
from modules.services import getInterface as getServices
from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum
from modules.datamodels.datamodelUam import User

View file

@@ -96,7 +96,7 @@ class MethodAiOperationsTester:
interfaceDbChat = interfaceDbChatObjects.getInterface(self.testUser)
# Import and initialize services
from modules.features.chatPlayground.mainChatPlayground import getServices
from modules.services import getInterface as getServices
# Get services first
self.services = getServices(self.testUser, None)

View file

@@ -20,7 +20,7 @@ if _gateway_path not in sys.path:
from modules.services import getInterface as getServices
from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
from modules.datamodels.datamodelUam import User
from modules.features.chatPlayground.mainChatPlayground import chatStart
from modules.features.workflow import chatStart
import modules.interfaces.interfaceDbChatObjects as interfaceDbChatObjects

View file

@@ -22,7 +22,7 @@ if _gateway_path not in sys.path:
from modules.services import getInterface as getServices
from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
from modules.datamodels.datamodelUam import User
from modules.features.chatPlayground.mainChatPlayground import chatStart
from modules.features.workflow import chatStart
import modules.interfaces.interfaceDbChatObjects as interfaceDbChatObjects

View file

@@ -1,11 +1,11 @@
"""
Unit tests for Options API (mainOptions.py).
Unit tests for Dynamic Options API (mainDynamicOptions.py).
Tests option retrieval, validation, and context-aware options.
"""
import pytest
from unittest.mock import Mock, patch
from modules.features.options.mainOptions import (
from modules.features.dynamicOptions.mainDynamicOptions import (
getOptions,
getAvailableOptionsNames,
STANDARD_ROLES,
@@ -92,7 +92,7 @@ class TestMainOptions:
mandateId="mandate1"
)
with patch('modules.features.options.mainOptions.getInterface') as mockGetInterface:
with patch('modules.features.dynamicOptions.mainDynamicOptions.getInterface') as mockGetInterface:
mockInterface = Mock()
mockInterface.getUserConnections.return_value = []
mockGetInterface.return_value = mockInterface
@@ -122,7 +122,7 @@ class TestMainOptions:
mockConn2.externalUsername = None
mockConn2.externalId = "external-id-123"
with patch('modules.features.options.mainOptions.getInterface') as mockGetInterface:
with patch('modules.features.dynamicOptions.mainDynamicOptions.getInterface') as mockGetInterface:
mockInterface = Mock()
mockInterface.getUserConnections.return_value = [mockConn1, mockConn2]
mockGetInterface.return_value = mockInterface