Compare commits

49 commits (author and date columns were empty in the source view):

| SHA1 |
| --- |
| 269d0fa5ff |
| 1f5b5b5580 |
| c42f0403df |
| e9ceac139a |
| ea566c270f |
| af68f6a8c8 |
| d3551f0287 |
| 4dc43b5e8f |
| 71f4265e06 |
| 908be0511b |
| dc0346904f |
| 2f1e4a24de |
| 1a675425a6 |
| be43876461 |
| dc8cddf4ed |
| e3c74329e5 |
| 5ef311a82e |
| 67cd15d8ea |
| 7fd942a1b5 |
| d0735ad342 |
| cdca242f82 |
| 2fe50f648a |
| 56068a8174 |
| 1c298528a1 |
| ef10db5fee |
| 8f900c9b54 |
| 5354694a14 |
| adc4007546 |
| 43afbfcdd0 |
| e8d3fd72a7 |
| 6fd9ce736d |
| 755e76add5 |
| 507fd54388 |
| 99b9dc97b7 |
| f29e0c9edc |
| b84ca37d99 |
| 1f40c59afc |
| 1cc5510888 |
| cb5f2d60c4 |
| 8524392208 |
| eea14f89dd |
| b4243e1589 |
| f89e4ab0c4 |
| d8cf4b993e |
| d40afae0a9 |
| bb0941ffa4 |
| 9926aa7f30 |
| 56be2cea63 |
| daf76fd166 |
149 changed files with 20486 additions and 743 deletions
.cursor/plans/swift_ios_app_nachbau_3dc75f35.plan.md (new file, 1036 lines)
File diff suppressed because it is too large
.cursor/plans/swift_ios_app_nachbau_80bb1212.plan.md (new file, 741 lines)
@@ -0,0 +1,741 @@
---
name: Swift iOS App Rebuild
overview: Complete implementation plan for rebuilding the React web frontend (frontend_nyla) as a native Swift/SwiftUI iOS/iPadOS app. The app talks to the existing FastAPI gateway backend and reproduces all UI screens, navigation, and API interfaces.
todos:
  - id: phase-0
    content: "Phase 0: Create Xcode project, folder structure, SPM dependencies, build configs (Dev/Int/Prod)"
    status: pending
  - id: phase-1
    content: "Phase 1: Core networking layer -- APIClient, SSEClient, WebSocketClient, CSRFManager (analogous to api.ts + sseClient.ts)"
    status: pending
  - id: phase-2
    content: "Phase 2: Authentication -- LocalAuth, MSAL, Google, biometrics, Keychain (analogous to authApi.ts + AuthProvider.tsx)"
    status: pending
  - id: phase-3
    content: "Phase 3: Domain models + FeatureStore (analogous to mandate.ts + featureStore.tsx)"
    status: pending
  - id: phase-4
    content: "Phase 4: App shell -- NavigationSplitView (iPad) / TabView (iPhone), Dashboard, Settings, backend-driven sidebar"
    status: pending
  - id: phase-5
    content: "Phase 5: i18n string catalogs (de/en/fr) + theme system (light/dark)"
    status: pending
  - id: phase-6
    content: "Phase 6: Core pages -- Store, GDPR, Basedata (Prompts/Files/Connections), billing transactions"
    status: pending
  - id: phase-7
    content: "Phase 7: Shared UI components -- FormGenerator, ContentPreview, ChatMessage, AccessRules, NotificationBell"
    status: pending
  - id: phase-8
    content: "Phase 8: Push notifications (APNs registration, deep-link handling)"
    status: pending
  - id: phase-9
    content: "Phase 9: Admin module -- all 16 admin pages (mandates, users, RBAC, invitations, wizards, etc.)"
    status: pending
  - id: phase-10
    content: "Phase 10: Trustee feature -- Dashboard, Documents, Positions, Roles, expense import, scan, accounting"
    status: pending
  - id: phase-11
    content: "Phase 11: Workspace feature -- chat streaming (SSE), files, datasources, voice"
    status: pending
  - id: phase-12
    content: "Phase 12: Chatbot feature -- SSE streaming chat, threads, conversations"
    status: pending
  - id: phase-13
    content: "Phase 13: Teamsbot feature -- sessions, WebSocket bot communication, voice, MFA"
    status: pending
  - id: phase-14
    content: "Phase 14: CommCoach feature -- coaching sessions, audio streaming, personas, dossier"
    status: pending
  - id: phase-15
    content: "Phase 15: ChatPlayground feature -- workflows, playground with SSE stream"
    status: pending
  - id: phase-16
    content: "Phase 16: Automation feature -- definitions, templates, logs, execute"
    status: pending
  - id: phase-17
    content: "Phase 17: CodeEditor feature -- editor with SSE stream, code display, apply"
    status: pending
  - id: phase-18
    content: "Phase 18: RealEstate/PEK feature -- MapKit integration, parcels, address search, BZO"
    status: pending
  - id: phase-19
    content: "Phase 19: Neutralization feature -- config, neutralize text/file"
    status: pending
  - id: phase-20
    content: "Phase 20: Billing extension -- admin views, Stripe checkout"
    status: pending
isProject: false
---

# Nyla iOS/iPadOS App -- Complete Implementation Plan

## Starting Point

The existing web frontend (`frontend_nyla`) is a **React 19 + Vite + TypeScript** application with:

- **12+ feature modules** (Trustee, Workspace, Chatbot, Teamsbot, CommCoach, CodeEditor, Automation, RealEstate, Neutralization, ChatPlayground, Billing, Admin)
- **21 API modules** under `src/api/*.ts` with a total of **200+ API endpoints**
- **120+ UI components**, including a dynamic FormGenerator, ContentPreview, chat streaming, maps, and charts
- **Multi-tenant architecture**: Mandate > Features > Instances > Views/Permissions
- **3 auth providers**: Local, Microsoft MSAL, Google OAuth
- **Real-time**: SSE streaming (Chat, Workspace, CodeEditor) + WebSockets (Voice)
- **Backend**: FastAPI (Python) on PostgreSQL, reachable at a configurable `VITE_API_BASE_URL`

---

## Technical Decisions

| Aspect | Decision |
| -------------------- | --------------------------------------------------- |
| Platform | iOS 18+ / iPadOS 18+ |
| UI framework | SwiftUI |
| Architecture | **MVVM + repository pattern** (see below) |
| Networking | URLSession + async/await |
| SSE | Custom SSE client on top of URLSession |
| WebSocket | URLSessionWebSocketTask |
| Auth | MSAL SDK, Google Sign-In SDK, Keychain + local auth |
| Biometrics | LocalAuthentication (Face ID / Touch ID) |
| State | `@Observable` (Observation framework, iOS 17+) |
| Navigation | `NavigationStack` + `NavigationSplitView` (iPad) |
| Dependency injection | Environment-based (SwiftUI `@Environment`) |
| Package manager | Swift Package Manager (SPM) |
| Maps | MapKit (SwiftUI) |
| Charts | Swift Charts |
| i18n | String catalogs (`.xcstrings`) for de/en/fr |
| Push | APNs + UserNotifications framework |
| PDF display | PDFKit |
| Markdown | Native AttributedString (iOS 15+) |
| Persistence | Keychain (secrets), UserDefaults (preferences) |
| Distribution | TestFlight |

### Architecture: MVVM + Repository Pattern

```
Presentation Layer (SwiftUI Views)
            |
            v
   ViewModels (@Observable)
            |
            v
   Repositories (protocols)
            |
            v
   API Services (URLSession)
            |
            v
   Gateway Backend (FastAPI)
```

Rationale: SwiftUI is natively MVVM-oriented. The repository pattern encapsulates data access and keeps the code testable. `@Observable` (iOS 17+) is lighter-weight and more performant than `ObservableObject`.
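A minimal sketch of this layering, using the `Mandate`/`FeaturesMyResponse` models defined in Phase 3 and file names from the project structure below; `MandateAPI.fetchMyFeatures()` is an illustrative assumption, not an existing API:

```swift
import Observation

// Domain layer: repository protocol -- views and view models depend only on this.
protocol MandateRepository {
    func loadMandates() async throws -> [Mandate]
}

// Data layer: default implementation backed by an API service.
struct DefaultMandateRepository: MandateRepository {
    let api: MandateAPI  // assumed thin wrapper around APIClient

    func loadMandates() async throws -> [Mandate] {
        try await api.fetchMyFeatures().mandates  // assumed method name
    }
}

// Presentation layer: @Observable view model, injected with the protocol.
@Observable
final class DashboardViewModel {
    private let repository: MandateRepository
    var mandates: [Mandate] = []
    var errorMessage: String?

    init(repository: MandateRepository) { self.repository = repository }

    func load() async {
        do { mandates = try await repository.loadMandates() }
        catch { errorMessage = error.localizedDescription }
    }
}
```

In unit tests, a stub conforming to `MandateRepository` replaces the default implementation without touching the view model.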
### Project Structure

```
NylaApp/
  NylaApp.swift                    // App entry point
  Config/
    AppConfig.swift                // API URLs, build configs
    Environment.swift              // Dev/Int/Prod environments
  Core/
    Networking/
      APIClient.swift              // Central HTTP client (= api.ts)
      APIError.swift               // Error types
      APIEndpoints.swift           // Endpoint definitions
      SSEClient.swift              // Server-sent events client
      WebSocketClient.swift        // WebSocket client
      CSRFManager.swift            // CSRF token handling
      RequestInterceptor.swift     // Auth/mandate headers
    Auth/
      AuthManager.swift            // Central auth logic
      LocalAuthService.swift       // Username/password
      MSALAuthService.swift        // Microsoft MSAL
      GoogleAuthService.swift      // Google Sign-In
      BiometricAuthService.swift   // Face ID / Touch ID
      KeychainService.swift        // Secure storage
    Navigation/
      AppRouter.swift              // Root navigation
      NavigationStore.swift        // Backend-driven nav state
      DeepLinkHandler.swift        // URL scheme handling
    Localization/
      Localizable.xcstrings        // String catalog
      LanguageManager.swift        // Language selection
    Theme/
      ThemeManager.swift           // Light/dark mode
      DesignTokens.swift           // Colors, spacing, fonts
    Permissions/
      PermissionChecker.swift      // Client-side RBAC checks
  Domain/
    Models/                        // Shared domain models
      Mandate.swift                // Mandate, Feature, Instance
      User.swift                   // User model
      Permissions.swift            // AccessLevel, TablePermission
      Pagination.swift             // PaginatedResponse<T>
      I18nLabel.swift              // Multilingual labels
    Repositories/                  // Repository protocols
      AuthRepository.swift
      MandateRepository.swift
      FeatureRepository.swift
      ...
  Data/
    API/                           // API implementations (= src/api/*.ts)
      AuthAPI.swift
      UserAPI.swift
      MandateAPI.swift
      FeaturesAPI.swift
      BillingAPI.swift
      TrusteeAPI.swift
      ... (21 modules)
    Repositories/                  // Repository implementations
      DefaultAuthRepository.swift
      DefaultMandateRepository.swift
      ...
  Features/                        // Feature modules (one folder each)
    Dashboard/
    Store/
    Settings/
    GDPR/
    Basedata/
      Prompts/
      Files/
      Connections/
    Billing/
    Admin/
      Mandates/
      Users/
      Access/
      Invitations/
      ...
    Trustee/
    Workspace/
    Chatbot/
    Teamsbot/
    CommCoach/
    CodeEditor/
    ChatPlayground/
    Automation/
    RealEstate/
    Neutralization/
  Shared/
    Components/                    // Reusable UI (= src/components/)
      FormGenerator/               // Dynamic forms
      ContentPreview/              // PDF, image, JSON preview
      ChatMessage/                 // Chat message rendering
      AccessRules/                 // Access rules editor
      NotificationBell/            // Notification badge + overlay
      SearchBar/
      LoadingView/
      ErrorView/
      EmptyStateView/
    Extensions/
    Utilities/
  Resources/
    Assets.xcassets
```

---

## Phase Plan

### Phase 0: Project Setup (1-2 days)

- Create the Xcode project (iOS 18+, SwiftUI app lifecycle)
- Lay out the folder structure per the scheme above
- Set up SPM dependencies:
  - `MSAL` (Microsoft Authentication Library for iOS)
  - `GoogleSignIn` (Google Sign-In SDK)
  - No further external dependencies needed (MapKit, Charts, PDFKit are system frameworks)
- Build configurations: **Dev** / **Int** / **Prod**, each with its own `API_BASE_URL` (see the sketch after this list)
  - Analogous to the `.env.dev` / `.env.int` / `.env.prod` files in the web frontend
  - Values: `http://localhost:8000` (Dev), INT URL, PROD URL
- TestFlight preparation: App ID, provisioning profile, signing
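A minimal sketch of resolving the per-configuration base URL in `AppConfig.swift`, assuming each build configuration injects an `API_BASE_URL` build setting into Info.plist (the key name is an assumption):

```swift
import Foundation

// AppConfig.swift -- resolves the backend base URL for the active build configuration.
enum AppConfig {
    // Assumes Info.plist contains the entry API_BASE_URL = $(API_BASE_URL) and each
    // configuration (Dev/Int/Prod) defines the setting, e.g. Dev -> http://localhost:8000.
    static var apiBaseURL: URL {
        guard let raw = Bundle.main.object(forInfoDictionaryKey: "API_BASE_URL") as? String,
              let url = URL(string: raw) else {
            fatalError("API_BASE_URL missing or invalid -- check the build configuration")
        }
        return url
    }
}
```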
### Phase 1: Core Networking Layer (3-5 days)

**Goal**: Equivalent of `[src/api.ts](frontend_nyla/src/api.ts)` + `[src/hooks/useApi.ts](frontend_nyla/src/hooks/useApi.ts)`

**APIClient.swift** -- central HTTP client:

- `URLSession.shared` with a custom configuration
- Cookie-based auth (`httpCookieStorage`)
- Request interceptor for:
  - `Authorization: Bearer` header (from the Keychain)
  - `X-Mandate-Id` / `X-Instance-Id` headers (from the current navigation context)
  - CSRF token for POST/PUT/PATCH/DELETE
- Response handler:
  - 401 -> redirect to login (analogous to web `api.ts`, lines 127-151)
  - 429 -> rate-limit warning
  - Generic error extraction (FastAPI `detail` array/string)
- Generic request methods: `get<T>()`, `post<T>()`, `put<T>()`, `delete<T>()`, `upload()` (sketched below)
- `Codable`-based JSON serialization
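A sketch of the generic request path under the assumptions above; the closures standing in for Keychain, navigation-context, and `CSRFManager` lookups are illustrative wiring, not final API:

```swift
import Foundation

enum APIError: Error {
    case unauthorized        // 401 -> caller redirects to login
    case rateLimited         // 429
    case server(status: Int, data: Data)
}

struct APIClient {
    let baseURL: URL
    let session: URLSession = .shared  // cookies handled via httpCookieStorage

    // Illustrative header sources (see RequestInterceptor.swift in the structure above).
    var bearerToken: () -> String? = { nil }
    var mandateContext: () -> (mandateId: String?, instanceId: String?) = { (nil, nil) }
    var csrfToken: () -> String? = { nil }

    func get<T: Decodable>(_ path: String) async throws -> T {
        try await request(path, method: "GET", body: nil)
    }

    func post<T: Decodable>(_ path: String, body: some Encodable) async throws -> T {
        try await request(path, method: "POST", body: JSONEncoder().encode(body))
    }

    private func request<T: Decodable>(_ path: String, method: String, body: Data?) async throws -> T {
        var req = URLRequest(url: baseURL.appending(path: path))
        req.httpMethod = method
        req.httpBody = body
        req.setValue("application/json", forHTTPHeaderField: "Content-Type")
        if let token = bearerToken() {
            req.setValue("Bearer \(token)", forHTTPHeaderField: "Authorization")
        }
        let ctx = mandateContext()
        if let id = ctx.mandateId { req.setValue(id, forHTTPHeaderField: "X-Mandate-Id") }
        if let id = ctx.instanceId { req.setValue(id, forHTTPHeaderField: "X-Instance-Id") }
        if method != "GET", let csrf = csrfToken() {
            req.setValue(csrf, forHTTPHeaderField: "X-CSRF-Token")
        }
        let (data, response) = try await session.data(for: req)
        guard let http = response as? HTTPURLResponse else { throw URLError(.badServerResponse) }
        switch http.statusCode {
        case 200..<300: return try JSONDecoder().decode(T.self, from: data)
        case 401: throw APIError.unauthorized
        case 429: throw APIError.rateLimited
        default: throw APIError.server(status: http.statusCode, data: data)
        }
    }
}
```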

**SSEClient.swift** -- server-sent events:

- Analogous to `[src/utils/sseClient.ts](frontend_nyla/src/utils/sseClient.ts)`
- URLSession with a `bytes(for:)` async stream
- Parsing of `data:` lines (see the sketch below)
- Callbacks: `onMessage`, `onError`, `onComplete`
- Needed for: Workspace, Chatbot, CodeEditor, CommCoach streaming
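A sketch of the `data:` line parsing on top of `URLSession.bytes(for:)`; the callback names match the list above, and the client takes a ready-made `URLRequest` so it also covers the POST-based streams noted under "Open Points":

```swift
import Foundation

final class SSEClient {
    var onMessage: (String) -> Void = { _ in }
    var onError: (Error) -> Void = { _ in }
    var onComplete: () -> Void = {}

    private var task: Task<Void, Never>?

    func connect(request: URLRequest) {
        task = Task {
            do {
                let (bytes, response) = try await URLSession.shared.bytes(for: request)
                guard (response as? HTTPURLResponse)?.statusCode == 200 else {
                    throw URLError(.badServerResponse)
                }
                // Events arrive as "data: {...}" lines; blank lines separate events.
                for try await line in bytes.lines where line.hasPrefix("data:") {
                    onMessage(String(line.dropFirst(5)).trimmingCharacters(in: .whitespaces))
                }
                onComplete()
            } catch {
                onError(error)
            }
        }
    }

    func cancel() { task?.cancel() }
}
```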
**WebSocketClient.swift** -- WebSockets:

- `URLSessionWebSocketTask`
- For voice features (Teamsbot: `/api/teamsbot/{instanceId}/bot/ws/{sessionId}`)
- Ping/pong, reconnect logic (see the sketch below)
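A sketch of the receive loop with a periodic ping; the 20-second interval and the reconnect policy (left to the caller) are assumptions:

```swift
import Foundation

final class WebSocketClient {
    private var task: URLSessionWebSocketTask?
    var onText: (String) -> Void = { _ in }
    var onClose: (Error?) -> Void = { _ in }

    func connect(url: URL) {
        let task = URLSession.shared.webSocketTask(with: url)
        self.task = task
        task.resume()
        receiveLoop(task)
        pingLoop(task)
    }

    private func receiveLoop(_ task: URLSessionWebSocketTask) {
        Task {
            do {
                while true {
                    let message = try await task.receive()
                    if case let .string(text) = message { onText(text) }
                }
            } catch {
                onClose(error)  // caller decides whether to reconnect
            }
        }
    }

    private func pingLoop(_ task: URLSessionWebSocketTask) {
        Task {
            while task.closeCode == .invalid {  // .invalid means the socket is still open
                try? await Task.sleep(for: .seconds(20))
                task.sendPing { _ in }          // errors surface in the receive loop
            }
        }
    }

    func disconnect() { task?.cancel(with: .goingAway, reason: nil) }
}
```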
**CSRFManager.swift**:

- Token generation and storage
- Analogous to `[src/utils/csrfUtils.ts](frontend_nyla/src/utils/csrfUtils.ts)`

### Phase 2: Authentication (3-5 days)

**Goal**: All 3 auth providers + biometrics

**Mapping web -> Swift:**

| Web (authApi.ts) | Swift |
| ---------------------------------------- | -------------------------------------------- |
| `POST /api/local/login` (form-data) | `LocalAuthService.login(username:password:)` |
| `POST /api/local/register` | `LocalAuthService.register(...)` |
| `POST /api/local/password-reset-request` | `LocalAuthService.requestPasswordReset(...)` |
| `POST /api/local/password-reset` | `LocalAuthService.resetPassword(...)` |
| `GET /api/local/available?username=` | `LocalAuthService.checkAvailability(...)` |
| `GET /api/local/me` | `AuthManager.fetchCurrentUser()` |
| `POST /api/local/logout` | `AuthManager.logout()` |
| MSAL login/callback | `MSALAuthService` via MSAL SDK |
| `GET /api/msft/me` | `MSALAuthService.fetchUser()` |
| Google login/callback | `GoogleAuthService` via Google Sign-In SDK |
| `GET /api/google/me` | `GoogleAuthService.fetchUser()` |

**AuthManager.swift** (central):

- Manages the active auth provider (`local` / `msft` / `google`)
- Stores auth state in the Keychain (not UserDefaults!)
- Publishes `isAuthenticated`, `currentUser`, `authAuthority`
- Analogous to `[src/providers/auth/AuthProvider.tsx](frontend_nyla/src/providers/auth/AuthProvider.tsx)`

**BiometricAuthService.swift**:

- `LAContext.evaluatePolicy(.deviceOwnerAuthenticationWithBiometrics)`
- After the first successful login: store credentials in the Keychain
- On app start: Face ID/Touch ID -> Keychain credentials -> auto-login (see the sketch below)
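A sketch of the biometric gate; `KeychainService.loadCredentials()` and the `LocalAuthService.login` call in the usage comment reuse names from the structure above but are illustrative, not finished APIs:

```swift
import LocalAuthentication

struct BiometricAuthService {
    /// Returns true if Face ID / Touch ID succeeded, false if biometrics are unavailable.
    func authenticate(reason: String) async throws -> Bool {
        let context = LAContext()
        var error: NSError?
        guard context.canEvaluatePolicy(.deviceOwnerAuthenticationWithBiometrics, error: &error) else {
            return false  // not enrolled / not available on this device
        }
        return try await context.evaluatePolicy(
            .deviceOwnerAuthenticationWithBiometrics,
            localizedReason: reason
        )
    }
}

// App-start auto-login (illustrative wiring):
// if try await BiometricAuthService().authenticate(reason: "Sign in to Nyla"),
//    let creds = KeychainService.loadCredentials() {
//     try await LocalAuthService().login(username: creds.username, password: creds.password)
// }
```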
**Login screen (SwiftUI)**:

- Username/password fields
- "Sign in with Microsoft" button (MSAL)
- "Sign in with Google" button (Google Sign-In)
- "Face ID / Touch ID" option (when available)
- Registration / forgot-password links
- Analogous to `[src/pages/Login.tsx](frontend_nyla/src/pages/Login.tsx)`

### Phase 3: Domain Models + Feature Store (2-3 days)

**Goal**: All shared data models + feature state

Central models (analogous to `[src/types/mandate.ts](frontend_nyla/src/types/mandate.ts)`):

```swift
// Mandate.swift
struct I18nLabel: Codable { var de: String; var en: String; var fr: String? }
enum AccessLevel: String, Codable { case none = "n", my = "m", group = "g", all = "a" }
struct TablePermission: Codable { var view: Bool; var read, create, update, delete: AccessLevel }
struct FieldPermission: Codable { var read: Bool; var write: Bool }
struct InstancePermissions: Codable { var tables: [String: TablePermission]; var fields: [String: [String: FieldPermission]]?; var views: [String: Bool]; var isAdmin: Bool? }
struct FeatureInstance: Codable, Identifiable { var id: String; var featureCode, mandateId, mandateName, instanceLabel: String; var userRoles: [String]; var permissions: InstancePermissions }
struct MandateFeature: Codable { var code: String; var label: I18nLabel; var icon: String; var instances: [FeatureInstance] }
struct Mandate: Codable, Identifiable { var id, name: String; var label, code: String?; var features: [MandateFeature] }
struct FeaturesMyResponse: Codable { var mandates: [Mandate] }
```

**FeatureStore.swift** (analogous to `[src/stores/featureStore.tsx](frontend_nyla/src/stores/featureStore.tsx)`):

- `@Observable class FeatureStore`
- `loadFeatures()` -> `GET /api/features/my`
- Cache: `[String: FeatureInstance]` for fast lookups
- Methods: `getMandateById()`, `getInstanceById()`, `getAllInstances()`, etc. (see the sketch below)
- Injected via SwiftUI `@Environment`
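A sketch of the store, assuming the generic `APIClient.get` from Phase 1; the flattening into an id-keyed cache is the main point:

```swift
import Observation

@Observable
final class FeatureStore {
    private let api: APIClient
    private(set) var mandates: [Mandate] = []
    private var instanceCache: [String: FeatureInstance] = [:]

    init(api: APIClient) { self.api = api }

    func loadFeatures() async throws {
        let response: FeaturesMyResponse = try await api.get("/api/features/my")
        mandates = response.mandates
        // Flatten all instances once for O(1) lookups.
        instanceCache = Dictionary(
            uniqueKeysWithValues: mandates
                .flatMap(\.features)
                .flatMap(\.instances)
                .map { ($0.id, $0) }
        )
    }

    func getMandateById(_ id: String) -> Mandate? { mandates.first { $0.id == id } }
    func getInstanceById(_ id: String) -> FeatureInstance? { instanceCache[id] }
    func getAllInstances() -> [FeatureInstance] { Array(instanceCache.values) }
}
```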
### Phase 4: App Shell + Navigation (4-6 days)

**Goal**: MainLayout + FeatureLayout + backend-driven navigation

**Adaptive layout:**

- **iPad**: `NavigationSplitView` (sidebar + detail) -- analogous to the web sidebar
- **iPhone**: `TabView` with the main areas + a navigation stack per tab

**Sidebar / navigation:**

- Backend-driven: `GET /api/navigation?language={lang}` returns the navigation tree
- Analogous to `[src/components/Navigation/MandateNavigation.tsx](frontend_nyla/src/components/Navigation/MandateNavigation.tsx)`
- Hierarchy: Mandate > Feature > Instance > Views
- Icon mapping: SF Symbols instead of React Icons (build a mapping table)

**Screen routing:**

- `NavigationStack` with `NavigationPath` for programmatic navigation
- Deep-link scheme: `nyla://mandates/{mandateId}/{featureCode}/{instanceId}/{view}` (parsing sketch below)
- Feature-view dispatcher: analogous to `VIEW_COMPONENTS` in `[src/pages/FeatureView.tsx](frontend_nyla/src/pages/FeatureView.tsx)`
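A sketch of turning the URL scheme into a typed route; the `Route` enum shape for `DeepLinkHandler.swift` is an assumption:

```swift
import Foundation

enum Route: Hashable {
    case featureView(mandateId: String, featureCode: String, instanceId: String, view: String)
}

// nyla://mandates/{mandateId}/{featureCode}/{instanceId}/{view}
// ("mandates" is the URL host; the four path segments carry the route.)
func parseDeepLink(_ url: URL) -> Route? {
    guard url.scheme == "nyla", url.host == "mandates" else { return nil }
    let parts = url.pathComponents.filter { $0 != "/" }
    guard parts.count == 4 else { return nil }
    return .featureView(mandateId: parts[0], featureCode: parts[1],
                        instanceId: parts[2], view: parts[3])
}
```

Routes parsed this way can be appended to the `NavigationPath` from `.onOpenURL` and reused for notification taps in Phase 8.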
**Screens in Phase 4:**

- Dashboard (`/`) -- mandate/instance cards, analogous to `[src/pages/Dashboard.tsx](frontend_nyla/src/pages/Dashboard.tsx)`
- Settings (`/settings`) -- theme toggle, language (de/en/fr), profile
- UserSection in the sidebar footer

### Phase 5: i18n + Theme (2-3 days)

**Internationalization:**

- Xcode string catalog (`.xcstrings`) for de/en/fr
- Port all static strings from the web locales: `[src/locales/de.ts](frontend_nyla/src/locales/de.ts)`, `en.ts`, `fr.ts`
- Dynamic labels (I18nLabel from the backend): helper `label.localized(lang:)` analogous to `getLabel()` on the web (see the sketch after this list)
- `LanguageManager` stores the preference in UserDefaults
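A sketch of the helper on the `I18nLabel` model from Phase 3; the fallback order (fr falls back to de) is an assumption that mirrors the optional `fr` field:

```swift
extension I18nLabel {
    /// Analogous to the web's getLabel(): pick the requested language, fall back to de.
    func localized(lang: String) -> String {
        switch lang {
        case "en": return en
        case "fr": return fr ?? de
        default:   return de
        }
    }
}
```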
**Theme:**

- SwiftUI `.preferredColorScheme()` for system integration
- Custom `DesignTokens` for consistent colors/spacing
- Analogous to `[src/styles/themes/light.css](frontend_nyla/src/styles/themes/light.css)` + `.dark-theme`

### Phase 6: Core Pages (5-7 days)

**Store** (feature marketplace):

- `GET /api/store/features` -> feature list
- `POST /api/store/activate` / `POST /api/store/deactivate`
- Analogous to `[src/pages/Store.tsx](frontend_nyla/src/pages/Store.tsx)`

**GDPR**:

- `GET /api/user/me/data-export` + `/data-portability`
- `DELETE /api/user/me/`
- Analogous to `[src/pages/GDPR.tsx](frontend_nyla/src/pages/GDPR.tsx)`

**Basedata - Prompts** (`/basedata/prompts`):

- CRUD on `/api/prompts` with the FormGenerator
- Analogous to `[src/pages/PromptsPage.tsx](frontend_nyla/src/pages/PromptsPage.tsx)`

**Basedata - Files** (`/basedata/files`):

- `GET /api/files/list`, upload, download, preview
- Analogous to `[src/pages/FilesPage.tsx](frontend_nyla/src/pages/FilesPage.tsx)`
- Use `UIDocumentPickerViewController` (via a UIKit bridge) for file upload
- `QuickLook` for file preview

**Basedata - Connections** (`/basedata/connections`):

- CRUD on `/api/connections/`
- Connect/disconnect actions
- Analogous to `[src/pages/ConnectionsPage.tsx](frontend_nyla/src/pages/ConnectionsPage.tsx)`

**Billing** (`/billing/transactions`):

- `GET /api/billing/balance`, `/transactions`, `/statistics/{period}`
- Swift Charts for the statistics visualization
- Analogous to `[src/pages/billing/BillingDataView.tsx](frontend_nyla/src/pages/billing/BillingDataView.tsx)`

### Phase 7: Shared UI Components (5-8 days)

**FormGenerator** (central; used by almost every feature):

- Analogous to `[src/components/FormGenerator/](frontend_nyla/src/components/FormGenerator/)`
- Dynamic forms based on `AttributeDefinition[]` from the backend (`GET /api/attributes/{entityType}`)
- Field types: String, Email, Select, Multiselect, Textarea, Checkbox, File, Number, DateTime, Multilingual
- Table view (`FormGeneratorTable`) + list view (`FormGeneratorList`)
- Action buttons (edit, delete, download, custom)
- Pagination support

**ContentPreview**:

- PDF: `PDFKitView` (UIKit PDFView in a UIViewRepresentable)
- Images: AsyncImage
- JSON: syntax highlighting
- HTML: WKWebView
- Analogous to `[src/components/ContentPreview/](frontend_nyla/src/components/ContentPreview/)`

**NotificationBell**:

- `GET /api/notifications/unread-count` (polling)
- Push notifications via APNs
- In-app notification sheet
- Analogous to `[src/components/NotificationBell/](frontend_nyla/src/components/NotificationBell/)`

**Chat message components**:

- Message bubbles with Markdown rendering
- File attachments
- Streaming indicator (typing animation)
- Auto-scroll
- Analogous to `[src/components/UiComponents/Messages/](frontend_nyla/src/components/UiComponents/Messages/)`

**AccessRules components**:

- Table + editor for RBAC rules
- Analogous to `[src/components/AccessRules/](frontend_nyla/src/components/AccessRules/)`

### Phase 8: Push Notifications (2-3 days)

- APNs registration in the `AppDelegate` (see the sketch after this list)
- Send the device token to the backend (new endpoint, or the existing `/api/messaging/subscriptions`)
- `UNUserNotificationCenter` for local + remote notifications
- Deep-link handling from notification taps
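A sketch of the registration flow; since the backend endpoint is still open (point 2 under "Open Points / Risks"), the `registerDevice(token:)` call in the comment is a placeholder:

```swift
import UIKit
import UserNotifications

enum PushRegistration {
    /// Ask for permission, then register with APNs on the main thread.
    static func requestAndRegister() async throws {
        let granted = try await UNUserNotificationCenter.current()
            .requestAuthorization(options: [.alert, .badge, .sound])
        guard granted else { return }
        await MainActor.run {
            UIApplication.shared.registerForRemoteNotifications()
        }
    }
}

// AppDelegate: forward the APNs token to the gateway (placeholder backend call).
// func application(_ application: UIApplication,
//                  didRegisterForRemoteNotificationsWithDeviceToken deviceToken: Data) {
//     let token = deviceToken.map { String(format: "%02x", $0) }.joined()
//     Task { try await registerDevice(token: token) }
// }
```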
### Phase 9: Admin Module (5-7 days)

All 16 admin pages, analogous to `[src/pages/admin/](frontend_nyla/src/pages/admin/)`:

| Admin page | API endpoints |
| -------------------- | ------------------------------------------ |
| Mandates | CRUD `/api/mandates/` |
| Users | CRUD `/api/users/` |
| User-Mandates | `/api/mandates/{id}/users` |
| Access Hub | `/api/rbac/permissions`, `/api/rbac/rules` |
| Feature Instances | `/api/features/instances` |
| Feature Roles | `/api/features/templates/roles` |
| Feature Users | `/api/features/instances/{id}/users` |
| Invitations | CRUD `/api/invitations/` |
| Mandate Roles | `/api/rbac/roles` |
| Role Permissions | `/api/rbac/rules/by-role/{roleId}` |
| User Access Overview | `/api/admin/user-access-overview/`* |
| Billing Admin | `/api/billing/admin/`* |
| Automation Events | `/api/admin/automation-events` |
| Logs | `/api/admin/logs` |
| Mandate Wizard | combination of several endpoints |
| Invitation Wizard | combination of several endpoints |

### Phases 10-20: Feature Modules (3-7 days per feature)

Every feature follows the same pattern:

1. Create the **API module** (all endpoints of the feature)
2. **ViewModels** for each view
3. **SwiftUI views** for each registered view
4. **Feature-specific components** where needed

---

#### Phase 10: Trustee (5-7 days)

Views: Dashboard, Documents, Positions, Instance-Roles, Expense-Import, Scan-Upload, Accounting Settings

API base: `/api/trustee/{instanceId}/`

- Organisations, roles, access, contracts, documents, positions CRUD
- Accounting: connectors, config, sync
- Document upload with base64 conversion
- Options endpoints for dropdowns

Specifics:

- Many nested CRUD entities (Organisation > Contract > Document > Position)
- Scan upload: iOS camera integration + VisionKit (OCR)

#### Phase 11: Workspace (5-7 days)

Views: Dashboard (chat stream), Settings

API base: `/api/workspace/{instanceId}/`

- SSE streaming for chat (`POST .../start/stream`)
- Workflows, messages, files, datasources CRUD
- Voice: transcribe, synthesize, settings
- File browser with folder structure

Specifics:

- **Central SSE streaming** -- the keep-alive pattern from the web (`WorkspaceKeepAlive`) has to be solved in Swift via Task/Actor (see the sketch below)
- Voice: AVFoundation for audio recording, URLSession for upload
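A sketch of how the keep-alive could map onto an actor that owns the long-running stream task independently of view lifecycles; the single-stream-per-session policy is an assumption:

```swift
import Foundation

// Owns at most one live SSE stream and keeps it running across SwiftUI view updates.
actor WorkspaceStreamSession {
    private var streamTask: Task<Void, Never>?

    func start(request: URLRequest, onEvent: @escaping @Sendable (String) -> Void) {
        streamTask?.cancel()  // replace any previous stream
        streamTask = Task {
            do {
                let (bytes, _) = try await URLSession.shared.bytes(for: request)
                for try await line in bytes.lines where line.hasPrefix("data:") {
                    onEvent(String(line.dropFirst(5)))
                }
            } catch is CancellationError {
                // expected on stop()
            } catch {
                // surface the error to the UI via a dedicated handler
            }
        }
    }

    func stop() {
        streamTask?.cancel()
        streamTask = nil
    }
}
```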
#### Phase 12: Chatbot (3-5 days)

Views: Conversations, Settings

API base: `/api/chatbot/{instanceId}/`

- `POST .../start/stream` -- SSE streaming via fetch (not Axios!)
- Threads: list, get, delete
- Stop workflow

Specifics:

- Streaming chat with file attachments
- Analogous to `chatbotApi.startChatbotStreamApi` -- custom SSE via POST

#### Phase 13: Teamsbot (4-6 days)

Views: Dashboard, Sessions, Settings

API base: `/api/teamsbot/{instanceId}/`

- Sessions CRUD + stream (EventSource/SSE)
- Config, system bots, user account
- Voice test
- MFA for sessions
- WebSocket for bot communication (`/bot/ws/{sessionId}`)

Specifics:

- **WebSocket** for live bot interaction
- SSE via EventSource for the session stream
- Screenshot display

#### Phase 14: CommCoach (4-6 days)

Views: Dashboard, Coaching, Dossier, Settings

API base: `/api/commcoach/{instanceId}/`

- Contexts CRUD + archive/activate
- Sessions: start, message stream, audio stream, complete, cancel
- Tasks CRUD + status
- Personas CRUD, documents, badges, score history
- Voice: languages, voices, TTS
- Export (dossier, session)

Specifics:

- **Audio streaming**: microphone recording -> POST audio stream
- SSE for session messages
- Score/badge visualization

#### Phase 15: ChatPlayground (3-5 days)

Views: Playground, Workflows

API base: `/api/chatplayground/{instanceId}/`

- Start/stop workflow (with SSE stream)
- Workflows CRUD + status/logs/messages
- Attributes, actions

#### Phase 16: Automation (3-5 days)

Views: Definitions, Templates, Logs

API base: `/api/automations/`

- Automations CRUD + execute + duplicate
- Templates CRUD
- Workflow management (same API as ChatPlayground, different base path)

#### Phase 17: CodeEditor (3-5 days)

Views: Editor, Workflows

API base: `/api/codeeditor/{instanceId}/`

- Start/stop/apply (with SSE stream)
- ChatData, workflows, files, file content

Specifics:

- Code display: syntax highlighting (e.g. via the `Highlightr` SPM package, or custom)
- Diff view for code apply

#### Phase 18: RealEstate / PEK (5-7 days)

Views: Dashboard (map), Instance-Roles

API base: `/api/realestate/{instanceId}/`

- Projects + parcels CRUD
- Parcel search, WFS, selection summary, adjacent parcels
- Address autocomplete
- BZO information, parcel documents
- Municipalities

Specifics:

- **MapKit** integration: parcel visualization on the map (see the sketch below)
- Address autocomplete: MKLocalSearchCompleter or the backend API
- Complex map interaction (parcel selection, adjacent parcels)
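A sketch of parcel rendering with the SwiftUI `Map` content builder (iOS 17+); the `Parcel` model with a WGS84 boundary ring is an assumption about the backend payload:

```swift
import MapKit
import SwiftUI

struct Parcel: Identifiable {
    let id: String
    let boundary: [CLLocationCoordinate2D]  // assumed WGS84 ring from the backend
}

struct ParcelMapView: View {
    let parcels: [Parcel]
    @Binding var selectedParcelId: String?

    var body: some View {
        Map {
            ForEach(parcels) { parcel in
                MapPolygon(coordinates: parcel.boundary)
                    .foregroundStyle(
                        parcel.id == selectedParcelId
                            ? Color.orange.opacity(0.45)
                            : Color.teal.opacity(0.25)
                    )
            }
        }
        .mapStyle(.standard)
    }
}
```

Hit-testing a tap back to a parcel (for selection and adjacent-parcel lookups) needs `MapReader`/`MapProxy.convert(_:from:)` plus a point-in-polygon test, which is where most of the map-interaction effort will go.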
#### Phase 19: Neutralization (2-3 days)

Views: Dashboard/Playground (same view)

API base: `/api/neutralization/`

- Config GET/POST
- Neutralize file/text, resolve text
- Process SharePoint, batch process
- Stats, attributes

#### Phase 20: Billing View Extension (1-2 days)

Admin billing views, where not fully covered in Phase 9:

- Checkout (Stripe -- SFSafariViewController for the redirect)
- Mandate/user balances and transactions

---

## API Header Convention (for all requests)

Every API request must send the following headers (analogous to `[src/api.ts](frontend_nyla/src/api.ts)`):

| Header | Source | When |
| -------------------------------- | ------------------ | --------------------- |
| `Authorization: Bearer {token}` | Keychain | when a JWT is present |
| `X-Mandate-Id: {mandateId}` | navigation context | on feature pages |
| `X-Instance-Id: {instanceId}` | navigation context | on feature pages |
| `X-CSRF-Token: {token}` | CSRFManager | POST/PUT/PATCH/DELETE |
| `Content-Type: application/json` | standard | JSON bodies |
| Cookie (httpOnly) | URLSession | automatic |

---

## Overall Effort Estimate

| Phase | Days (estimated) |
| ------------------------------- | ----------------- |
| Phase 0: Setup | 1-2 |
| Phase 1: Networking | 3-5 |
| Phase 2: Authentication | 3-5 |
| Phase 3: Domain models + store | 2-3 |
| Phase 4: App shell + navigation | 4-6 |
| Phase 5: i18n + theme | 2-3 |
| Phase 6: Core pages | 5-7 |
| Phase 7: Shared UI components | 5-8 |
| Phase 8: Push notifications | 2-3 |
| Phase 9: Admin | 5-7 |
| Phase 10: Trustee | 5-7 |
| Phase 11: Workspace | 5-7 |
| Phase 12: Chatbot | 3-5 |
| Phase 13: Teamsbot | 4-6 |
| Phase 14: CommCoach | 4-6 |
| Phase 15: ChatPlayground | 3-5 |
| Phase 16: Automation | 3-5 |
| Phase 17: CodeEditor | 3-5 |
| Phase 18: RealEstate | 5-7 |
| Phase 19: Neutralization | 2-3 |
| Phase 20: Billing extension | 1-2 |
| **Total** | **~70-105 days** |

Note: this is a single-developer estimate. With a team (e.g. 2-3 devs) the work can be parallelized, especially from Phase 10 onwards (the features are independent of one another).

---

## Open Points / Risks

1. **Backend changes**: the backend partly sets httpOnly cookies after a browser redirect (MSAL, Google). For a native app, the backend may need to support alternative token flows (e.g. device code flow or token exchange).
2. **Push notifications**: the backend currently has no APNs token management. A new endpoint `/api/notifications/register-device` must be implemented in the gateway.
3. **SSE over POST**: the web app uses `fetch` POST + ReadableStream for SSE (not the standard EventSource GET). In Swift this has to be reproduced with `URLSession.bytes(for:)`.
4. **Stripe checkout**: on the web, a Stripe redirect opens. On iOS: SFSafariViewController or the Stripe iOS SDK.
5. **SharePoint integration**: some features use a SharePoint folder picker. On iOS an alternative UI has to be built (a list instead of a file picker).
6. **WebSocket auth**: the web client uses cookies for WebSocket auth. iOS `URLSessionWebSocketTask` supports cookies via the URLSession configuration.
.forgejo/workflows/deploy.yml (new file, 30 lines)
@@ -0,0 +1,30 @@

name: Deploy Gateway

on:
  push:
    branches:
      - main

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Deploy to Infomaniak VM
        env:
          SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
        run: |
          mkdir -p ~/.ssh
          echo "$SSH_PRIVATE_KEY" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          echo "StrictHostKeyChecking=no" >> ~/.ssh/config
          echo "UserKnownHostsFile=/dev/null" >> ~/.ssh/config
          ssh -i ~/.ssh/deploy_key ubuntu@api.poweron.swiss "
            cd /srv/gateway/current &&
            git remote set-url origin ssh://git@git.poweron.swiss:2222/PowerOn/gateway.git &&
            git pull &&
            cp env_prod_forgejo.env .env &&
            rm -f env_*.env &&
            source .venv/bin/activate &&
            pip install -r requirements.txt --no-cache-dir &&
            sudo systemctl restart gateway
          "
app.py (30 changed lines)

@@ -310,10 +310,18 @@ async def lifespan(app: FastAPI):
     # Register all feature definitions in RBAC catalog (for /api/features/ endpoint)
     try:
         from modules.security.rbacCatalog import getCatalogService
-        from modules.system.registry import registerAllFeaturesInCatalog
+        from modules.system.registry import registerAllFeaturesInCatalog, syncCatalogFeaturesToDb
         catalogService = getCatalogService()
         registerAllFeaturesInCatalog(catalogService)
         logger.info("Feature catalog registration completed")
+        # Persist the in-memory feature registry into the Feature DB-table so
+        # the FeatureInstance.featureCode FK has real targets. Without this
+        # every FeatureInstance row would be flagged as orphan by the
+        # SysAdmin DB-health scan (cf. interfaceFeatures.upsertFeature).
+        try:
+            syncCatalogFeaturesToDb(catalogService)
+        except Exception as e:
+            logger.error(f"Feature DB sync failed: {e}")
     except Exception as e:
         logger.error(f"Feature catalog registration failed: {e}")

@@ -380,6 +388,15 @@ async def lifespan(app: FastAPI):
     from modules.shared.auditLogger import registerAuditLogCleanupScheduler
     registerAuditLogCleanupScheduler()

+    # Recover background jobs that were RUNNING when the previous worker died
+    try:
+        from modules.serviceCenter.services.serviceBackgroundJobs.mainBackgroundJobService import (
+            recoverInterruptedJobs,
+        )
+        recoverInterruptedJobs()
+    except Exception as e:
+        logger.warning(f"BackgroundJob recovery failed (non-critical): {e}")
+
     yield

     # --- Stop Managers ---

@@ -502,14 +519,18 @@ from modules.auth import (
     ProactiveTokenRefreshMiddleware,
 )

-# i18n language detection middleware (sets per-request language from Accept-Language header)
+# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
+# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
+# without having to thread them through every call site.
 from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
+from modules.shared.timeUtils import _setRequestTimezone

 @app.middleware("http")
-async def _i18nMiddleware(request: Request, call_next):
+async def _requestContextMiddleware(request: Request, call_next):
     acceptLang = request.headers.get("Accept-Language", "")
     lang = normalizePrimaryLanguageTag(acceptLang, "de")
     _setLanguage(lang)
+    _setRequestTimezone(request.headers.get("X-User-Timezone", ""))
     return await call_next(request)

 app.add_middleware(CSRFMiddleware)

@@ -627,6 +648,9 @@ app.include_router(billingRouter)
 from modules.routes.routeSubscription import router as subscriptionRouter
 app.include_router(subscriptionRouter)

+from modules.routes.routeJobs import router as jobsRouter
+app.include_router(jobsRouter)
+
 # ============================================================================
 # SYSTEM ROUTES (Navigation, etc.)
 # ============================================================================
demoData/pwg/_generateScans.py (new file, 125 lines)
@@ -0,0 +1,125 @@

"""Generate the 3 fictitious PWG scan PDFs used by the pilot demo.

Run: python _generateScans.py

Produces:
    scans/mieter01-bestaetigt.pdf          -> all fields ok, signed
    scans/mieter02-abweichung-betrag.pdf   -> rent on scan != journal lines
    scans/mieter03-keine-unterschrift.pdf  -> hasSignature=false
"""
from pathlib import Path

from reportlab.lib.pagesizes import A4
from reportlab.pdfgen import canvas


def _renderForm(outPath: Path, *, tenantName: str, tenantAddress: str,
                objectAddress: str, period: str, rentChf: float,
                tenantNotes: str, hasSignature: bool) -> None:
    c = canvas.Canvas(str(outPath), pagesize=A4)
    w, h = A4
    margin = 60
    y = h - margin

    c.setFont("Helvetica-Bold", 16)
    c.drawString(margin, y, "Stiftung PWG")
    y -= 18
    c.setFont("Helvetica", 10)
    c.drawString(margin, y, "Postfach 1234 · 8000 Zürich")
    y -= 30

    c.setFont("Helvetica-Bold", 14)
    c.drawString(margin, y, f"Jahresmietzinsbestätigung {period}")
    y -= 28

    c.setFont("Helvetica", 11)
    c.drawString(margin, y, "Sehr geehrte Damen und Herren,")
    y -= 18
    c.drawString(margin, y, "hiermit bestätige ich die nachstehenden Angaben für die o.g. Periode:")
    y -= 28

    rows = [
        ("Mieter / in:", tenantName),
        ("Wohnadresse:", tenantAddress),
        ("Mietobjekt:", objectAddress),
        ("Periode:", period),
        ("Bestätigter Mietzins (CHF, monatlich):", f"{rentChf:.2f}"),
        ("Anmerkungen:", tenantNotes or "(keine)"),
    ]
    c.setFont("Helvetica", 11)
    for lab, val in rows:
        c.drawString(margin, y, lab)
        c.drawString(margin + 220, y, str(val))
        y -= 18
    y -= 28

    c.drawString(margin, y, "Ort, Datum: Zürich, 12.04.2026")
    y -= 28
    c.drawString(margin, y, "Unterschrift Mieter / in:")
    y -= 36

    if hasSignature:
        c.setFont("Helvetica-Oblique", 14)
        c.drawString(margin + 220, y + 24, _signatureFor(tenantName))
    else:
        c.setFont("Helvetica", 9)
        c.drawString(margin + 220, y + 24, "(handschriftlich)")
    c.line(margin + 215, y + 22, margin + 415, y + 22)

    c.showPage()
    c.save()


def _signatureFor(name: str) -> str:
    parts = name.split()
    if not parts:
        return "____"
    return parts[0][0] + ". " + parts[-1]


def _main() -> None:
    here = Path(__file__).resolve().parent
    outDir = here / "scans"
    outDir.mkdir(parents=True, exist_ok=True)

    # 1) bestaetigt -- matches the seed exactly (Anna Müller, 1850.00)
    _renderForm(
        outDir / "mieter01-bestaetigt.pdf",
        tenantName="Anna Müller",
        tenantAddress="Bahnhofstrasse 12, 8001 Zürich",
        objectAddress="Bahnhofstrasse 12, 3.OG, 8001 Zürich",
        period="2026",
        rentChf=1850.00,
        tenantNotes="",
        hasSignature=True,
    )

    # 2) abweichung_betrag -- tenant enters 2300, accounting says 2200
    _renderForm(
        outDir / "mieter02-abweichung-betrag.pdf",
        tenantName="Beat Schneider",
        tenantAddress="Limmatquai 45, 8001 Zürich",
        objectAddress="Limmatquai 45, 1.OG, 8001 Zürich",
        period="2026",
        rentChf=2300.00,
        tenantNotes="Mietzins gemäss letzter Indexanpassung — bitte prüfen.",
        hasSignature=True,
    )

    # 3) keine_unterschrift -- Carla Weber, 1650 is correct, but not signed
    _renderForm(
        outDir / "mieter03-keine-unterschrift.pdf",
        tenantName="Carla Weber",
        tenantAddress="Seestrasse 88, 8002 Zürich",
        objectAddress="Seestrasse 88, EG, 8002 Zürich",
        period="2026",
        rentChf=1650.00,
        tenantNotes="",
        hasSignature=False,
    )

    print(f"Generated 3 scans in {outDir}")


if __name__ == "__main__":
    _main()
demoData/pwg/_seedTrusteeData.json (new file, 68 lines)
@@ -0,0 +1,68 @@

{
  "_comment": "PWG demo seed data -- fictitious tenants (debtors) and 2026 rent bookings for the Trustee feature. Loaded idempotently by pwgDemo2026.py.",
  "rentAccount": "6000",
  "rentAccountLabel": "Mietzinsertrag Wohnen",
  "year": 2026,
  "tenants": [
    {
      "contactNumber": "10001",
      "name": "Anna Müller",
      "address": "Bahnhofstrasse 12",
      "zip": "8001",
      "city": "Zürich",
      "country": "CH",
      "email": "anna.mueller@example.ch",
      "monthlyRentChf": 1850.00,
      "scenario": "bestaetigt",
      "_note": "Matches exactly -- expected pilot status 'bestaetigt'"
    },
    {
      "contactNumber": "10002",
      "name": "Beat Schneider",
      "address": "Limmatquai 45",
      "zip": "8001",
      "city": "Zürich",
      "country": "CH",
      "email": "beat.schneider@example.ch",
      "monthlyRentChf": 2200.00,
      "scenario": "abweichung_betrag",
      "_note": "Scan shows 2300 CHF/month (tenant not informed about the increase) -- expected status 'abweichung_betrag'"
    },
    {
      "contactNumber": "10003",
      "name": "Carla Weber",
      "address": "Seestrasse 88",
      "zip": "8002",
      "city": "Zürich",
      "country": "CH",
      "email": "carla.weber@example.ch",
      "monthlyRentChf": 1650.00,
      "scenario": "keine_unterschrift",
      "_note": "Scan is unsigned -- expected status 'keine_unterschrift'"
    },
    {
      "contactNumber": "10004",
      "name": "Daniel Keller",
      "address": "Hardturmstrasse 200",
      "zip": "8005",
      "city": "Zürich",
      "country": "CH",
      "email": "daniel.keller@example.ch",
      "monthlyRentChf": 2450.00,
      "scenario": "kein_scan",
      "_note": "Has not sent the form back yet -- does not appear in the pilot run"
    },
    {
      "contactNumber": "10005",
      "name": "Elena Fischer",
      "address": "Rämistrasse 71",
      "zip": "8001",
      "city": "Zürich",
      "country": "CH",
      "email": "elena.fischer@example.ch",
      "monthlyRentChf": 1990.00,
      "scenario": "kein_scan",
      "_note": "Reserve tenant for later demo extensions"
    }
  ]
}
demoData/pwg/scans/mieter01-bestaetigt.pdf (new file, 80 lines)
One-page ReportLab-generated PDF (produced by _generateScans.py above); raw PDF source omitted.
demoData/pwg/scans/mieter02-abweichung-betrag.pdf (new file, 80 lines)
One-page ReportLab-generated PDF (produced by _generateScans.py above); raw PDF source omitted.
demoData/pwg/scans/mieter03-keine-unterschrift.pdf (new file, 74 lines)
One-page ReportLab-generated PDF (produced by _generateScans.py above); raw PDF source omitted.
152
demoData/workflows/pwg-mietzinsbestaetigung-pilot.workflow.json
Normal file
@ -0,0 +1,152 @@
|
||||||
|
{
|
||||||
|
"$schemaVersion": "1.0",
|
||||||
|
"$kind": "poweron.workflow",
|
||||||
|
"$exportedAt": "2026-04-16T10:00:00Z",
|
||||||
|
"$gatewayVersion": "demo-2026-04",
|
||||||
|
"label": "PWG Pilot: Jahresmietzinsbestätigung",
|
||||||
|
"description": "Verarbeitet gescannte Rückantworten der Jahresmietzinsbestätigungen: OCR, Abgleich gegen Trustee-DB (Mieter + Mietzins-Buchungen), AI-Klassifikation pro Scan und Zustellung als CSV-Anhang im Outlook-Draft an die Sachbearbeitung. Pilot-Lieferung Sommer 2026.",
|
||||||
|
"tags": ["pwg", "pilot", "mietzins", "trustee", "ocr"],
|
||||||
|
"templateScope": "instance",
|
||||||
|
"sharedReadOnly": false,
|
||||||
|
"notifyOnFailure": true,
|
||||||
|
"graph": {
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "n1",
|
||||||
|
"type": "trigger.manual",
|
||||||
|
"x": 50,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Manueller Start",
|
||||||
|
"parameters": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n2",
|
||||||
|
"type": "sharepoint.listFiles",
|
||||||
|
"x": 320,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Scan-Ordner auflisten",
|
||||||
|
"parameters": {
|
||||||
|
"connectionReference": "",
|
||||||
|
"pathQuery": "PWG/Mietzinsbestaetigungen/Scans-Eingang"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n3",
|
||||||
|
"type": "flow.loop",
|
||||||
|
"x": 590,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Pro Scan-Dokument",
|
||||||
|
"parameters": {
|
||||||
|
"level": 1,
|
||||||
|
"concurrency": 1
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n4",
|
||||||
|
"type": "sharepoint.downloadFile",
|
||||||
|
"x": 860,
|
||||||
|
"y": 200,
|
||||||
|
"title": "PDF/Bild laden",
|
||||||
|
"parameters": {
|
||||||
|
"connectionReference": "",
|
||||||
|
"pathQuery": "{{loop.item.path}}"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n5",
|
||||||
|
"type": "trustee.extractFromFiles",
|
||||||
|
"x": 1130,
|
||||||
|
"y": 200,
|
||||||
|
"title": "OCR & Felder extrahieren",
|
||||||
|
"parameters": {
|
||||||
|
"featureInstanceId": "",
|
||||||
|
"prompt": "Extrahiere die folgenden Felder aus dieser Jahresmietzinsbestätigung und antworte als JSON: tenantName (string), tenantAddress (string), objectAddress (string), confirmedRentAmount (number|null in CHF), currency ('CHF'), period (string z.B. '2026'), tenantNotes (string|null - alle handschriftlichen Anmerkungen oder Korrekturen), hasSignature (boolean - ist eine Unterschrift vorhanden?), documentDate (ISO date|null), ocrConfidence (number 0-1)."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n6",
|
||||||
|
"type": "trustee.queryData",
|
||||||
|
"x": 1400,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Referenzdaten Trustee-DB",
|
||||||
|
"parameters": {
|
||||||
|
"featureInstanceId": "",
|
||||||
|
"mode": "lookup",
|
||||||
|
"entity": "tenantWithRent",
|
||||||
|
"tenantNameRef": "{{n5.output.tenantName}}",
|
||||||
|
"tenantAddressRef": "{{n5.output.tenantAddress}}",
|
||||||
|
"period": "{{n5.output.period}}",
|
||||||
|
"rentAccountPattern": "6000-6099"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n7",
|
||||||
|
"type": "ai.prompt",
|
||||||
|
"x": 1670,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Prüfung & Klassifikation",
|
||||||
|
"parameters": {
|
||||||
|
"outputFormat": "json",
|
||||||
|
"simpleMode": false,
|
||||||
|
"documentList": "{{n5.output}}",
|
||||||
|
"context": "{{n6.output}}",
|
||||||
|
"aiPrompt": "Du bist ein Sachbearbeitungs-Assistent der Stiftung PWG. Deine Aufgabe ist es, eine eingescannte und OCR-extrahierte Jahresmietzinsbestätigung gegen die Stammdaten der Buchhaltung (Trustee-Feature) abzugleichen.\n\nEingaben:\n1. SCAN_DATEN (extrahiert per OCR aus dem Rückantwort-Dokument):\n{{scan}}\n\n2. REFERENZ_DATEN (aus Trustee-DB für diesen Mieter; ggf. leer wenn nicht eindeutig zuordenbar):\n{{reference}}\n\nVorgehen:\n1. Prüfe Identität: Stimmt SCAN_DATEN.tenantName + SCAN_DATEN.tenantAddress mit einem Datensatz in REFERENZ_DATEN.contacts überein? (Toleranz: kleine Tippfehler, Umlaute, Abkürzungen).\n2. Prüfe Mietzinsbetrag: Stimmt SCAN_DATEN.confirmedRentAmount mit dem aus REFERENZ_DATEN.expectedRentAmount erwarteten Mietzins überein? (Toleranz: ±1 CHF Rundung).\n3. Prüfe Unterschrift: hasSignature muss true sein.\n4. Prüfe OCR-Qualität: ocrConfidence < 0.6 -> 'unleserlich'.\n\nKlassifiziere in EXAKT EINEN Status:\n- 'bestaetigt': Identität stimmt, Betrag stimmt, Unterschrift vorhanden.\n- 'abweichung_betrag': Identität ok, Unterschrift ok, Betrag weicht ab.\n- 'abweichung_anmerkung': tenantNotes enthält substantielle Anmerkung (nicht leer, nicht reine Bestätigung).\n- 'keine_unterschrift': hasSignature == false.\n- 'unleserlich': OCR-Qualität ungenügend ODER Pflichtfelder fehlen.\n- 'kein_match': Mieter nicht in REFERENZ_DATEN auffindbar.\n\nBei Status != 'bestaetigt': Generiere einen kurzen, höflichen Antwortvorschlag (deutsch, Sie-Form, max. 5 Sätze, PWG-Stil) für die Sachbearbeitung. Bei 'bestaetigt': antwortVorschlag = null.\n\nAntworte AUSSCHLIESSLICH als JSON nach folgendem Schema:\n{\n \"tenantName\": string,\n \"objectAddress\": string,\n \"status\": \"bestaetigt\" | \"abweichung_betrag\" | \"abweichung_anmerkung\" | \"keine_unterschrift\" | \"unleserlich\" | \"kein_match\",\n \"scanRentAmount\": number | null,\n \"expectedRentAmount\": number | null,\n \"delta\": number | null,\n \"tenantNotes\": string | null,\n \"antwortVorschlag\": string | null,\n \"matchConfidence\": number,\n \"auditEvidence\": string\n}"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n8",
|
||||||
|
"type": "data.aggregate",
|
||||||
|
"x": 1940,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Ergebnisse sammeln (im Loop)",
|
||||||
|
"parameters": {
|
||||||
|
"mode": "collect"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n9",
|
||||||
|
"type": "data.consolidate",
|
||||||
|
"x": 2210,
|
||||||
|
"y": 200,
|
||||||
|
"title": "CSV bauen (nach Loop)",
|
||||||
|
"parameters": {
|
||||||
|
"mode": "csvJoin",
|
||||||
|
"separator": "\n"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "n10",
|
||||||
|
"type": "email.draftEmail",
|
||||||
|
"x": 2480,
|
||||||
|
"y": 200,
|
||||||
|
"title": "Draft an Sachbearbeitung",
|
||||||
|
"parameters": {
|
||||||
|
"connectionReference": "",
|
||||||
|
"to": "sachbearbeiter@pwg.ch",
|
||||||
|
"subject": "Mietzinsbestätigungen Auswertung {{currentDate}}",
|
||||||
|
"body": "Hallo,\n\nim Anhang die Auswertung der eingegangenen Jahresmietzinsbestätigungen.\nPro Scan eine Zeile mit Status, Betragsabgleich und (bei Abweichung) Antwortvorschlag.\n\nBitte die Zeilen mit Status != 'bestaetigt' manuell sichten.\n\nFreundliche Grüße,\nPWG Automation",
|
||||||
|
"emailStyle": "business",
|
||||||
|
"attachments": [
|
||||||
|
{
|
||||||
|
"name": "mietzinsbestaetigungen-auswertung",
|
||||||
|
"mimeType": "text/csv",
|
||||||
|
"csvFromVariable": "n9.output"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"connections": [
|
||||||
|
{"source": "n1", "target": "n2", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n2", "target": "n3", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n3", "target": "n4", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n4", "target": "n5", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n5", "target": "n6", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n6", "target": "n7", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n7", "target": "n8", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n8", "target": "n9", "sourceOutput": 0, "targetInput": 0},
|
||||||
|
{"source": "n9", "target": "n10", "sourceOutput": 0, "targetInput": 0}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"invocations": []
|
||||||
|
}
|
||||||
309
docs/althaus-bot-v2-aufwandsschaetzung.md
Normal file
@ -0,0 +1,309 @@
|
||||||
|
# Aufwandsschätzung Althaus Bot v2 -- Unabhängige Analyse
|
||||||
|
|
||||||
|
**Projekt:** Althaus Bot v2 -- Weiterentwicklung & neue Use Cases
|
||||||
|
**Kunde:** W. Althaus AG, Aarwangen
|
||||||
|
**Erstellt:** 13. April 2026
|
||||||
|
**Basis:** Code-Analyse Gateway-Repository + Offerte v2 vom 14.04.2026
|
||||||
|
**Methodik:** Bottom-Up-Schätzung auf Basis der bestehenden Implementierung, Dreipunktschätzung (Min / Mitte / Max)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Ist-Zustand der Implementierung
|
||||||
|
|
||||||
|
### 1.1 Architekturübersicht
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────────┐
|
||||||
|
│ React Frontend (SSE-Streaming, Chat-UI) │
|
||||||
|
└──────────────────────────┬──────────────────────────────────────┘
|
||||||
|
│ /api/chatbot/*
|
||||||
|
┌──────────────────────────▼──────────────────────────────────────┐
|
||||||
|
│ Gateway (Python/FastAPI) │
|
||||||
|
│ ┌─────────────────────────────────────────────────────────┐ │
|
||||||
|
│ │ Chatbot Feature (modules/features/chatbot/) │ │
|
||||||
|
│ │ ┌─────────┐ ┌──────────┐ ┌──────────┐ ┌────────┐ │ │
|
||||||
|
│ │ │ Planner │→ │ SQL Plan │→ │ Parse & │→ │Formul. │ │ │
|
||||||
|
│ │ │ Node │ │ Node │ │ Execute │ │ Node │ │ │
|
||||||
|
│ │ └────┬────┘ └──────────┘ └────┬─────┘ └────────┘ │ │
|
||||||
|
│ │ │ │ │ │
|
||||||
|
│ │ ├→ Tavily (Web Search) │ │ │
|
||||||
|
│ │ └→ Direct Answer │ │ │
|
||||||
|
│ └──────────────────────────────────┼──────────────────────┘ │
|
||||||
|
│ │ │
|
||||||
|
│ ┌──────────────────────────────────▼──────────────────────┐ │
|
||||||
|
│ │ PreprocessorConnector (HTTP POST → Azure SQL API) │ │
|
||||||
|
│ └─────────────────────────────────────────────────────────┘ │
|
||||||
|
│ │
|
||||||
|
│ ┌─────────────────────────────────────────────────────────┐ │
|
||||||
|
│ │ KnowledgeService (pgvector/RAG) -- NICHT IM CHATBOT │ │
|
||||||
|
│ │ Produktiv im AgentService + CommCoach │ │
|
||||||
|
│ └─────────────────────────────────────────────────────────┘ │
|
||||||
|
└──────────────────────────────────────────────────────────────────┘
|
||||||
|
│
|
||||||
|
┌──────────────────────────▼──────────────────────────────────────┐
|
||||||
|
│ Azure Preprocessing Server (deployed, ERP-Daten deaktiviert) │
|
||||||
|
│ Tabellen: Artikel, Einkaufspreis, Lagerplatz, Lagerplatz_Art. │
|
||||||
|
│ Repo: github.com/valueonag/gateway_preprocessing │
|
||||||
|
└─────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### 1.2 Vorhandene Komponenten (Wiederverwendung)
|
||||||
|
|
||||||
|
| Komponente | Datei / Modul | Status | Wiederverwendbar für |
|
||||||
|
|---|---|---|---|
|
||||||
|
| LangGraph-Workflow | `chatbot/chatbot.py` | Produktiv (deaktiviert) | Alle Positionen -- Grundgerüst |
|
||||||
|
| PreprocessorConnector | `connectors/connectorPreprocessor.py` | Produktiv (deaktiviert) | Pos. 1, 2, 3, 4 -- SQL-Abfragen |
|
||||||
|
| ChatbotConfig | `chatbot/config.py` | Produktiv | Alle -- Konfiguration pro Instanz |
|
||||||
|
| Streaming-Bridge | `chatbot/service.py` | Produktiv | Alle -- SSE ans Frontend |
|
||||||
|
| ChatbotDocument | `chatbot/interfaceFeatureChatbot.py` | Implementiert | Pos. 1.4, 2.1, 2.5 -- File-Handling |
|
||||||
|
| KnowledgeService/RAG | `serviceCenter/services/serviceKnowledge/` | Produktiv (AgentService) | Pos. 5 -- Wiki-Integration |
|
||||||
|
| Automation-Template | `automation/subAutomationTemplates.py` | Produktiv | Pos. 6 -- Preprocessor-Updates |
|
||||||
|
| SQL-Sanitize | `chatbot.py` → `_sanitize_sql_typos` | Produktiv | Pos. 1.1 -- Gesperrte Artikel |
|
||||||
|
| Markdown-Tabellen | `chatbot.py` → `_tool_output_to_markdown_table` | Produktiv | Pos. 1.3, 3.3 -- Darstellung |
|
||||||
|
| File-Upload Backend | `service.py` → `_convert_file_ids_to_document_references` | Implementiert | Pos. 1.4 -- Upload-Pipeline |
|
||||||
|
| Excel-Export | `service.py` → `_create_chat_document_from_action_document` | Implementiert | Pos. 2.5 -- Kalktool-Export |
|
||||||
|
|
||||||
|
### 1.3 Fehlende Komponenten (Neuentwicklung)
|
||||||
|
|
||||||
|
| Komponente | Benötigt für | Komplexität |
|
||||||
|
|---|---|---|
|
||||||
|
| Matching-Engine (exakt → fuzzy → KI) | Pos. 2.2 | Hoch |
|
||||||
|
| Neuer Planner-Pfad "WIKI" | Pos. 5.2 | Mittel |
|
||||||
|
| KnowledgeService → Chatbot Integration | Pos. 5.2 | Mittel |
|
||||||
|
| Wiki-Connector (API/Crawling) | Pos. 5.1 | Unbekannt (Wiki-abhängig) |
|
||||||
|
| Delta-Sync-Mechanismus | Pos. 5.3 | Mittel |
|
||||||
|
| Preprocessor: 8-10 neue Tabellen/Views | Pos. 1.5, 3.1, 4.1 | Mittel (Code-Änderung) |
|
||||||
|
| Frontend: File-Picker, Drag&Drop | Pos. 1.4 | Mittel |
|
||||||
|
| Frontend: Thread-Liste, Suchfunktion | Pos. 1.2 | Mittel |
|
||||||
|
| Kalktool-Excel-Format-Export | Pos. 2.5 | Mittel |
|
||||||
|
| Schwellenwert-Insights | Pos. 4.5 | Mittel |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Detaillierte Aufwandsschätzung
|
||||||
|
|
||||||
|
### Position 1: Basics (Plattform-Verbesserungen)
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 1.1 | Gesperrte Artikel filtern | 4 | 3 | 4 | 4 | System-Prompt + SQL-Sanitize-Regel. Kleine Änderung. |
|
||||||
|
| 1.2 | Chat-Verlauf speichern | 12 | 12 | 14 | 16 | Backend existiert. Frontend-Aufwand (Thread-Liste, Suche). |
|
||||||
|
| 1.3 | Längere Antworten | 6 | 4 | 5 | 6 | Streaming-Config + Frontend-Rendering. |
|
||||||
|
| 1.4 | Datei-Upload | 16 | 16 | 18 | 20 | Full-Stack: Drag&Drop + LangGraph-Integration + Extraktion. |
|
||||||
|
| 1.5 | Kundenartikelnummern | 8 | 10 | 12 | 14 | Preprocessor-Code + Prompt + Cross-Ref-Queries. ERP-abhängig. |
|
||||||
|
| 1.6 | Abklärungen & Testing | 8 | 8 | 8 | 8 | Standard. |
|
||||||
|
| | **Subtotal** | **54** | **53** | **61** | **68** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: +7h (Mitte) / +14h (Max)**
|
||||||
|
**Haupttreiber:** Preprocessor-Erweiterung für Kundenartikelnummern (Pos. 1.5) erfordert Code-Änderung, nicht nur Config. Frontend-Aufwand bei Upload (Pos. 1.4) eher am oberen Ende.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Position 2: Use Case Kalktool
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 2.1 | Stücklisten-Upload & Extraktion | 12 | 10 | 12 | 14 | Nutzt Pos. 1.4. serviceExtraction vorhanden. |
|
||||||
|
| 2.2 | Artikelidentifikation & Matching | 20 | 24 | 28 | 32 | **KRITISCH**: Neue Matching-Engine, 3 Stufen, ERP-abhängig. |
|
||||||
|
| 2.3 | Automatische Feldergänzung | 16 | 14 | 16 | 18 | Preprocessor + Enrichment-Logik. |
|
||||||
|
| 2.4 | Alternativartikel-Vorschläge | 12 | 12 | 14 | 16 | KI-Vorschläge + Bestätigungs-Workflow im Chat. |
|
||||||
|
| 2.5 | Excel-Export (Kalktool-Format) | 12 | 10 | 12 | 14 | Basis existiert. Kalktool-Vorlage-Anpassung. |
|
||||||
|
| 2.6 | Erweiterbarkeit neue Felder | 8 | 6 | 8 | 10 | Config-gesteuertes Feld-Mapping. |
|
||||||
|
| 2.7 | Abklärungen & Testing | 12 | 12 | 12 | 12 | Kalktool-Vorlage, Testdaten, UAT. |
|
||||||
|
| | **Subtotal** | **92** | **88** | **102** | **116** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: +10h (Mitte) / +24h (Max)**
|
||||||
|
**Haupttreiber:** Die Matching-Engine (Pos. 2.2) ist die komplexeste Neuentwicklung im gesamten Projekt. Mehrstufiges Matching (exakt → fuzzy → KI-gestützt) ohne bestehende Basis. Die Qualität hängt stark von der ERP-Datenqualität und der Vielfalt der Kunden-Stücklisten-Formate ab.
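
Zur Illustration eine minimale Skizze, wie ein solches dreistufiges Matching aufgebaut sein könnte; Feldnamen und Schwellwert sind Annahmen, die KI-Stufe ist nur als Platzhalter angedeutet:

```python
# Minimale Skizze eines dreistufigen Matchings (exakt -> fuzzy -> KI).
# Annahme: erp_artikel ist eine Liste von Dicts mit den Schlüsseln
# "Artikelnummer" und "Artikelbezeichnung"; Feldnamen und Schwellwert sind hypothetisch.
from difflib import SequenceMatcher

def match_position(position: dict, erp_artikel: list[dict]) -> dict:
    """Ordnet eine Stücklisten-Position einem ERP-Artikel zu."""
    # Stufe 1: exakter Match über die Artikelnummer
    for artikel in erp_artikel:
        if position.get("artikelnummer") == artikel["Artikelnummer"]:
            return {"match": artikel, "stufe": "exakt", "score": 1.0}

    # Stufe 2: Fuzzy-Match über die Bezeichnung (Schwellwert 0.85 ist ein Annahmewert)
    best, best_score = None, 0.0
    for artikel in erp_artikel:
        score = SequenceMatcher(
            None,
            position.get("bezeichnung", "").lower(),
            artikel["Artikelbezeichnung"].lower(),
        ).ratio()
        if score > best_score:
            best, best_score = artikel, score
    if best_score >= 0.85:
        return {"match": best, "stufe": "fuzzy", "score": best_score}

    # Stufe 3: KI-gestützter Vorschlag (hier nur Platzhalter; im Projekt würde
    # dieser Schritt über das AI-Gate / LLM mit Bestätigungs-Workflow laufen)
    return {"match": None, "stufe": "ki_vorschlag", "score": best_score}
```

Die eigentliche Komplexität liegt in der dritten Stufe (KI-Vorschlag inkl. Bestätigungs-Workflow, Pos. 2.4) sowie im Umgang mit heterogenen Stücklisten-Formaten, nicht in diesem Grundgerüst.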
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Position 3: Use Case Materialmanagement 1
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 3.1 | ERP-Daten erweitern | 16 | 16 | 19 | 22 | Preprocessor: Bestellungen, Wareneingänge, Aufträge. Code nötig. |
|
||||||
|
| 3.2 | System-Prompt Materialmanagement | 8 | 6 | 8 | 10 | Prompt-Engineering + SQL-Templates. |
|
||||||
|
| 3.3 | Transparente Statusübersicht | 8 | 6 | 7 | 8 | Markdown-Rendering existiert, Erweiterung nötig. |
|
||||||
|
| 3.4 | Auswirkungsanalyse & Empfehlungen | 12 | 14 | 16 | 18 | Cross-Table-Queries + KI-Analyse. Komplex. |
|
||||||
|
| 3.5 | Abklärungen & Testing | 8 | 8 | 8 | 8 | Standard. |
|
||||||
|
| | **Subtotal** | **52** | **50** | **58** | **66** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: +6h (Mitte) / +14h (Max)**
|
||||||
|
**Haupttreiber:** Auswirkungsanalyse (Pos. 3.4) erfordert Multi-Table-Joins und KI-gestützte Bewertung, was über einfache SQL-Abfragen hinausgeht.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Position 4: Use Case Materialmanagement 2 (KPIs)
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 4.1 | ERP-Daten erweitern | 16 | 16 | 19 | 22 | Lagerjournal, Preishistorie. Aggregierte Views. |
|
||||||
|
| 4.2 | System-Prompt KPI-Analyse | 8 | 6 | 8 | 10 | Prompt-Engineering. |
|
||||||
|
| 4.3 | Liefertermintreue-Analyse | 10 | 10 | 12 | 14 | Zeitreihen, Lieferantenvergleich, komplexe SQL. |
|
||||||
|
| 4.4 | Preisentwicklungs-Analyse | 10 | 10 | 11 | 12 | Preishistorie, Abweichungsberechnung. |
|
||||||
|
| 4.5 | Automatisierte Insights | 8 | 10 | 12 | 14 | Schwellenwert-Warnungen, proaktive Erkennung. Neues Konzept. |
|
||||||
|
| 4.6 | Abklärungen & Testing | 8 | 8 | 8 | 8 | Standard. |
|
||||||
|
| | **Subtotal** | **60** | **60** | **70** | **80** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: +10h (Mitte) / +20h (Max)**
|
||||||
|
**Haupttreiber:** Automatisierte Insights (Pos. 4.5) erfordern eine neue Logikschicht, die proaktiv Schwellenwerte überwacht und Empfehlungen generiert. Das ist im aktuellen Chat-Flow nicht vorgesehen.
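
Eine minimale Skizze einer solchen Schwellenwert-Prüfung (Regeln, Feldnamen und Grenzwerte sind frei gewählte Annahmen):

```python
# Skizze einer einfachen Schwellenwert-Prüfung für automatisierte Insights (Pos. 4.5).
# Annahme: kpi_zeilen stammen aus einer aggregierten View (z. B. View_Termintreue);
# Feldnamen und Grenzwerte sind frei gewählte Beispiele.
REGELN = [
    {"feld": "Abweichung_Tage", "schwelle": 5,
     "hinweis": "Terminabweichung über 5 Tage"},
    {"feld": "Preisabweichung_Prozent", "schwelle": 10,
     "hinweis": "Einkaufspreis mehr als 10% über Referenzwert"},
]

def pruefe_insights(kpi_zeilen: list[dict]) -> list[dict]:
    """Liefert pro verletzter Regel einen Insight-Eintrag für die Chat-Antwort."""
    insights = []
    for zeile in kpi_zeilen:
        for regel in REGELN:
            wert = zeile.get(regel["feld"])
            if wert is not None and wert > regel["schwelle"]:
                insights.append({"hinweis": regel["hinweis"], "datensatz": zeile})
    return insights
```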
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Position 5: Use Case Wiki-Anbindung
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 5.1 | Wiki-Anbindung & Indexierung | 16 | 16 | 20 | 24 | KnowledgeService existiert. Wiki-Zugang UNBEKANNT. |
|
||||||
|
| 5.2 | RAG-Integration im Chatbot | 12 | 12 | 14 | 16 | Pattern existiert (AgentService), muss portiert werden. |
|
||||||
|
| 5.3 | Inkrementelle Aktualisierung | 8 | 8 | 11 | 14 | Delta-Sync stark Wiki-abhängig. |
|
||||||
|
| 5.4 | Abklärungen & Testing | 8 | 8 | 9 | 10 | Relevanz-Tuning ist iterativ. |
|
||||||
|
| | **Subtotal** | **44** | **44** | **54** | **64** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: +10h (Mitte) / +20h (Max)**
|
||||||
|
**Haupttreiber:** Wiki-System ist unbekannt. Bei Wiki mit guter API (Confluence, SharePoint) sind 44h erreichbar. Bei proprietärem System ohne API steigt der Aufwand erheblich.
|
||||||
|
|
||||||
|
**Synergie:** KnowledgeService mit pgvector, Chunking, Embedding und semanticSearch ist bereits produktiv. Die RAG-Pipeline (Ingestion → Embedding → Retrieval) muss nicht neu gebaut werden. Das spart geschätzt 20-30h gegenüber einer Neuentwicklung.
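
Der Retrieval-Schritt lässt sich auf pgvector-Basis grob wie folgt skizzieren; die Tabellen- und Spaltennamen sind Annahmen und nicht dem realen KnowledgeService-Schema entnommen:

```python
# Grobe Skizze des Retrieval-Schritts auf pgvector-Basis.
# Tabellen-/Spaltennamen ("knowledge_chunks", "embedding", "content") sind Annahmen.
import psycopg2

def semantic_search(dsn: str, query_embedding: list[float], top_k: int = 5) -> list[str]:
    """Liefert die top_k ähnlichsten Chunks (Cosine-Distanz via pgvector-Operator <=>)."""
    vector_literal = "[" + ",".join(str(x) for x in query_embedding) + "]"
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur:
        cur.execute(
            """
            SELECT content
            FROM knowledge_chunks
            ORDER BY embedding <=> %s::vector
            LIMIT %s
            """,
            (vector_literal, top_k),
        )
        return [row[0] for row in cur.fetchall()]
```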
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Position 6: Azure-Migration
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 6.1 | Migration Preprocessor | 6 | 4 | 6 | 8 | Config-Änderungen, Env-Files, Netzwerk. |
|
||||||
|
| 6.2 | Validierung & Smoke-Tests | 4 | 4 | 4 | 4 | End-to-End-Tests. |
|
||||||
|
| | **Subtotal** | **10** | **8** | **10** | **12** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: 0h (Mitte)**
|
||||||
|
**Bewertung:** Realistisch. Einfachste Position.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Position 7: Projektmanagement
|
||||||
|
|
||||||
|
| # | Anforderung | Offerte | Min | Mitte | Max | Begründung |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|---|
|
||||||
|
| 7.1 | Kick-off & Workshop | 4 | 4 | 4 | 4 | Standard. |
|
||||||
|
| 7.2 | Projektmanagement | 8 | 10 | 12 | 14 | 10-14 Wochen, 3 Ansprechpartner, 7 Positionen. |
|
||||||
|
| 7.3 | Deployment & Go-Live | 6 | 6 | 7 | 8 | Staging + Prod + erste Betriebswoche. |
|
||||||
|
| | **Subtotal** | **18** | **20** | **23** | **26** | |
|
||||||
|
|
||||||
|
**Delta zur Offerte: +5h (Mitte) / +8h (Max)**
|
||||||
|
**Haupttreiber:** PM-Aufwand bei 3-Monats-Projekt mit mehreren Stakeholdern ist erfahrungsgemäss höher.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Gesamtübersicht
|
||||||
|
|
||||||
|
| Pos. | Beschreibung | Offerte (h) | Min (h) | Mitte (h) | Max (h) | Offerte CHF | Mitte CHF |
|
||||||
|
|---|---|:-:|:-:|:-:|:-:|:-:|:-:|
|
||||||
|
| 1 | Basics | 54 | 53 | 61 | 68 | 8'100 | 9'150 |
|
||||||
|
| 2 | Kalktool | 92 | 88 | 102 | 116 | 13'800 | 15'300 |
|
||||||
|
| 3 | Materialmanagement 1 | 52 | 50 | 58 | 66 | 7'800 | 8'700 |
|
||||||
|
| 4 | Materialmanagement 2 | 60 | 60 | 70 | 80 | 9'000 | 10'500 |
|
||||||
|
| 5 | Wiki-Anbindung | 44 | 44 | 54 | 64 | 6'600 | 8'100 |
|
||||||
|
| 6 | Azure-Migration | 10 | 8 | 10 | 12 | 1'500 | 1'500 |
|
||||||
|
| 7 | Projektmanagement | 18 | 20 | 23 | 26 | 2'700 | 3'450 |
|
||||||
|
| | **Gesamt** | **330** | **323** | **378** | **432** | **49'500** | **56'700** |
|
||||||
|
|
||||||
|
### Zusammenfassung
|
||||||
|
|
||||||
|
| Szenario | Stunden | CHF (à 150/h) | Differenz zur Offerte |
|
||||||
|
|---|:-:|:-:|:-:|
|
||||||
|
| Offerte (Kostendach) | 330 | 49'500 | -- |
|
||||||
|
| Eigene Schätzung (Minimum) | 323 | 48'450 | -2% |
|
||||||
|
| **Eigene Schätzung (Mitte)** | **378** | **56'700** | **+15%** |
|
||||||
|
| Eigene Schätzung (Maximum) | 432 | 64'800 | +31% |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Risikobewertung
|
||||||
|
|
||||||
|
### Risikomatrix
|
||||||
|
|
||||||
|
| # | Risiko | Wahrscheinlichkeit | Auswirkung | Betroffene Pos. | Möglicher Mehraufwand |
|
||||||
|
|---|---|:-:|:-:|---|:-:|
|
||||||
|
| R1 | Matching-Engine komplexer als erwartet | Hoch | Hoch | 2.2 | +10-15h |
|
||||||
|
| R2 | Wiki-System ohne API | Mittel | Hoch | 5.1, 5.3 | +10-20h |
|
||||||
|
| R3 | ERP-Datenqualität mangelhaft | Mittel | Mittel | 1.5, 2.2, 3.1, 4.1 | +8-16h |
|
||||||
|
| R4 | Preprocessor-Erweiterung aufwändiger | Mittel | Mittel | 1.5, 3.1, 4.1 | +8-12h |
|
||||||
|
| R5 | Frontend-Aufwand unterschätzt | Mittel | Gering | 1.2, 1.4 | +4-8h |
|
||||||
|
| R6 | KI-Modell-Qualität für SQL-Generierung | Gering | Mittel | 3, 4 | +4-8h |
|
||||||
|
|
||||||
|
### Synergien (Aufwandsreduktion durch bestehende Komponenten)
|
||||||
|
|
||||||
|
| Synergie | Geschätzte Einsparung | Betroffene Pos. |
|
||||||
|
|---|:-:|---|
|
||||||
|
| KnowledgeService/RAG existiert produktiv | 20-30h | Pos. 5 |
|
||||||
|
| ChatbotDocument-Modell existiert | 4-6h | Pos. 1.4, 2.1 |
|
||||||
|
| LangGraph modular erweiterbar | 6-10h | Pos. 3, 4, 5 |
|
||||||
|
| Prompt-Engineering über DB-Config | 2-4h | Pos. 1.1, 3.2, 4.2 |
|
||||||
|
| Excel-Export-Pattern existiert | 2-4h | Pos. 2.5 |
|
||||||
|
| **Gesamt Einsparung** | **34-54h** | |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Empfehlungen
|
||||||
|
|
||||||
|
### 5.1 Zur Offerte
|
||||||
|
|
||||||
|
Die Offerte mit 330h als Kostendach ist **ambitioniert, aber bei idealem Verlauf erreichbar**. Die grössten Risiken liegen in:
|
||||||
|
- Position 2 (Kalktool): Die Matching-Engine ist die komplexeste Neuentwicklung
|
||||||
|
- Position 5 (Wiki): Komplett abhängig vom Wiki-System, das noch ungeklärt ist
|
||||||
|
|
||||||
|
**Empfehlung:** Offerte bei 330h als Kostendach belassen, aber intern mit 370-380h planen. Die Differenz (~40-50h) als interne Reserve einkalkulieren.
|
||||||
|
|
||||||
|
### 5.2 Priorisierung
|
||||||
|
|
||||||
|
1. **Must-Have (Prio 1):** Pos. 1 (Basics) + Pos. 6 (Azure-Migration) -- Voraussetzung für alles
|
||||||
|
2. **High-Value (Prio 2):** Pos. 2 (Kalktool) -- Höchster Kundennutzen, aber auch höchstes Risiko
|
||||||
|
3. **Quick-Win (Prio 3):** Pos. 3+4 (Materialmanagement) -- Nutzen vorhandene Architektur
|
||||||
|
4. **Abhängig (Prio 4):** Pos. 5 (Wiki) -- Erst nach Wiki-Klärung starten
|
||||||
|
|
||||||
|
### 5.3 Offene Punkte (vor Projektstart zu klären)
|
||||||
|
|
||||||
|
| # | Offener Punkt | Verantwortlich | Kritisch für |
|
||||||
|
|---|---|---|---|
|
||||||
|
| O1 | Wiki-System und Zugangsart klären | Althaus (Samuel) | Pos. 5 |
|
||||||
|
| O2 | ERP-System identifizieren und Datenstrukturen dokumentieren | Althaus (Stefan) | Pos. 1.5, 3.1, 4.1 |
|
||||||
|
| O3 | Preprocessor-Code-Review für Erweiterbarkeit | PowerOn (Entwicklung) | Pos. 1.5, 3.1, 4.1 |
|
||||||
|
| O4 | Kalktool-Vorlage erhalten und analysieren | Althaus (Reto) | Pos. 2.5 |
|
||||||
|
| O5 | Muster-Stücklisten für Matching-Test | Althaus (Reto) | Pos. 2.2 |
|
||||||
|
| O6 | Azure-Subscription-Details | Althaus | Pos. 6 |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Zeitplan (2 Entwickler)
|
||||||
|
|
||||||
|
```
|
||||||
|
Woche 1-2: Kick-off + Azure-Migration (Pos. 6) + Basics 1.1-1.3
|
||||||
|
Entwickler A: Azure-Migration + 1.1 (Gesperrte Artikel)
|
||||||
|
Entwickler B: 1.2 (Chat-Verlauf Frontend) + 1.3 (Lange Antworten)
|
||||||
|
|
||||||
|
Woche 2-5: Basics 1.4-1.6 (Grundlage für Use Cases)
|
||||||
|
Entwickler A: 1.4 (File-Upload Full-Stack)
|
||||||
|
Entwickler B: 1.5 (Kundenartikelnummern + Preprocessor)
|
||||||
|
|
||||||
|
Woche 4-9: Kalktool (Pos. 2) -- längster Block, früh starten
|
||||||
|
Entwickler A: 2.1-2.2 (Upload + Matching-Engine)
|
||||||
|
Entwickler B: 2.3-2.5 (Feldergänzung + Export)
|
||||||
|
|
||||||
|
Woche 6-9: Materialmanagement 1+2 (Pos. 3+4) -- parallel zum Kalktool
|
||||||
|
Entwickler B: 3.1-3.4 + 4.1-4.5 (Preprocessor + Prompts)
|
||||||
|
(Entwickler A bleibt auf Kalktool)
|
||||||
|
|
||||||
|
Woche 9-12: Wiki-Anbindung (Pos. 5) -- nach Klärung des Wiki-Systems
|
||||||
|
Entwickler A: 5.1-5.2 (Connector + RAG-Integration)
|
||||||
|
Entwickler B: 5.3 (Delta-Sync) + Integrationstests
|
||||||
|
|
||||||
|
Woche 12-13: Integrationstests, UAT, Go-Live (Pos. 7.3)
|
||||||
|
Beide Entwickler: E2E-Tests + Deployment + Monitoring
|
||||||
|
```
|
||||||
|
|
||||||
|
**Gesamtdauer:** 12-14 Wochen
|
||||||
|
**Kritischer Pfad:** Pos. 1 → Pos. 2 (Kalktool braucht Upload + Kundenartikelnummern)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Dokument erstellt auf Basis der Code-Analyse des Gateway-Repository (Stand 13.04.2026)*
|
||||||
143
docs/althaus-bot-v2-fragenkatalog.md
Normal file
@ -0,0 +1,143 @@
|
||||||
|
# Fragenkatalog Althaus Bot v2 -- Kick-off-Vorbereitung
|
||||||
|
|
||||||
|
**Zweck:** Strukturierte Fragen für den Anforderungsworkshop mit W. Althaus AG
|
||||||
|
**Erstellt:** 13. April 2026
|
||||||
|
**Zielgruppe:** Projektleitung PowerOn + Ansprechpartner Althaus (Reto, Stefan, Samuel)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## A. Wiki-System (Ansprechpartner: Samuel)
|
||||||
|
|
||||||
|
> **Kritisch für:** Position 5 (Wiki-Anbindung) -- Aufwandsschätzung schwankt zwischen 44h und 64h je nach Wiki-System.
|
||||||
|
|
||||||
|
### A.1 Wiki-Identifikation
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| A1.1 | Welches Wiki-System wird eingesetzt? (z.B. Confluence, SharePoint Wiki, MediaWiki, DokuWiki, Notion, anderes) | Bestimmt die Anbindungsstrategie (API vs. Export vs. Crawling) |
|
||||||
|
| A1.2 | Wo wird das Wiki gehostet? (Cloud-SaaS, On-Premise, Azure) | Netzwerk-Zugang und Firewall-Konfiguration |
|
||||||
|
| A1.3 | Wie viele Seiten/Artikel enthält das Wiki ungefähr? | Dimensionierung der Erstindexierung und Embedding-Kosten |
|
||||||
|
| A1.4 | In welchen Formaten liegen die Inhalte vor? (reiner Text, HTML, Markdown, eingebettete PDFs/Bilder) | Bestimmt die Extraktions-Komplexität |
|
||||||
|
|
||||||
|
### A.2 Technischer Zugang
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| A2.1 | Gibt es eine REST-API oder ähnliche Schnittstelle zum Lesen der Wiki-Inhalte? | API-Zugang = deutlich weniger Aufwand als Crawling |
|
||||||
|
| A2.2 | Gibt es eine Export-Funktion? (z.B. XML-Export, PDF-Export, Datenbank-Dump) | Fallback wenn keine API vorhanden |
|
||||||
|
| A2.3 | Gibt es Authentifizierung (API-Key, OAuth, LDAP)? Welche Credentials werden benötigt? | Konfiguration des Connectors |
|
||||||
|
| A2.4 | Gibt es eine Change-API oder Webhooks, die bei Änderungen notifizieren? | Bestimmt den Aufwand für inkrementelle Updates (Pos. 5.3) |
|
||||||
|
| A2.5 | Gibt es Zugriffsbeschränkungen auf bestimmte Wiki-Bereiche? | RBAC-Überlegungen bei der Indexierung |
|
||||||
|
|
||||||
|
### A.3 Inhaltliche Abgrenzung
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| A3.1 | Soll das gesamte Wiki indexiert werden oder nur bestimmte Bereiche? | Scope-Begrenzung für Erstindexierung |
|
||||||
|
| A3.2 | Gibt es vertrauliche Inhalte, die nicht in den Chatbot einfliessen dürfen? | Datenschutz-/Compliance-Anforderung |
|
||||||
|
| A3.3 | Wie oft werden Wiki-Inhalte aktualisiert? (täglich, wöchentlich, selten) | Bestimmt die Sync-Frequenz |
|
||||||
|
| A3.4 | Welche Sprache(n) haben die Wiki-Inhalte? (Deutsch, Englisch, gemischt) | Embedding-Modell-Auswahl |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## B. ERP-System & Datenstrukturen (Ansprechpartner: Stefan)
|
||||||
|
|
||||||
|
> **Kritisch für:** Positionen 1.5, 2.2-2.3, 3.1, 4.1 -- Preprocessor-Erweiterungen und Matching-Engine.
|
||||||
|
|
||||||
|
### B.1 ERP-Identifikation
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| B1.1 | Welches ERP-System wird eingesetzt? (z.B. Abacus, SAP, Microsoft Dynamics, bexio, Sage) | Bestimmt Datenstruktur und Zugriffsmöglichkeiten |
|
||||||
|
| B1.2 | Wie werden die Daten aktuell an den Preprocessor geliefert? (direkter DB-Zugriff, API, Export-Datei) | Verständnis der bestehenden Datenpipeline |
|
||||||
|
| B1.3 | In welchem Rhythmus werden die Daten aktualisiert? (Echtzeit, täglich, wöchentlich) | Aktualität der Chatbot-Antworten |
|
||||||
|
|
||||||
|
### B.2 Kundenartikelnummern (Position 1.5)
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| B2.1 | Gibt es im ERP eine dedizierte Tabelle für Kundenartikelnummern? Wenn ja, wie heisst sie? | Preprocessor-Schema-Erweiterung |
|
||||||
|
| B2.2 | Wie ist die Zuordnung: 1 Kundenartikel → 1 ERP-Artikel, oder n:m? | Bestimmt die Mapping-Komplexität |
|
||||||
|
| B2.3 | Wie viele Kundenartikelnummern gibt es ungefähr? | Dimensionierung |
|
||||||
|
| B2.4 | Welche Felder hat die Kundenartikelnummern-Tabelle? (z.B. KundenNr, KundenArtikelNr, InterneArtikelNr, Bezeichnung) | Schema-Definition für Preprocessor |
|
||||||
|
|
||||||
|
### B.3 Bestellwesen & Materialmanagement (Positionen 3 + 4)
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| B3.1 | Welche ERP-Tabellen/Views gibt es für Bestellungen? (Bestellkopf, Bestellpositionen, Status) | Preprocessor-Erweiterung Pos. 3.1 |
|
||||||
|
| B3.2 | Gibt es eine Tabelle für Wareneingänge mit Datum und Menge? | Liefertermin-Treue-Berechnung Pos. 4.3 |
|
||||||
|
| B3.3 | Gibt es eine Preishistorie-Tabelle? Welche Felder enthält sie? (Datum, Preis, Lieferant, Währung) | Preisentwicklungs-Analyse Pos. 4.4 |
|
||||||
|
| B3.4 | Gibt es ein Lagerjournal mit Buchungsdaten? | KPI-Analyse Pos. 4.1 |
|
||||||
|
| B3.5 | Gibt es eine Bestandesbedarfsliste oder Dispositions-View? | Material-Analyse Pos. 3.4 |
|
||||||
|
| B3.6 | Gibt es Felder für "bestätigter Liefertermin" vs. "gewünschter Liefertermin"? | Termintreue-KPI Pos. 4.3 |
|
||||||
|
| B3.7 | Wie viele offene Bestellungen gibt es typischerweise gleichzeitig? | Performance-Dimensionierung |
|
||||||
|
|
||||||
|
### B.4 Datenqualität
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| B4.1 | Wie konsistent sind Lieferanten-Namen im ERP? (exakt gleich oder Varianten wie "Siemens AG" vs. "Siemens") | Matching-Qualität Pos. 2.2 |
|
||||||
|
| B4.2 | Gibt es Pflichtfelder die häufig leer sind? | Feldergänzungs-Logik Pos. 2.3 |
|
||||||
|
| B4.3 | Wie sind Preise gespeichert? (Netto, Brutto, mit/ohne MwSt., Währung) | SQL-Query-Generierung |
|
||||||
|
| B4.4 | Werden gelöschte/gesperrte Datensätze physisch oder nur logisch gelöscht? | Filter-Logik Pos. 1.1 |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## C. Kalktool (Ansprechpartner: Reto)
|
||||||
|
|
||||||
|
> **Kritisch für:** Position 2 (Kalktool) -- Höchstes Risiko in der Offerte.
|
||||||
|
|
||||||
|
### C.1 Kalktool-Vorlage
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| C1.1 | Können wir die aktuelle Kalktool-Vorlage (Kalktool_Aktuell_2026_V1.4.xlsx) erhalten? | Zielformat für Excel-Export Pos. 2.5 |
|
||||||
|
| C1.2 | Welche Spalten/Felder sind Pflicht in der Kalktool-Vorlage? | Feldergänzungs-Priorität Pos. 2.3 |
|
||||||
|
| C1.3 | Gibt es Formeln in der Vorlage, die erhalten bleiben müssen? | Komplexität des Excel-Exports |
|
||||||
|
| C1.4 | Welches Format haben die Kunden-Stücklisten typischerweise? (PDF, Excel, CSV) | Extraktions-Strategie Pos. 2.1 |
|
||||||
|
|
||||||
|
### C.2 Matching-Anforderungen
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| C2.1 | Können wir 3-5 Muster-Stücklisten von verschiedenen Kunden erhalten? | Testdaten für Matching-Engine Pos. 2.2 |
|
||||||
|
| C2.2 | Welche Identifikationsmerkmale haben Kunden-Stücklisten? (Kundenartikelnr., Hersteller-Typ, Beschreibung) | Matching-Stufen definieren |
|
||||||
|
| C2.3 | Wie hoch ist die erwartete Trefferquote beim exakten Match? (10%? 50%? 90%?) | Gewichtung exakt vs. fuzzy vs. KI |
|
||||||
|
| C2.4 | Welche Felder sollen bei nicht-eindeutigem Match als "Alternative durch KI" markiert werden? | Bestätigungs-Workflow Pos. 2.4 |
|
||||||
|
| C2.5 | Gibt es Produktgruppen, die besonders schwierig zu matchen sind? | Risikobewertung |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## D. Infrastruktur & Azure (Ansprechpartner: Stefan / IT)
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| D1 | Details zur neuen Azure-Subscription (Subscription-ID, Region, Resource Group) | Pos. 6 -- Migration |
|
||||||
|
| D2 | Gibt es Netzwerk-Einschränkungen (VPN, Private Endpoints, Firewall)? | Zugang Preprocessor ↔ ERP |
|
||||||
|
| D3 | Wer hat Admin-Zugang zur neuen Subscription? | Deployment-Planung |
|
||||||
|
| D4 | Gibt es Budget-Limits auf der Azure-Subscription? | Betriebskosten-Planung |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## E. Priorisierung & Vorgehensweise
|
||||||
|
|
||||||
|
| # | Frage | Hintergrund |
|
||||||
|
|---|---|---|
|
||||||
|
| E1 | Sollen alle 7 Positionen umgesetzt werden, oder gibt es eine Priorisierung? | Scope-Bestätigung |
|
||||||
|
| E2 | Gibt es einen gewünschten Go-Live-Termin? | Zeitplanung |
|
||||||
|
| E3 | Wie soll die UAT organisiert werden? (dedizierte Testphase, laufend, Key-User) | Testplanung |
|
||||||
|
| E4 | Wer sind die Pilot-User für den reaktivierten Bot? | UAT-Teilnehmer |
|
||||||
|
| E5 | Sollen Schulungen für Endanwender durchgeführt werden? (nicht in Offerte enthalten) | Ggf. Nachtragsofferte |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Nächste Schritte
|
||||||
|
|
||||||
|
1. **Vor dem Kick-off:** Fragenkatalog an Althaus senden, damit Antworten vorbereitet werden können
|
||||||
|
2. **Im Kick-off:** Fragen durchgehen, fehlende Antworten als Action Items festhalten
|
||||||
|
3. **Nach dem Kick-off:** Aufwandsschätzung anhand der Antworten finalisieren, insbesondere Pos. 2.2 (Matching) und Pos. 5 (Wiki)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*PowerOn AG -- Vorbereitung Anforderungsworkshop Althaus Bot v2*
|
||||||
223
docs/althaus-bot-v2-preprocessor-assessment.md
Normal file
@ -0,0 +1,223 @@
|
||||||
|
# Preprocessor Assessment -- Althaus Bot v2
|
||||||
|
|
||||||
|
**Zweck:** Technische Analyse des Preprocessing-Servers für die Aufwandsschätzung der Erweiterungen
|
||||||
|
**Erstellt:** 13. April 2026
|
||||||
|
**Quellen:** Gateway-Code-Analyse (Repo nicht lokal verfügbar: github.com/valueonag/gateway_preprocessing)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Ist-Zustand (abgeleitet aus Gateway-Code)
|
||||||
|
|
||||||
|
### 1.1 Infrastruktur
|
||||||
|
|
||||||
|
| Eigenschaft | Wert |
|
||||||
|
|---|---|
|
||||||
|
| **Host** | Azure App Service (Switzerland North) |
|
||||||
|
| **URL (Datenverarbeitung)** | `poweron-althaus-preprocess-prod-*.azurewebsites.net/api/v1/dataprocessor/update-db-with-config` |
|
||||||
|
| **URL (Abfragen)** | `poweron-althaus-preprocess-prod-*.azurewebsites.net/api/v1/dataquery/query` |
|
||||||
|
| **Authentifizierung** | `X-PP-API-Key` (Datenverarbeitung) / `X-DB-API-Key` (Abfragen) |
|
||||||
|
| **Status** | Deployed, ERP-Datenanbindung deaktiviert |
|
||||||
|
| **Quellcode** | `github.com/valueonag/gateway_preprocessing` (separates Repo) |
|
||||||
|
|
||||||
|
### 1.2 Aktuelle Tabellen-Konfiguration
|
||||||
|
|
||||||
|
Aus dem Automation-Template (`subAutomationTemplates.py`) extrahiert:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tables": [
|
||||||
|
{
|
||||||
|
"name": "Artikel",
|
||||||
|
"powerbi_table_name": "Artikel",
|
||||||
|
"steps": [
|
||||||
|
{
|
||||||
|
"keep": {
|
||||||
|
"columns": [
|
||||||
|
"I_ID", "Artikelbeschrieb", "Artikelbezeichnung",
|
||||||
|
"Artikelgruppe", "Artikelkategorie", "Artikelkürzel",
|
||||||
|
"Artikelnummer", "Einheit", "Gesperrt",
|
||||||
|
"Keywords", "Lieferant", "Warengruppe"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fillna": {
|
||||||
|
"column": "Lieferant",
|
||||||
|
"value": "Unbekannt"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Einkaufspreis",
|
||||||
|
"powerbi_table_name": "Einkaufspreis",
|
||||||
|
"steps": [
|
||||||
|
{
|
||||||
|
"to_numeric": {
|
||||||
|
"column": "EP_CHF",
|
||||||
|
"errors": "coerce"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"dropna": {
|
||||||
|
"subset": ["EP_CHF"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 1.3 Zusätzliche Tabellen (im Chatbot referenziert, aber nicht in der Config)
|
||||||
|
|
||||||
|
Aus den SQL-Beispielen in `bridges/tools.py` und `chatbot.py`:
|
||||||
|
|
||||||
|
| Tabelle | Spalten (referenziert im Code) | Joins |
|
||||||
|
|---|---|---|
|
||||||
|
| `Lagerplatz_Artikel` | `R_ARTIKEL`, `R_LAGERPLATZ`, `S_IST_BESTAND`, `S_RESERVIERTER__BESTAND` | ON `Artikel.I_ID = Lagerplatz_Artikel.R_ARTIKEL` |
|
||||||
|
| `Lagerplatz` | `I_ID`, `Lagerplatz` (Name) | ON `Lagerplatz_Artikel.R_LAGERPLATZ = Lagerplatz.I_ID` |
|
||||||
|
|
||||||
|
Diese Tabellen sind vermutlich in einer älteren Config-Version oder direkt im Preprocessor konfiguriert.
|
||||||
|
|
||||||
|
### 1.4 API-Schnittstellen
|
||||||
|
|
||||||
|
**Abfrage-API** (genutzt vom `PreprocessorConnector`):
|
||||||
|
- Methode: `POST`
|
||||||
|
- Payload: `{"query": "SELECT ..."}`
|
||||||
|
- Header: `X-DB-API-Key: <api_key>`
|
||||||
|
- Response: `{"success": true/false, "data": [...], "row_count": N, "message": "..."}`
|
||||||
|
- Einschränkung: Nur SELECT-Queries (validiert im Gateway)
|
||||||
|
|
||||||
|
**Update-API** (genutzt vom Automation-Template):
|
||||||
|
- Methode: `POST`
|
||||||
|
- Payload: `configJson` (Tabellendefinitionen + Transformationsschritte)
|
||||||
|
- Header: `X-PP-API-Key: <secret>`
|
||||||
|
- Zweck: Datenbank mit neuer Konfiguration aktualisieren
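
Zur Veranschaulichung eine Skizze des Abfrage-Aufrufs nach dem oben beschriebenen Muster (Host und API-Key sind Platzhalter; die Beispiel-Query nutzt den in Abschnitt 1.3 dokumentierten Lagerplatz-Join):

```python
# Skizze eines Aufrufs der Abfrage-API nach dem Muster des PreprocessorConnector.
# Host und API-Key sind Platzhalter; die SQL-Query nutzt den in 1.3 dokumentierten
# Join Artikel -> Lagerplatz_Artikel -> Lagerplatz.
import requests

BASE_URL = "https://<preprocessor-host>"  # Platzhalter, vgl. URL-Angaben in 1.1
API_KEY = "<X-DB-API-Key>"  # Platzhalter

def query_preprocessor(sql: str) -> list[dict]:
    resp = requests.post(
        f"{BASE_URL}/api/v1/dataquery/query",
        json={"query": sql},
        headers={"X-DB-API-Key": API_KEY},
        timeout=30,
    )
    resp.raise_for_status()
    payload = resp.json()
    if not payload.get("success"):
        raise RuntimeError(payload.get("message", "Query fehlgeschlagen"))
    return payload["data"]

# Beispiel: Bestand pro Lagerplatz; der Filter auf "Gesperrt" ist nur illustrativ,
# die Wertesemantik der Spalte ist nicht verifiziert.
bestand = query_preprocessor(
    "SELECT a.Artikelnummer, l.Lagerplatz, la.S_IST_BESTAND "
    "FROM Artikel a "
    "JOIN Lagerplatz_Artikel la ON a.I_ID = la.R_ARTIKEL "
    "JOIN Lagerplatz l ON la.R_LAGERPLATZ = l.I_ID "
    "WHERE a.Gesperrt = 0"
)
```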
|
||||||
|
|
||||||
|
### 1.5 Transformation-Steps (bekannte Operationen)
|
||||||
|
|
||||||
|
Aus der Config-JSON abgeleitet:
|
||||||
|
|
||||||
|
| Operation | Parameter | Beschreibung |
|
||||||
|
|---|---|---|
|
||||||
|
| `keep` | `columns: [...]` | Nur angegebene Spalten behalten |
|
||||||
|
| `fillna` | `column`, `value` | NULL-Werte ersetzen |
|
||||||
|
| `to_numeric` | `column`, `errors` | Spalte in numerischen Typ konvertieren |
|
||||||
|
| `dropna` | `subset: [...]` | Zeilen mit NULL in angegebenen Spalten entfernen |
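
Vermutlich bildet der Preprocessor diese Schritte auf pandas-Operationen ab; als Skizze unter dieser Annahme (das Repo war für die Analyse nicht verfügbar):

```python
# Skizze, wie die vier dokumentierten Operationen auf einem pandas-DataFrame
# aussehen könnten. Annahme: der Preprocessor arbeitet intern mit pandas.
import pandas as pd

def apply_steps(df: pd.DataFrame, steps: list[dict]) -> pd.DataFrame:
    for step in steps:
        if "keep" in step:
            df = df[step["keep"]["columns"]].copy()
        elif "fillna" in step:
            p = step["fillna"]
            df[p["column"]] = df[p["column"]].fillna(p["value"])
        elif "to_numeric" in step:
            p = step["to_numeric"]
            df[p["column"]] = pd.to_numeric(df[p["column"]], errors=p["errors"])
        elif "dropna" in step:
            df = df.dropna(subset=step["dropna"]["subset"])
    return df
```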
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Benötigte Erweiterungen (nach Position)
|
||||||
|
|
||||||
|
### 2.1 Position 1.5: Kundenartikelnummern
|
||||||
|
|
||||||
|
**Neue Tabelle: `Kundenartikelnummer`**
|
||||||
|
|
||||||
|
| Spalte (geschätzt) | Typ | Beschreibung |
|
||||||
|
|---|---|---|
|
||||||
|
| `I_ID` | INT | Primary Key |
|
||||||
|
| `R_ARTIKEL` | INT | FK auf Artikel.I_ID |
|
||||||
|
| `Kundenummer` | VARCHAR | Kundennummer |
|
||||||
|
| `Kundenartikelnummer` | VARCHAR | Kunden-eigene Artikelnummer |
|
||||||
|
| `Bezeichnung` | VARCHAR | Kundenbezeichnung (optional) |
|
||||||
|
|
||||||
|
**Config-Erweiterung:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "Kundenartikelnummer",
|
||||||
|
"powerbi_table_name": "Kundenartikelnummer",
|
||||||
|
"steps": [
|
||||||
|
{"keep": {"columns": ["I_ID", "R_ARTIKEL", "Kundenummer", "Kundenartikelnummer", "Bezeichnung"]}}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Aufwand-Bewertung:** Falls der Preprocessor neue Tabellen per Config akzeptiert: ~2-3h Config + Test. Falls neuer Code nötig: ~6-8h.
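
Damit liesse sich im Chatbot z. B. folgende Cross-Referenz-Abfrage absetzen (Spalten- und Tabellennamen wie oben geschätzt, nicht verifiziert; `query_preprocessor` wie in Abschnitt 1.4 skizziert):

```python
# Beispielhafte Cross-Referenz-Abfrage; Spaltennamen entsprechen der Schätzung oben.
kundenartikel = query_preprocessor(
    "SELECT k.Kundenartikelnummer, a.Artikelnummer, a.Artikelbezeichnung "
    "FROM Kundenartikelnummer k "
    "JOIN Artikel a ON k.R_ARTIKEL = a.I_ID "
    "WHERE k.Kundenummer = '12345'"
)
```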
|
||||||
|
|
||||||
|
### 2.2 Position 3.1: Bestellwesen (Materialmanagement 1)
|
||||||
|
|
||||||
|
**Neue Tabellen (geschätzt 3-4 Tabellen):**
|
||||||
|
|
||||||
|
| Tabelle | Wichtige Spalten | Zweck |
|
||||||
|
|---|---|---|
|
||||||
|
| `Bestellkopf` | ID, Bestellnummer, Lieferant, Bestelldatum, Status, Wunschtermin | Bestellübersicht |
|
||||||
|
| `Bestellposition` | ID, R_Bestellung, R_Artikel, Menge, Preis, Status, Bestätigter_Termin | Positionsdetails |
|
||||||
|
| `Wareneingang` | ID, R_Bestellung, R_Position, Eingangsdatum, Menge, Qualität | Lieferverfolgung |
|
||||||
|
| `Auftrag` | ID, Auftragsnummer, Kunde, R_Artikel, Menge, Termin | Betroffene Aufträge |
|
||||||
|
|
||||||
|
**Aufwand-Bewertung:** 4 Tabellen × ~4h pro Tabelle (Config + Code + Transformationen + Test) = ~16h. Bei komplexen Transformationen (Joins, Aggregationen): +4-6h.
|
||||||
|
|
||||||
|
### 2.3 Position 4.1: KPI-Daten (Materialmanagement 2)
|
||||||
|
|
||||||
|
**Neue Tabellen/Views (geschätzt 3-4):**
|
||||||
|
|
||||||
|
| Tabelle/View | Wichtige Spalten | Zweck |
|
||||||
|
|---|---|---|
|
||||||
|
| `Lagerjournal` | ID, R_Artikel, Buchungsdatum, Menge, Typ | Lagerbewegungen |
|
||||||
|
| `Preishistorie` | ID, R_Artikel, R_Lieferant, Datum, Preis, Währung | Preisentwicklung |
|
||||||
|
| `Bestandesbedarfsliste` | R_Artikel, Bedarf, Bestand, Fehlmenge, Datum | Dispositionsplanung |
|
||||||
|
| `View_Termintreue` | R_Lieferant, Wunschtermin, Bestätigt, Geliefert, Abweichung_Tage | Aggregierte KPIs |
|
||||||
|
|
||||||
|
**Aufwand-Bewertung:** 4 Tabellen/Views × ~4h = ~16h. Aggregierte Views (Termintreue): +4-6h für Berechnungslogik im Preprocessor.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Gesamtbewertung Preprocessor-Erweiterungen
|
||||||
|
|
||||||
|
### 3.1 Zusammenfassung
|
||||||
|
|
||||||
|
| Position | Neue Tabellen | Config-Aufwand | Code-Aufwand | Test | Gesamt |
|
||||||
|
|---|:-:|:-:|:-:|:-:|:-:|
|
||||||
|
| 1.5 (Kundenartikelnummern) | 1 | 1h | 3-5h | 2h | **6-8h** |
|
||||||
|
| 3.1 (Bestellwesen) | 3-4 | 2h | 8-12h | 4h | **14-18h** |
|
||||||
|
| 4.1 (KPIs) | 3-4 | 2h | 8-12h | 4h | **14-18h** |
|
||||||
|
| **Gesamt** | **7-9** | **5h** | **19-29h** | **10h** | **34-44h** |
|
||||||
|
|
||||||
|
### 3.2 Offene Fragen (Code-Review des Preprocessor-Repos erforderlich)
|
||||||
|
|
||||||
|
| # | Frage | Auswirkung |
|
||||||
|
|---|---|---|
|
||||||
|
| P1 | Unterstützt der Preprocessor neue Tabellen per Config-Erweiterung, oder muss für jede Tabelle Code geschrieben werden? | Bestimmt ob Config-only (~2h/Tabelle) oder Code (~4h/Tabelle) |
|
||||||
|
| P2 | Können aggregierte Views/Berechnungen im Preprocessor definiert werden? | Termintreue-KPI, Bestandsreichweite |
|
||||||
|
| P3 | Wie werden Joins zwischen Tabellen gehandhabt? (SQLite-seitig oder Preprocessor-seitig) | Komplexität der Cross-Table-Queries |
|
||||||
|
| P4 | Gibt es Rate-Limits oder Grössen-Limits bei der Query-API? | Performance bei komplexen KPI-Abfragen |
|
||||||
|
| P5 | Wie gross ist die aktuelle SQLite-Datenbank? Wie viele Artikel? | Dimensionierung für 8-10 neue Tabellen |
|
||||||
|
|
||||||
|
### 3.3 Empfehlung
|
||||||
|
|
||||||
|
**Vor Projektstart sollte ein Code-Review des Preprocessor-Repos durchgeführt werden** (geschätzter Aufwand: 2-4h). Dabei klären:
|
||||||
|
|
||||||
|
1. Erweiterbarkeit: Kann der Preprocessor neue Tabellen per Config akzeptieren?
|
||||||
|
2. Transformationen: Welche Operationen sind neben `keep`, `fillna`, `to_numeric`, `dropna` verfügbar?
|
||||||
|
3. Performance: Wie skaliert die SQLite-DB mit 8-10 zusätzlichen Tabellen?
|
||||||
|
4. Deployment: Wie wird der Preprocessor deployed? (CI/CD, manuell, Azure DevOps)
|
||||||
|
|
||||||
|
Das Ergebnis dieses Reviews kann die Aufwandsschätzung für Pos. 1.5, 3.1 und 4.1 um jeweils 4-6h nach oben oder unten korrigieren.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Aktueller Datenfluss (zur Referenz)
|
||||||
|
|
||||||
|
```
|
||||||
|
ERP (Althaus)
|
||||||
|
│
|
||||||
|
▼ (Power BI Export / API / DB-Zugriff -- Mechanismus unklar)
|
||||||
|
Preprocessor Server (Azure)
|
||||||
|
│
|
||||||
|
├── /api/v1/dataprocessor/update-db-with-config ← Automation-Template
|
||||||
|
│ (Tabellen laden, transformieren, in SQLite schreiben)
|
||||||
|
│
|
||||||
|
└── /api/v1/dataquery/query ← PreprocessorConnector (Gateway)
|
||||||
|
(SQL SELECT auf SQLite ausführen)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
Gateway (Chatbot LangGraph)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
React Frontend (Chat-UI)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Assessment erstellt auf Basis der Gateway-Code-Analyse. Für eine genauere Schätzung ist ein Code-Review des Preprocessor-Repos erforderlich.*
|
||||||
BIN
docs/billing-ui-tests.xlsx
Normal file
Binary file not shown.
225
docs/briefing-abacus-c-level.md
Normal file
|
|
@ -0,0 +1,225 @@
|
||||||
|
# PowerOn × Abacus — Executive Briefing
|
||||||
|
|
||||||
|
*Vertraulich · C-Level Briefing für Abacus Research AG · Stand April 2026*
|
||||||
|
|
||||||
|
**Zielgruppe:** Geschäftsleitung Abacus (Strategie, Produkt, Partnerschaften)
|
||||||
|
**Zweck:** Abacus ein klares, belastbares Bild davon geben, **wer PowerOn ist, was PowerOn/PORTA leistet und wo der strategische Hebel für eine Zusammenarbeit liegt** — inklusive konkretem Zwischenstand zur Abacus-Schnittstelle.
|
||||||
|
**Lesedauer:** 7 Minuten.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Management Summary (60 Sekunden)
|
||||||
|
|
||||||
|
- **PowerOn ist eine Schweizer KI-Plattform** aus Zürich, die Unternehmen KI-gestützte Geschäftsprozesse **sicher, mandantengetrennt und datenschutzkonform** zur Verfügung stellt.
|
||||||
|
- Das Kernprodukt **PORTA** (Powerful Orchestration & Real-Time Automation) ist eine **in der Schweiz gehostete Multi-Mandanten-Plattform** mit vier Kernfunktionen in einem konsistenten System:
|
||||||
|
- **Multi-LLM-Orchestrierung** (Anthropic, OpenAI, Mistral, Perplexity, Tavily, Private LLM – kein Vendor-Lock-in)
|
||||||
|
- **Workflow-Automatisierung** (Graph-basierter Flow-Editor, Scheduler, Execution-Engine)
|
||||||
|
- **Datenneutralisierung** (zentrales AI-Gate, optional Hard-Mode, optional Private LLM)
|
||||||
|
- **Integriertes Audit- und Compliance-Logging** (DSGVO/revDSG, lückenloser Audit-Trail)
|
||||||
|
- PORTA ist modular aufgebaut (**Feature-Store**): Mandanten schalten nur die Module frei, die sie brauchen – u. a. **AI-Chat-Workspace, Treuhand-/Buchhaltungs-Modul mit Abacus-Anbindung, Kommunikations-/Coaching-Modul, Teams-Meeting-Bot, Machbarkeitsstudie Immobilien, Workflow-Designer / Automation-Studio**. Diese Bausteine laufen **produktiv** – der Plattform-Unterbau ist gebaut, Kundenprojekte beschränken sich auf **Konfiguration, Datenanbindung, Tuning, Schulung und Inbetriebnahme**.
|
||||||
|
- **Das Treuhand-Modul besitzt bereits eine abstrahierte Buchhaltungs-Schnittstelle** mit produktiven Connectoren für **Run my Accounts** und **Bexio** – sowie einem **bereits implementierten, lauffähigen Abacus-Connector** (OAuth 2.0, OData V4, Kontenplan, Buchungs-Push, Journal-Read, Debitoren, Kreditoren). Es fehlt nur der produktive Feinschliff mit einem Pilotkunden.
|
||||||
|
- **Strategischer Kern-Punkt für Abacus:** PowerOn ist **nicht** ein weiterer ERP-Wettbewerber. PowerOn ist die **KI- und Workflow-Schicht oberhalb** des ERP. Für Abacus-Kunden bedeutet das: Abacus bleibt System of Record – PORTA liefert Intelligenz, Automatisierung und ein modernes User-Interface auf den Daten, die in Abacus entstehen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Wer ist PowerOn?
|
||||||
|
|
||||||
|
### 2.1 Firma
|
||||||
|
|
||||||
|
- **PowerOn AG**, Birmensdorferstrasse 94, 8003 Zürich – `www.poweron.swiss`
|
||||||
|
- Schweizer Unternehmen, Schweizer Datenhaltung, Schweizer Kundenfokus (DACH).
|
||||||
|
- Entstanden aus der **ValueOn AG** (Strategie-/Beratungshaus) – derzeit in strukturierter Verselbständigung zur eigenständigen Organisation.
|
||||||
|
|
||||||
|
### 2.2 Gründer-/Kernteam
|
||||||
|
|
||||||
|
| Person | Rolle | Profil |
|
||||||
|
|---|---|---|
|
||||||
|
| **Patrick Motsch** | CEO / CTO | Langjährige Erfahrung in der Leitung komplexer IT-Implementierungen und innovativer Softwareentwicklung. |
|
||||||
|
| **Ida Dittrich** | Product Architect | Verbindet wissenschaftliches Know-how mit praktischer IT-Erfahrung und treibt die Produkt-/Architekturentscheide. |
|
||||||
|
| **Stephan Schellworth** | Business Integration | Verbindet strategisches Denken mit praxisnaher Projektsteuerung; Ansprechpartner für Partnerschaften und Kundenintegrationen. |
|
||||||
|
|
||||||
|
### 2.3 Reifegrad & Fokus
|
||||||
|
|
||||||
|
- **Produktstatus:** Early Product-Market-Fit mit produktiv laufenden Features; aktiv im Pilotkunden-Modus; Seed-Runde in Vorbereitung.
|
||||||
|
- **Zielmarkt:** mittelständische Unternehmen in datenschutzsensiblen Branchen – **Treuhand, Finanzdienstleistungen, Immobilien, Professional Services, Legal, Healthcare** – also eine sehr hohe Überlappung mit Abacus-Kernkunden.
|
||||||
|
- **Go-to-Market:** aktuell DACH; in einem strategischen Verbund mit **ValueOn (Strategie-Beratung), Aumico (Frontend/MVP) und Modeso (Hosting/SRE)** aufgestellt – End-to-End abdeckbar, ohne dass Abacus technologische oder operative Lücken schliessen müsste.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Was macht PowerOn – und was kann PORTA?
|
||||||
|
|
||||||
|
### 3.1 Die Kernidee in einem Satz
|
||||||
|
|
||||||
|
> **PowerOn liefert Unternehmen einen sicheren KI-Arbeitsplatz, der ihre Prozesse versteht, ihre Systeme anbindet und wiederkehrende Arbeit automatisiert – ohne dass sensible Daten unkontrolliert in fremde KI-Dienste abfliessen.**
|
||||||
|
|
||||||
|
### 3.1.1 Die vier Kernfunktionen von PORTA
|
||||||
|
|
||||||
|
PORTA bündelt in **einer** in der Schweiz gehosteten Multi-Mandanten-Plattform das, was sonst über mehrere isolierte Werkzeuge verteilt ist:
|
||||||
|
|
||||||
|
| Kernfunktion | Was sie leistet | Status |
|
||||||
|
|---|---|---|
|
||||||
|
| **Multi-LLM-Orchestrierung** | Zentrale Modellauswahl und Routing über mehrere Provider (Anthropic, OpenAI, Mistral, Perplexity, Tavily, Private LLM). Billing-Preflight, Streaming, Fallbacks, Operation-Typ-basierte Modellwahl. | **Produktiv** |
|
||||||
|
| **Workflow-Automatisierung** | Graph-basierter Flow-Editor (n8n-Style), Execution-Engine mit topologischer Sortierung, Scheduler, UDM-Dokumentenmodell, drei Modi (Learning / Actionplan / Automation). | **Produktiv** |
|
||||||
|
| **Datenneutralisierung** | Zentrales AI-Gate, das Prompt, RAG-Kontext und Messages vor jedem externen Modellaufruf pseudonymisiert. Hard-Mode blockiert Calls, wenn Neutralisierung nicht möglich. Private-LLM-Option für volle On-Prem-Variante. | **Produktiv** |
|
||||||
|
| **Audit- und Compliance-Logging** | Integrierter, lückenloser Audit-Trail für Zugriffe, Admin-Aktionen, Berechtigungs- und Verschlüsselungs-Events sowie KI-Datenflüsse. DSGVO-/revDSG-Betroffenenrechte als Self-Service. | **Produktiv** |
|
||||||
|
|
||||||
|
### 3.1.2 Produktiver Abdeckungsgrad (Stand heute)
|
||||||
|
|
||||||
|
Die für Treuhand, Finanz und KMU typischen Bausteine sind **bereits produktiv in der Plattform** und werden nicht erst für ein neues Projekt entwickelt:
|
||||||
|
|
||||||
|
- Buchhaltungs-Modul mit **Abacus-Anbindung** (sowie RMA und Bexio)
|
||||||
|
- **Coaching- und Trainings-Modul** (Kommunikations-Coach, Voice, Dossier, Gamification)
|
||||||
|
- **KI-Arbeitsplatz** (Power Desktop / AI-Workspace mit RAG und Agent-Tools)
|
||||||
|
- **Datenneutralisierung** (zentrales AI-Gate + Private-LLM-Option)
|
||||||
|
- **Workflow-Designer** (grafischer Flow-Editor inkl. Scheduler und Execution-Engine)
|
||||||
|
|
||||||
|
Für einen Abacus-nahen Kundenfall oder ein gemeinsames Pilot-Engagement reduziert sich der Projektaufwand damit auf **kundenspezifische Konfiguration, Datenanbindung, Tuning, Schulung und Inbetriebnahme** – nicht auf Plattform-Grundlagenentwicklung. Das ist der entscheidende Geschwindigkeitsvorteil gegenüber „We-build-it-from-scratch"-Angeboten.
|
||||||
|
|
||||||
|
### 3.2 Die fünf Prinzipien hinter „Erfolgreichem KI-Einsatz"
|
||||||
|
|
||||||
|
1. **Use-Cases zuerst:** Schrittweise Einführung statt Big-Bang. Mandanten aktivieren modular die Features, die sie brauchen.
|
||||||
|
2. **Datenschutz by Design:** Ein zentrales AI-Gate neutralisiert sensible Inhalte *vor* jedem externen Modell-Aufruf. Option: komplett lokaler Betrieb über Private LLM.
|
||||||
|
3. **Berechtigungen:** Vierstufiges RBAC (System → Mandant → Feature → Feature-Instanz), granular pro Aktion (Lesen/Schreiben/Bearbeiten/Löschen), vollständige Mandantentrennung serverseitig.
|
||||||
|
4. **Verbindungen:** Toolbox-Registry mit offenen Connectoren zu Microsoft 365, Google Workspace, SharePoint, ClickUp, Jira, E-Mail/SMS, Websuche, Swiss-Topo/Geo-Systemen und **Buchhaltungs-Systemen (RMA, Bexio, Abacus)**.
|
||||||
|
5. **Regeln / Ethik:** Lückenloser Audit-Trail, DSGVO-Betroffenenrechte als Self-Service, kein Training mit Kundendaten.
|
||||||
|
|
||||||
|
### 3.3 PORTA — Feature-Landkarte (Auszug)

| Feature | Was es tut | Relevanz für Abacus-Kundschaft |
|---|---|---|
| **Power Desktop / AI-Workspace** | KI-Chat mit RAG über Firmendokumente, Editor, Playground. 40+ Agent-Tools in thematischen Toolboxes. | Sofort-Nutzen für Treuhänder/Berater, die mit Dokumenten arbeiten. |
| **Treuhand-Modul** | Positionen, Dokumente, Expense Import, Scan/Upload, Buchhaltungs-Sync. Pluggable Connector-Architektur. | **Direkter Touchpoint zu Abacus** – siehe Kapitel 5. |
| **Automation Studio (n8n-Style Flow Editor)** | Graphical Flow Editor, Scheduler, Workflow-Runs, UDM-Dokumentenmodell. | Automatisiert Prozesse um/auf Abacus-Daten (Freigaben, Reports, Benachrichtigungen). |
| **Kommunikations-Coach** | KI-gestütztes Gesprächstraining mit Voice (STT/TTS), Dossier, Gamification. | Sales-Coaching, Kundenkommunikation, Onboarding. |
| **Teams-Meeting-Bot** | Nimmt an Teams-Meetings teil, transkribiert, antwortet kontextbezogen. | Meeting-Protokolle, Folge-Aufgaben automatisch aus Gesprächen ableiten. |
| **Machbarkeitsstudie Real Estate** | Extrahiert BZO/Parzellen-Daten und bewertet Immobilienpotenziale. | Spezialisiertes Branchen-Modul (Immobilien-Treuhand, Verwaltungen). |
| **Chatbot / Knowledge Retrieval** | RAG über Firmenwissen, semantische Suche via pgvector (Skizze unter der Tabelle). | Interner Helpdesk, Dokumenten-Q&A. |
| **Neutralization / Private LLM** | Pseudonymisiert PII/Geschäftsgeheimnisse vor externen KI-Calls oder hält Daten komplett lokal. | Zwingend für Treuhand/Finanz-Kontext. |
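
Die in der Zeile „Chatbot / Knowledge Retrieval" erwähnte semantische Suche via `pgvector` lässt sich vereinfacht so skizzieren; Tabellen- und Spaltennamen (`document_chunks`, `embedding`) sind Annahmen, nicht das tatsächliche Schema:

```python
import psycopg  # PostgreSQL-Treiber (psycopg 3)

def semantic_search(conn: psycopg.Connection, mandate_id: str,
                    query_embedding: list[float], k: int = 5):
    """Nächste-Nachbarn-Suche über den pgvector-Operator <=> (Kosinus-Distanz),
    gefiltert auf den Mandanten."""
    vec_literal = "[" + ",".join(f"{x:.6f}" for x in query_embedding) + "]"
    cur = conn.execute(
        """
        SELECT id, chunk_text, embedding <=> %s::vector AS distance
        FROM document_chunks
        WHERE mandate_id = %s
        ORDER BY embedding <=> %s::vector
        LIMIT %s
        """,
        (vec_literal, mandate_id, vec_literal, k),
    )
    return cur.fetchall()
```
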
### 3.4 Technische Basis (für das technische Gegenüber bei Abacus)

- **Backend (Gateway):** FastAPI/Python, PostgreSQL inkl. `pgvector` für Embeddings.
- **Frontend (Nyla):** React/TypeScript, Vite.
- **AI-Core:** Multi-Provider (Anthropic, OpenAI, Mistral, Perplexity, Tavily, Private LLM) — **Modellunabhängigkeit, kein Vendor-Lock-in**.
- **Architektur:** saubere Schichtung **Connectors → Interfaces → Services → ServiceCenter** mit zentraler Orchestrierung, `PublicService`-Wrapper für kontrolliertes API-Surface.
- **Workflow-Engine:** eigene Graph-Execution-Engine (topologische Sortierung, Transit-Routing, Schema-Validierung, Resume), drei Modi (Learning, Actionplan, Automation) – eine stark vereinfachte Skizze der Ausführungslogik folgt nach dieser Liste.
- **Security:** AES/Fernet + PBKDF2-HMAC-SHA256 für Secrets, JWT + Cookie-Session, CSRF, Rate-Limiting, parametrisierte Queries, RBAC serverseitig. Orientierung an DSGVO, revDSG und OWASP Top 10. Eine formale ISO-27001-Zertifizierung liegt noch nicht vor; die technische Basis dafür ist vorhanden.
- **Betrieb:** Containerisiert, cloud-native, hosted bei Modeso (Partner) auf Google Cloud Infrastruktur, Deployment-Pipelines via GitHub Actions.
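
Die oben erwähnte Ausführung per topologischer Sortierung lässt sich stark vereinfacht so skizzieren (Kahn-Algorithmus); Namen und Signaturen sind angenommen und geben nicht den tatsächlichen Engine-Code wieder:

```python
from collections import deque
from typing import Any, Callable

def execute_flow(nodes: dict[str, Callable[[list[Any]], Any]],
                 edges: list[tuple[str, str]]) -> dict[str, Any]:
    """Führt einen gerichteten Flow-Graphen in topologischer Reihenfolge aus."""
    indegree = {n: 0 for n in nodes}
    successors: dict[str, list[str]] = {n: [] for n in nodes}
    for src, dst in edges:
        successors[src].append(dst)
        indegree[dst] += 1

    ready = deque(n for n, d in indegree.items() if d == 0)
    results: dict[str, Any] = {}
    while ready:
        node = ready.popleft()
        upstream = [results[p] for p in nodes if node in successors[p]]
        results[node] = nodes[node](upstream)        # Knoten mit Upstream-Ergebnissen ausführen
        for nxt in successors[node]:
            indegree[nxt] -= 1
            if indegree[nxt] == 0:
                ready.append(nxt)

    if len(results) != len(nodes):
        raise ValueError("Zyklus im Flow-Graphen – Ausführung abgebrochen")
    return results
```

Transit-Routing, Schema-Validierung und Resume (siehe Aufzählung oben) sind in dieser Skizze bewusst weggelassen.
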

---

## 4. Differenzierung (warum nicht Microsoft Copilot oder n8n?)

| Anforderung | Microsoft Copilot / ChatGPT Enterprise | n8n / Zapier | **PowerOn PORTA** |
|---|---|---|---|
| Datenschutz-Neutralisierer | — | — | **Ja, zentral am AI-Gate** |
| Eigenes/lokales LLM möglich | Teilweise | — | **Ja, Private-LLM-Connector** |
| Multi-Provider, kein Lock-in | Nein | Ja | **Ja** |
| Business-User-fähig (ohne Entwickler) | Ja | Nein | **Ja** |
| Workflow- + Chat- + RAG in einer Plattform | Nein | Nur Workflow | **Ja** |
| Swiss-hosted, Swiss-built | Nein | Nein | **Ja** |
| Branchenmodule (Treuhand, Immobilien, …) | — | — | **Ja, Feature-Store** |
| Direkte Buchhaltungs-Integration | — | Generisch | **Ja (RMA, Bexio, Abacus ready)** |

---
## 5. Die Abacus-Schnittstelle — konkreter Stand
Das ist der wichtigste Abschnitt für Abacus. **PowerOn hat die Abacus-Schnittstelle nicht nur sondiert, sondern bereits implementiert** – im Modul `trustee/accounting/connectors/accountingConnectorAbacus.py`.
### 5.1 Was bereits umgesetzt ist

| Bereich | Stand | Technik |
|---|---|---|
| Authentifizierung | **Implementiert** | OAuth 2.0 Client Credentials (Service User) mit OIDC-Discovery (`/.well-known/openid-configuration`), Token-Caching, automatischer Refresh |
| Datenmodell (Abacus ↔ PORTA) | **Implementiert** | Entity-API via OData V4, pro Mandant konfigurierbare `apiBaseUrl` und `clientName` |
| Kontenplan (`Accounts`) | **Implementiert** | Paginiertes Auslesen inkl. `@odata.nextLink`, Mapping auf einheitliches `AccountingChart`-Format |
| Buchung erfassen (`GeneralJournalEntries`) | **Implementiert** | POST mit Mehrzeilen-Journal, Debit/Credit/TaxCode/CostCenter, Rücklieferung `externalId` |
| Buchungsstatus lesen | **Implementiert** | GET auf `GeneralJournalEntries({id})` |
| Journal lesen (Zeitraum-/Filter) | **Implementiert** | `$filter` auf `JournalDate`, paginiertes Streaming |
| Stammdaten | **Implementiert** | `Debtors`, `Creditors` |
| Sicherheit | **Implementiert** | Secrets verschlüsselt gespeichert (`TrusteeAccountingConfig.encryptedConfig`), Plugin-Discovery identisch zu Bexio/RMA |
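
Eine stark vereinfachte Skizze, wie OIDC-Discovery, Client-Credentials-Token-Bezug mit Caching und das paginierte OData-Lesen zusammenspielen können. Der Entity-Pfad und die Feldnamen sind hier Annahmen und nicht der Code aus `accountingConnectorAbacus.py`:

```python
import time
import requests  # synchroner HTTP-Client, nur zur Illustration

class AbacusConnectorSketch:
    """Skizze: OAuth 2.0 Client Credentials via OIDC-Discovery plus paginiertes
    Lesen einer OData-V4-Entität über @odata.nextLink."""

    def __init__(self, api_base_url: str, client_id: str, client_secret: str):
        self.api_base_url = api_base_url.rstrip("/")
        self.client_id = client_id
        self.client_secret = client_secret
        self._token: str | None = None
        self._token_expires_at = 0.0

    def _access_token(self) -> str:
        if self._token and time.time() < self._token_expires_at - 60:
            return self._token                       # Token-Cache mit Sicherheitsmarge
        oidc = requests.get(f"{self.api_base_url}/.well-known/openid-configuration",
                            timeout=10).json()
        resp = requests.post(
            oidc["token_endpoint"],
            data={"grant_type": "client_credentials",
                  "client_id": self.client_id,
                  "client_secret": self.client_secret},
            timeout=10,
        )
        resp.raise_for_status()
        payload = resp.json()
        self._token = payload["access_token"]
        self._token_expires_at = time.time() + payload.get("expires_in", 300)
        return self._token

    def iter_accounts(self, client_name: str):
        """Kontenplan paginiert lesen und @odata.nextLink folgen (Pfad angenommen)."""
        url = f"{self.api_base_url}/{client_name}/Accounts"
        while url:
            page = requests.get(
                url, headers={"Authorization": f"Bearer {self._access_token()}"},
                timeout=30,
            ).json()
            yield from page.get("value", [])
            url = page.get("@odata.nextLink")
```

Fehlerbehandlung, Mapping auf `AccountingChart` sowie die übrigen Entitäten (`GeneralJournalEntries`, `Debtors`, `Creditors`) sind hier bewusst weggelassen.
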
### 5.2 Architektur-Prinzip

Die Connector-Schicht ist **abstrahiert** (`BaseAccountingConnector`). Alle Buchhaltungs-Integrationen teilen sich dieselben Datenmodelle (`AccountingBooking`, `AccountingBookingLine`, `AccountingChart`, `SyncResult`) und werden über eine **Plugin-Registry** discovered (eine schematische Skizze folgt nach der Liste). Das heisst:

- Jeder neue Connector (Abacus, SAP Business One, Sage etc.) wird ohne Änderung am Kernsystem angeflanscht.
- Abacus steht **auf Augenhöhe** mit Bexio und Run my Accounts im Produkt.
- Kunden können in der gleichen PORTA-Oberfläche zwischen den Systemen wählen bzw. umziehen.
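
Schematisch lässt sich das Muster etwa so skizzieren (vereinfachte Annahme, nicht der Produktivcode; `uploadDocument` ist laut Abschnitt 5.3 im Basis-Interface vorgesehen):

```python
from abc import ABC, abstractmethod
from typing import Type

class BaseAccountingConnector(ABC):
    """Gemeinsame Schnittstelle aller Buchhaltungs-Connectoren (vereinfacht)."""
    system_name: str = "base"

    @abstractmethod
    def get_chart_of_accounts(self): ...        # liefert ein AccountingChart

    @abstractmethod
    def create_booking(self, booking): ...      # nimmt ein AccountingBooking, liefert SyncResult

    def upload_document(self, booking_id: str, content: bytes, filename: str):
        raise NotImplementedError("im Basis-Interface vorgesehen, pro Connector optional")

_REGISTRY: dict[str, Type[BaseAccountingConnector]] = {}

def register_connector(cls: Type[BaseAccountingConnector]) -> Type[BaseAccountingConnector]:
    """Plugin-Discovery: neue Systeme werden registriert, der Kern bleibt unverändert."""
    _REGISTRY[cls.system_name] = cls
    return cls

@register_connector
class AbacusConnectorStub(BaseAccountingConnector):
    system_name = "abacus"
    def get_chart_of_accounts(self): ...
    def create_booking(self, booking): ...
```

Ein weiterer Connector (etwa SAP Business One oder Sage) käme in diesem Muster lediglich als zusätzliche registrierte Klasse hinzu.
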
### 5.3 Was noch offen ist

- **Produktiv-Pilot mit einem realen Abacus-Mandanten** (Credentials, Mandantenstruktur, Konto-Mapping, Kostenstellen-Logik, Beleg-Anhänge via Dokument-Upload).
- **Feinheiten**: Mehrwährung, spezifische Abacus-Customizings, Dokument-Anhänge an Buchungen (`uploadDocument` ist im Basis-Interface vorgesehen), Rückkanal für Freigabe-Workflows.
- **Zertifizierung/Partner-Listing** auf Abacus-Seite.

### 5.4 Projektcharakter bei einem gemeinsamen Kundenengagement

Weil die Plattform-Bausteine (Buchhaltungs-Modul mit Abacus-Anbindung, KI-Arbeitsplatz, Datenneutralisierung, Workflow-Designer, Coaching-Modul) **produktiv laufen**, reduziert sich ein gemeinsames Kundenprojekt auf klar abgrenzbare, planbare Tätigkeiten – nicht auf Plattform-Neuentwicklung:

| Aufwandsblock | Inhalt |
|---|---|
| **Konfiguration** | Aktivierung der benötigten PORTA-Module pro Mandant, Rollen-/RBAC-Modell, Feature-Instanzen, Branding |
| **Datenanbindung** | Abacus-Credentials (OAuth 2.0), Kontenplan-Mapping, Debitoren/Kreditoren-Synchronisation, ggf. weitere Quellen (SharePoint, Mail, DMS) |
| **Tuning** | Prompt-Tuning für die konkreten Use-Cases, Neutralisierungs-Regeln auf Kundenebene, Modellauswahl pro Operation |
| **Schulung** | Onboarding Endanwender, Admin-Training, Enablement für Treuhand-Teams |
| **Inbetriebnahme** | Pilotbetrieb, Abnahme, Go-Live, Hyper-Care, Hand-Over an Betrieb (Modeso / Abacus-Partnerbetrieb) |

Das macht ein JV-Angebot an einen Abacus-Endkunden **kalkulierbar und schnell umsetzbar** – ein Setup, das in Wochen, nicht in Quartalen live geht.

### 5.5 Warum das für Abacus strategisch interessant ist

1. **Keine Konkurrenz, echte Ergänzung:** PORTA schreibt in Abacus, es ersetzt es nicht. Abacus bleibt das System of Record.
2. **Moderne UI-Schicht für Abacus-Kunden:** Treuhänder, die heute für KI-Features zu anderen Werkzeugen greifen, bleiben im Abacus-Ökosystem.
3. **Generator für Beleg-Volumen in Abacus:** PORTA verarbeitet Scans, Spesen, Dokumente automatisch und erzeugt saubere Buchungen in Abacus. Das erhöht die Nutzungstiefe pro Abacus-Mandant.
4. **Schweizer Stack Ende-zu-Ende:** Schweizer ERP (Abacus) × Schweizer KI-Plattform (PowerOn) × Schweizer Hosting (Modeso auf GCP CH) – ein seltenes Alleinstellungsmerkmal im Markt.
5. **Datenschutz-Thema ist vorgelöst:** Der Neutralisierer ist für genau das Szenario gebaut, das Abacus-Kunden (KMU, Treuhand, Finanz) am meisten Sorge macht, wenn sie über KI nachdenken.

---

## 6. Mögliche Joint-Venture-Thesen

Zur Vorbereitung der nächsten Abacus-Gespräche – nicht abschliessend, sondern als Diskussionsgrundlage.

### These A — „Abacus als strategischer Go-To-Market-Kanal"
PowerOn liefert das KI-/Workflow-Produkt, Abacus öffnet die Tür zur bestehenden Treuhand-/KMU-Kundschaft. Co-Marketing, gemeinsame Referenzkunden, Listing im Abacus-Ökosystem.
### These B — „Abacus-Branded KI-Layer"
PORTA wird als Abacus-gelabeltes Modul („AbaAI", „Abacus Intelligence", o. ä.) angeboten. Abacus kontrolliert Pricing und Packaging gegenüber dem Endkunden, PowerOn bleibt die technische Plattform-Basis.
### These C — „Gemeinsame Produktentwicklung mit Fokus Treuhand"
Tiefe Integration des PowerOn-Treuhand-Moduls mit Abacus AbaWeb/AbaNinja – inklusive Automation-Templates für typische Treuhand-Use-Cases (Kreditorenflut, Spesenimport, MwSt-Abstimmung, Mandats-Reporting).
### These D — „Beteiligung / Minderheits-Invest"
Abacus beteiligt sich an der anstehenden Seed-Runde und sichert sich damit strategische Einflussmöglichkeiten, ohne PowerOn als eigenständiges Unternehmen zu vereinnahmen.
Alle vier Thesen sind kompatibel und können gestaffelt umgesetzt werden (A → C → B → ggf. D).

---

## 7. Empfohlene nächste Schritte

| # | Schritt | Owner | Zeithorizont |
|---|---|---|---|
| 1 | Technisches Deep-Dive: Live-Demo des Abacus-Connectors auf einer Abacus-Testinstanz | PowerOn (P. Motsch) × Abacus (Tech/Produkt) | 2 Wochen |
| 2 | Gemeinsamer Pilot-Kunde aus dem Abacus-Treuhand-Segment | Abacus (Sales) × PowerOn (S. Schellworth) | 4–6 Wochen |
| 3 | Strategie-Workshop zu JV-Modell (Thesen A–D) | beide GL | 4 Wochen |
| 4 | NDA + DPA für vertiefte technische Zusammenarbeit | Legal beider Seiten | sofort |
| 5 | Gemeinsamer Messeauftritt / Webinar Treuhand-KI | Marketing beider Seiten | Q3 2026 |

---

## 8. Kontakt

**PowerOn AG**

Birmensdorferstrasse 94 · 8003 Zürich · Schweiz

`www.poweron.swiss` · `info@poweron.swiss`

- **Patrick Motsch** – CEO / CTO – Produkt- und Technikthemen
- **Stephan Schellworth** – Business Integration – Partnerschaften und Kundenintegration
- **Ida Dittrich** – Product Architect – Architektur und Roadmap

---
*Dieses Dokument ist eine konsolidierte Aufbereitung des aktuellen Produkt- und Technikstands von PowerOn PORTA auf Basis der internen Wiki-Kanon-Seiten (`a-strategy/product-vision.md`, `a-strategy/product-strategy.md`, `b-reference/product.md`, `b-reference/gateway/architecture.md`, `b-reference/gateway/ai-agent.md`, `b-reference/platform/neutralization.md`, `e-compliance/security-overview.md`) sowie einer direkten Code-Verifikation im Gateway-Repository (Stand April 2026). Angaben ohne Gewähr – für verbindliche Zusicherungen gelten die jeweiligen Vertragsvereinbarungen.*

docs/brochure-poweron-investor-clevel.md · Normal file · @@ -0,0 +1,100 @@

# PowerOn – KI-gestützte Automatisierung
|
||||||
|
*Fertiger Copy-Stand für Canva / PowerPoint / PDF-Export. 5 Folien.*
|
||||||
|
|
||||||
|
**Schreibweise:** durchgängig **PowerOn** (nicht PowerON).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 1 von 5 – Intro
|
||||||
|
|
||||||
|
### PowerOn
|
||||||
|
|
||||||
|
**KI, die Ihre Kapazität freisetzt.**
|
||||||
|
|
||||||
|
Von manuellen Prozessen zu KI-unterstützten Abläufen – schnell, konkret, sicher.
|
||||||
|
|
||||||
|
**www.poweron.swiss · info@poweron.swiss**
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Titel-Layout, PowerOn-Logo zentriert, keine Tabellen. Hintergrund clean, ggf. dezentes Grafik-Element.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 2 von 5 – Der Weg zur KI-gestützten Automatisierung
|
||||||
|
|
||||||
|
### Verschwenden Sie Ihre kostbare Kapazität nicht – steigern Sie Ihre Innovationskraft
|
||||||
|
|
||||||
|
**Von aufwendigen manuellen Prozessen zu KI-unterstützten automatisierten Prozessen**
|
||||||
|
|
||||||
|
| Schritt 1 | Schritt 2 | Schritt 3 | Schritt 4 |
|
||||||
|
| --- | --- | --- | --- |
|
||||||
|
| **Verstehen der spezifischen Anforderungen** | **Mögliche KI-Unterstützung identifizieren** | **Komplexität reduzieren** | **Vertrauenswürdige Informationen** |
|
||||||
|
| Wir analysieren, welche Prozesse manuell, repetitiv oder fehleranfällig sind. | Wir prüfen, wo KI konkret unterstützen oder Aufgaben übernehmen kann. | Unnötige Schritte werden eliminiert und Schnittstellen vereinfacht. | Die KI arbeitet ausschliesslich mit geprüften, klar definierten Daten. |
|
||||||
|
|
||||||
|
**Manuell → Automatisiert**
|
||||||
|
|
||||||
|
### KI wird Ihr Assistent
|
||||||
|
|
||||||
|
| Daten-Extraktion | Fraud Detection | Compliance Check | Smarte Freigabe | Prozessführung |
|
||||||
|
| --- | --- | --- | --- | --- |
|
||||||
|
| Relevante Inhalte automatisch aus Dokumenten gewinnen | Auffälligkeiten frühzeitig erkennen | Regelwerke automatisiert prüfen | Freigabeprozesse mit KI-Empfehlung beschleunigen | Abläufe Schritt für Schritt begleiten |
|
||||||
|
|
||||||
|
**schnell – konkret**
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Vier Schritte als horizontaler Pfeil (links → rechts). Darunter die fünf KI-Outputs als Icon-Leiste. Claim „schnell – konkret" rechts unten.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 3 von 5 – Ihre Erfolgsstory – gezielt und fokussiert
|
||||||
|
|
||||||
|
| Ihre Herausforderung | Unser Vorgehen | Ihr Ergebnis |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| Sie wissen, dass Digitalisierung und KI wichtig sind – aber nicht, wo Sie am wirkungsvollsten ansetzen. | **Fragebogen** – gezielte Vorerhebung Ihrer Ausgangslage | **Handlungsfelder mit Massnahmen** – Wo liegt der grösste Hebel? |
|
||||||
|
| | **Interview** – vertiefte Analyse Ihrer Prozesse und Engpässe | **Kausalnetz mit Abhängigkeiten** – Welche Massnahmen bauen aufeinander auf? |
|
||||||
|
| | **Initial-Workshop** – gemeinsame Erarbeitung mit Ihrem Team | **Priorisierte Umsetzungsroadmap** – In welcher Reihenfolge vorgehen? |
|
||||||
|
| | **Analyse** – Synthese und Aufbereitung durch PowerOn | **Massnahmen-Steckbrief** – Jede Massnahme einzeln beschrieben und umsetzbar |
|
||||||
|
| | | → **Digitalisierung** und **Einsatz von KI** |
|
||||||
|
|
||||||
|
```mermaid
flowchart LR
    subgraph vorgehen [Unser Vorgehen]
        Fragebogen --> Interview --> Workshop[Initial-Workshop] --> Analyse
    end
    subgraph ergebnis [Ihr Ergebnis]
        Analyse --> Handlungsfelder
        Handlungsfelder --> Kausalnetz
        Kausalnetz --> Roadmap[Priorisierte Roadmap]
        Roadmap --> Steckbrief[Massnahmen-Steckbrief]
    end
```
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Drei-Spalten-Layout. Links: Fragezeichen-Symbolik für die Herausforderung. Mitte: Trichter von oben (Fragebogen) nach unten (Analyse). Rechts: Ergebnis-Artefakte als aufsteigende Liste.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 4 von 5 – Das halten Sie am Ende in der Hand
|
||||||
|
|
||||||
|
| Handlungsfelder mit Massnahmen | Kausalnetz – Massnahmen mit Abhängigkeiten |
|
||||||
|
| --- | --- |
|
||||||
|
| Identifizierte Bereiche mit konkreten Massnahmen, zugeordnet zu Ihren Geschäftszielen. | Visualisierung der Wechselwirkungen zwischen Massnahmen – inklusive Engpässe und Voraussetzungen. |
|
||||||
|
|
||||||
|
| Priorisierte Umsetzungsroadmap | Steckbrief je Massnahme |
|
||||||
|
| --- | --- |
|
||||||
|
| Zeitliche Abfolge mit klarer Priorisierung – von Quick Wins bis zu strategischen Initiativen. | Beschreibung, Ziel, Aufwand, Abhängigkeiten und nächste Schritte – pro Massnahme einzeln dokumentiert. |
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** 2×2-Raster, vier gleichgrosse Kacheln. Jede Kachel mit Titel und einem Satz. Optional je ein abstraktes Icon.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 5 von 5 – Erfolgreicher Einsatz von KI
|
||||||
|
|
||||||
|
### Einsatz von KI
|
||||||
|
|
||||||
|
| Prinzip | Erklärung |
|
||||||
|
| --- | --- |
|
||||||
|
| **Datenschutz** | Keine sensitiven Daten gelangen nach aussen. Die Verarbeitung bleibt innerhalb klar definierter Grenzen. |
|
||||||
|
| **Klar definierte Use-Cases** | KI wird nur dort eingesetzt, wo der Anwendungsfall geprüft und freigegeben ist. |
|
||||||
|
| **Einfache Anbindung** | Bestehende Informationsquellen und Agentensysteme lassen sich ohne grossen Aufwand verbinden. |
|
||||||
|
| **Vertrauensvoller, fairer Einsatz** | Die KI arbeitet nachvollziehbar. Ergebnisse sind überprüfbar, Entscheidungen bleiben beim Menschen. |
|
||||||
|
| **Zugriff nur auf definierte Daten** | Die KI hat ausschliesslich Zugriff auf klar freigegebene Datenquellen – kein unkontrolliertes Training. |
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Zentrales Label „Einsatz von KI" in der Mitte. Die fünf Prinzipien als Kranz/Stern drumherum angeordnet – je mit Icon (Schloss, Zielscheibe, Stecker, Waage, Auge).

docs/case-study-power-desktop.md · Normal file · @@ -0,0 +1,347 @@

# PowerOn Desktop
|
||||||
|
*Der zentrale AI Workspace fuer Unternehmen, die produktiver, sicherer und schneller arbeiten wollen.*
|
||||||
|
**Subline:** Ein Workspace. Alle Daten. Alle KI-Faehigkeiten.
|
||||||
|
|
||||||
|
---
|
||||||
|
**1 von 16**
|
||||||
|
|
||||||
|
## Seite 1 - Cover
|
||||||
|
*KI, Daten und Teamarbeit – ein gemeinsamer Arbeitsraum.*
|
||||||
|
|
||||||
|
PowerOn Desktop bringt KI, Daten und Teamarbeit in eine gemeinsame Arbeitsumgebung.
|
||||||
|
Sie reduzieren Reibung im Alltag und schaffen messbaren Mehrwert ab dem ersten Use Case.
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erstelle eine moderne isometrische SaaS-Hero-Illustration eines digitalen Arbeitsplatzes. Zeige ein zentrales Dashboard mit verbundenen Modulen fuer Chat, Dokumente, Datenquellen und Automationen. Stil clean, hochwertig, C-Level-Praesentation. Farbpalette mit Primaerblau #1976d2, Tuerkis-Akzenten, Weiss und dezenten Grautoenen. Licht, Tiefe, klare Linien, keine Personenfotos. Kein Text im Bild. 16:9, hohe Aufloesung.
|
||||||
|
|
||||||
|
---
|
||||||
|
**2 von 16**
|
||||||
|
|
||||||
|
## Seite 2 - Die Herausforderung
|
||||||
|
*Wenn Wissen zerstreut ist, leidet die Wertschoepfung.*
|
||||||
|
|
||||||
|
In den meisten Unternehmen ist Wissen verteilt: Dateien, Mails, Fachsysteme und Meetings laufen nebeneinander.
|
||||||
|
Teams springen zwischen Tools, verlieren Kontext und investieren zu viel Zeit in Suche statt in Entscheidungen.
|
||||||
|
|
||||||
|
**Typische Folgen:**
|
||||||
|
- Lange Recherchezeiten bei jeder wichtigen Frage
|
||||||
|
- Uneinheitliche Qualitaet in Ergebnissen
|
||||||
|
- Hoehere Risiken bei Datenschutz und Compliance
|
||||||
|
- KI bleibt auf einzelne Experimente begrenzt
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Visualisiere eine fragmentierte Unternehmenslandschaft mit vielen isolierten Dateninseln: Dokumente, E-Mail, CRM, Tabellen, Tickets. Verbinde sie nicht direkt, sondern zeige bewusst Brueche und Medienwechsel. Abstrakt, modern, minimalistisch, isometrischer Look. Farben: Grau fuer Fragmentierung, Akzente in Blau fuer Potenzial. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**3 von 16**
|
||||||
|
|
||||||
|
## Seite 3 - Die Loesung im Ueberblick
|
||||||
|
*Vier Zugaenge, ein durchgaengiger Kontext.*
|
||||||
|
|
||||||
|
PowerOn Desktop schafft einen gemeinsamen Arbeitsraum fuer vier Kernaufgaben:
|
||||||
|
- **Denken und abstimmen (AI Chat)** – Fragen, Entwuerfe, Abstimmung und Entscheidungsvorbereitung an einem Ort
|
||||||
|
- **Inhalte umsetzen (Editor)** – Texte und Dokumente mit KI-Unterstuetzung bearbeiten, aber immer mit Ihrer Freigabe
|
||||||
|
- **Wissen verbinden (Datenquellen)** – Dateien, Clouds und Fachsysteme als durchsuchbaren Kontext einbinden
|
||||||
|
- **Prozesse beschleunigen (Workflows und Automation)** – Wiederkehrende Ablaeufe planbar ausfuehren und Ergebnisse wiederverwenden
|
||||||
|
|
||||||
|
**Warum das fuer Fuehrungsteams zaehlt:** Statt fuenf getrennte Tools entsteht ein durchgaengiger Arbeitsfluss. Der Kontext aus Chat, Dateien und Quellen bleibt erhalten. Teams sparen Such- und Abstimmungszeit, und Sie behalten die Steuerung darueber, welche Informationen ueberhaupt in die KI einfliessen.
|
||||||
|
|
||||||
|
**Typischer Ablauf im Alltag:** Information beschaffen (Quellen) – diskutieren und strukturieren (Chat) – Inhalt finalisieren (Editor) – bei Bedarf automatisieren (Workflows). Alles in derselben Instanz, ohne Export-Chaos und ohne Kontextverlust zwischen den Schritten.
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erstelle eine isometrische Uebersichtsillustration mit einem zentralen Hub und vier klar verbundenen Modulen: Chat, Editor, Data Sources, Automation. Datenstroeme sollen in beide Richtungen fliessen. Stil: enterprise SaaS, aufgeraeumt, premium, viel White Space. Farbsystem mit Blau #1976d2 als Leitfarbe, Tuerkis und Violett als Sekundaerfarben. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**4 von 16**
|
||||||
|
|
||||||
|
## Seite 4 - Arbeitsbereich 1: AI Chat
|
||||||
|
*Strukturiert denken – mit nachvollziehbaren Antworten.*
|
||||||
|
|
||||||
|
Der AI Chat ist das Sprungbrett fuer den produktiven Einsatz von KI im Tagesgeschaeft. Teams nutzen ihn fuer Analyse, Formulierung, Zusammenfassungen und Entscheidungsvorbereitung – ohne dass Fachwissen in Prompt-Engineering ausarten muss.
|
||||||
|
|
||||||
|
**Was Entscheider schaetzen:** Antworten bleiben nachvollziehbar, weil Bezuege zu Quellen und Verarbeitungsschritten sichtbar werden. Das reduziert das Risiko von „halluzinierten“ Fakten und erleichtert interne Freigaben. Optional unterstuetzt Spracheingabe und Sprachausgabe – etwa fuer schnelle Notizen unterwegs oder barrierefreies Arbeiten.
|
||||||
|
|
||||||
|
**Konkrete Einsatzszenarien:** Erstentwurf fuer Kundenmail oder interne Mitteilung; Strukturierung eines Meetings oder eines Projektbriefings; Einordnung einer laengeren Unterlage mit klaren Bezugspunkten; Vorbereitung einer Praesentation aus gebundenem Kontext statt aus dem Gedaechtnis.
|
||||||
|
|
||||||
|
**Im Workspace sichtbar:** Verlauf der Unterhaltung, Anhaenge und Dateibezuege, nachvollziehbare Zwischenschritte bei komplexeren Anfragen – damit bleibt nachvollziehbar, *wie* ein Ergebnis zustande kam.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Schnellere Erstentwuerfe fuer Mails, Konzepte und Entscheidungen
|
||||||
|
- Weniger Rueckfragen durch besser strukturierten Kontext
|
||||||
|
- Hoehere Vertrauenswuerdigkeit durch nachpruefbare Herkunft von Inhalten
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Gestalte eine abstrakte Chat-UI-Illustration mit links-rechts angeordneten Nachrichtenblasen, Quellensymbolen und einem dezenten Sprachsymbol fuer Voice-Interaktion. Keine echten Markenlogos. Design modern, klar, professionell. Helle Flaechen mit blauen Akzenten (#1976d2), leichte Tiefenwirkung. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**5 von 16**
|
||||||
|
|
||||||
|
## Seite 5 - Arbeitsbereich 2: Editor
|
||||||
|
*Schnelligkeit der KI – mit Ihrer letzten Freigabe.*
|
||||||
|
|
||||||
|
Im Editor werden KI-Vorschlaege nicht blind uebernommen, sondern kontrolliert geprueft. Aenderungen erscheinen im direkten Vergleich (Vorher und Nachher). Sie entscheiden pro Abschnitt oder gesamt: annehmen, ablehnen oder nachjustieren – analog zu professionellen Review-Prozessen in Recht, Compliance oder Technikredaktion.
|
||||||
|
|
||||||
|
**Warum das strategisch relevant ist:** Unternehmen wollen Tempo *und* Kontrolle. Der Editor verbindet beides: KI liefert Vorschlaege in grosser Geschwindigkeit, Ihre Organisation behaelt die letzte Instanz. Das senkt das Risiko ungewollter Formulierungen oder inhaltlicher Fehler in nach aussen gerichteten Dokumenten.
|
||||||
|
|
||||||
|
**Fuer wen besonders wertvoll:** Fachbereiche mit verbindlichen Texten (Vertraege, Richtlinien, Angebote), Projektleitungen mit Spezifikationen, Qualitaetssicherung und alle Teams, die wiederkehrend aehnliche Dokumente anpassen muessen.
|
||||||
|
|
||||||
|
**Mehrstufige Aufgaben:** Fuer umfangreichere Bearbeitungen kann die KI in einem gefuehrten Ablauf mehrere Schritte vorschlagen – stets mit der Moeglichkeit, vor der Uebernahme zu pruefen. So bleibt Effizienz mit Governance vereinbar.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Schnellere Bearbeitung von Dokumenten und Fachtexten bei gleichzeitiger Freigabe-Logik
|
||||||
|
- Weniger Korrekturschleifen durch klare Sicht auf jede Aenderung
|
||||||
|
- Skalierbare Qualitaet bei Standarddokumenten und Vorlagen
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erstelle eine elegante Side-by-Side-Editor-Illustration mit Vorher-Nachher-Ansicht, farblich markierten Aenderungen und klaren Aktionsflaechen fuer Accept/Reject. Stil: clean enterprise software concept art, isometrisch oder halb-isometrisch. Dunkles Editorpanel kombiniert mit hellem UI-Rahmen. Primaerblau #1976d2, Akzentgruen und Rot sehr dezent. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**6 von 16**
|
||||||
|
|
||||||
|
## Seite 6 - Arbeitsbereich 3: Datenquellen
|
||||||
|
*Ihre Systeme werden zum nutzbaren Wissensraum.*
|
||||||
|
|
||||||
|
PowerOn Desktop verbindet bestehende Systeme mit dem Arbeitskontext Ihrer Teams. Typische Anbindungen umfassen etwa Microsoft 365 (SharePoint, OneDrive, Outlook, Teams), Google (Drive, Gmail), Ticketsysteme wie Jira oder ClickUp, sowie FTP und branchenspezifische Fachsysteme – jeweils dort, wo Ihre Organisation bereits arbeitet.
|
||||||
|
|
||||||
|
**Zwei praktische Ebenen:** Zum einen **persoenliche Quellen** des Nutzers (z. B. eigene Cloud-Bereiche), zum anderen **Quellen der konkreten Workspace-Instanz** und mandantenbezogene Daten – immer abgestimmt auf Ihre Rollen- und Freigaberegeln. Zusaetzlich koennen Dateien direkt im Workspace abgelegt, strukturiert und fuer die KI-Nutzung bereitgestellt werden (inkl. Drag-and-Drop).
|
||||||
|
|
||||||
|
**Der Effekt fuer den Alltag:** Statt Informationen manuell zu suchen, zusammenzukopieren und in einen Chat zu pasten, entsteht ein **durchsuchbarer Wissensraum**. Die KI bezieht sich auf Inhalte, die Sie bewusst freigegeben haben – nicht auf ein undurchsichtiges „Internet-Gedaechtnis“.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Entscheidungen und Antworten basieren auf *Ihren* Unterlagen, nicht auf Vermutungen
|
||||||
|
- Deutlich weniger Medienbrueche und Copy-Paste zwischen Systemen
|
||||||
|
- Schnellere Einarbeitung neuer Mitarbeitender durch einen klaren, gebundenen Wissenszugang
|
||||||
|
- Weniger Risiko veralteter oder falscher Versionen, weil der Bezug zur Quelle erhalten bleibt
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Visualisiere mehrere Unternehmensdatenquellen als abstrahierte Knoten (Dokumente, Cloud, Tickets, Mail) die in einen zentralen AI-Workspace-Hub fliessen. Zeige Struktur und Ordnung statt Chaos. Stil modern, isometrisch, B2B-Marketing. Farben: Blau #1976d2, Gruen fuer externe Quellen, Violett fuer Feature-Daten, neutraler Hintergrund. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**7 von 16**
|
||||||
|
|
||||||
|
## Seite 7 - Arbeitsbereich 4: Workflows und Automation
|
||||||
|
*Standardisierte Ablaeufe – transparent ausgefuehrt.*
|
||||||
|
|
||||||
|
Wiederkehrende Aufgaben werden als Workflows **einmal** sinnvoll definiert und danach **zuverlaessig** ausgefuehrt – manuell gestartet oder nach Plan. Das ist besonders relevant fuer wiederkehrende Reports, Datenaufbereitungen, Qualitaetschecks oder vorbereitende Schritte vor menschlicher Freigabe.
|
||||||
|
|
||||||
|
**Transparenz statt Blackbox:** Laufende und abgeschlossene Ausfuehrungen sind nachvollziehbar dokumentiert (Live-Logs und Status). Fuehrungskraefte sehen, *dass* und *wie* Automatisierung laeuft – wichtig fuer Vertrauen und interne Kontrolle.
|
||||||
|
|
||||||
|
**Rueckkopplung in den Workspace:** Ergebnisse aus Workflows werden nicht „irgendwo abgelegt“, sondern koennen als neuer Kontext fuer Chat, Editor und weitere Schritte dienen. So schliesst sich der Kreis von ad-hoc-Arbeit und standardisierten Ablaeufen.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Hoehere Prozessgeschwindigkeit bei gleichbleibender Qualitaet und weniger manuellen Fehlern
|
||||||
|
- Entlastung von Teams bei Routinethemen; mehr Kapazitaet fuer Urteils- und Beziehungsarbeit
|
||||||
|
- Bessere Skalierung ueber Teams, Standorte und Zeitzonen hinweg
|
||||||
|
- Einheitliche Standards statt Inselloesungen („jeder macht es anders“)
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erstelle eine moderne Workflow-Illustration mit mehreren Prozessstufen, KI-Knoten und Rueckkopplung in ein zentrales Dashboard. Zeige klare Richtungspfeile, modulare Bausteine und Statusindikatoren. Stil: clean, enterprise, minimalistisch-isometrisch. Farbpalette mit Blau #1976d2 und Tuerkis-Akzenten. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**8 von 16**
|
||||||
|
|
||||||
|
## Seite 8 - USP: Intelligente Wissenssuche in drei Ebenen
|
||||||
|
*Das passende Wissen zur richtigen Zeit – ohne Rauschen.*
|
||||||
|
|
||||||
|
Stellen Sie sich drei **Schubladen** vor, die Ihre Organisation ohnehin kennt – nur dass sie hier technisch sauber getrennt und fuer die KI nutzbar gemacht werden:
|
||||||
|
|
||||||
|
- **Persoenlich:** Notizen, Entwuerfe und Dateien, die dem einzelnen Nutzer zuordenbar sind und nicht automatisch das ganze Team exponieren.
|
||||||
|
- **Team / Instanz:** Alles, was zu einem konkreten Projekt, einem Mandat-Workspace oder einer definierten Arbeitsgruppe gehoert – der gemeinsame Tisch fuer diesen Use Case.
|
||||||
|
- **Mandat / Unternehmen:** Von der Organisation freigegebenes Wissen (Richtlinien, Vorlagen, Standards), das breiter – aber weiterhin regelkonform – genutzt werden darf.
|
||||||
|
|
||||||
|
**Warum das mehr ist als „eine grosse Datenbank“:** Bei jeder Anfrage wird der **sinnvolle Ausschnitt** aus diesen Ebenen zusammengefuehrt. Antworten werden relevanter, Rauschen sinkt, und Sie vermeiden das typische Problem generischer KI-Tools: zu viel oder zu wenig Kontext, falsch gemischt.
|
||||||
|
|
||||||
|
**Fuer die Geschaeftsfuehrung:** Das Modell spiegelt reale Verantwortlichkeiten wider (individuell, teambezogen, unternehmensweit). So laesst sich KI-Nutzung **governancetauglich** erklaeren und auditieren – statt als undifferenzierte „alles-in-einen-Topf“-Loesung.
|
||||||
|
|
||||||
|
**Das Ergebnis im Alltag:** Schnellere, treffsichere Antworten, weniger irrelevante Treffer, klarere Grenzen zwischen privatem Arbeitskontext und geteiltem Wissen.
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Gestalte eine abstrakte 3-Ebenen-Architektur als konzentrische Kreise oder gestapelte Ebenen: personal, team-instance, mandate-enterprise. Daten sollen von unten nach oben intelligent selektiert werden. Premium-SaaS-Look, klare Geometrie, moderne Schattierung. Blau #1976d2 als Hauptfarbe, Tuerkis und Violett fuer Ebenenunterscheidung. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**9 von 16**
|
||||||
|
|
||||||
|
## Seite 9 - USP: Privacy Shield
|
||||||
|
*Sensibel bleibt sensibel – auch mit KI.*
|
||||||
|
|
||||||
|
Datenschutz wird nicht erst in der Rechtsabteilung „nachgebessert“, sondern direkt im Arbeitsprozess verankert. **Privacy Shield** steht fuer eine kontrollierte Vorverarbeitung: Personenbezogene und besonders sensible Angaben (z. B. Namen, Kontaktdaten, typische Identifikatoren) koennen **vor** der eigentlichen KI-Verarbeitung geschuetzt werden, sodass weniger Rohdaten nach aussen gelangen.
|
||||||
|
|
||||||
|
**Was das praktisch bedeutet:** Teams arbeiten weiter mit echten Inhalten im Workspace. Fuer die Verarbeitung durch externe oder interne Modelle werden nur die Teile genutzt, die Sie policykonform freigeben. Ergebnisse bleiben dennoch inhaltlich nutzbar, weil die Zuordnung im geschuetzten Umfeld wiederhergestellt werden kann – ohne dass der Nutzer jedes Mal manuell anonymisieren muss.
|
||||||
|
|
||||||
|
**Gespraech mit Datenschutz und Compliance:** Sie koennen zeigen, *welche* Kategorie von Daten geschuetzt wird, *wann* das greift und *wer* welche Freigaben hat. Das erhoeht die Akzeptanz bei Datenschutzbeauftragten, Arbeitnehmervertretungen und Kunden mit strengen Auflagen.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- KI-Einsatz auch dort moeglich, wo sensible Inhalte allgegenwaertig sind (HR, Kundenakten, Vertraege)
|
||||||
|
- Geringeres regulatorisches und Reputationsrisiko bei schneller Pilotierung
|
||||||
|
- Hoeheres Vertrauen von Vorstand, Aufsicht und externen Pruefern
|
||||||
|
- Weniger „Schatten-KI“, weil der offizielle Weg sicher genug ist, um genutzt zu werden
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erzeuge eine abstrakte Cyber-Security-Illustration mit einem Schutzschild zwischen Datenstrom und KI-Kern. Zeige, dass sensible Daten vor Verarbeitung geschuetzt werden. Stil: clean, modern, enterprise trust visual. Keine Bedrohungs-Optik, sondern kontrollierte Sicherheit und Governance. Farben: Blau #1976d2, Tuerkis, dezentes Silber/Grau. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**10 von 16**
|
||||||
|
|
||||||
|
## Seite 10 - USP: Mandanten-Isolation
|
||||||
|
*Klare Grenzen – technisch abgebildet, nicht nur organisatorisch gewuenscht.*
|
||||||
|
|
||||||
|
Jeder **Mandant** – sei es ein Kunde, eine Tochtergesellschaft oder eine klar abgegrenzte Organisationseinheit – arbeitet in einem **eigenen, logisch getrennten Datenraum**. Daten und Wissensbestaende vermischen sich nicht zwischen Mandanten, selbst wenn dieselbe Plattform genutzt wird.
|
||||||
|
|
||||||
|
**Typische Traeger dieser Anforderung:** Treuhand und Revision, Beratung mit mehreren Auftraggebern, Konzerne mit strikten Firewalls zwischen Sparten, sowie jede Organisation, die **Vertraulichkeit** als Verkaufsargument oder gesetzliche Pflicht versteht.
|
||||||
|
|
||||||
|
**Need-to-know auf Plattform-Ebene:** Nutzer sehen nur, was ihre Rolle und ihr Mandat erlauben. Das unterstuetzt interne Kontrollsysteme und erleichtert die Kommunikation mit externen Pruefern: Trennung ist nicht nur organisatorisch gewuenscht, sondern **technisch abgebildet**.
|
||||||
|
|
||||||
|
**Skalierung ohne Grenzverlust:** Neue Mandanten oder neue Projekte lassen sich hinzufuegen, ohne bestehende Sicherheits- und Vertraulichkeitsmodelle zu verwaessern. Das ist ein Wachstumshebel fuer Dienstleister und fuer Konzerne mit komplexer Struktur.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Eignung fuer Multi-Client- und Multi-Brand-Setups ohne Datenvermischung
|
||||||
|
- Deutlich reduziertes Risiko von Vertraulichkeitsverletzungen und „falschen“ Zugriffen
|
||||||
|
- Bessere Argumentationsgrundlage gegenueber Kunden, die Trennschaerfe verlangen
|
||||||
|
- Kontrollierbares Wachstum: mehr Nutzung, nicht mehr Risiko pro Nutzer
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Visualisiere mehrere sauber getrennte Datenbereiche als leuchtende, voneinander isolierte Cluster oder Glas-Container, jeweils mit eigenem Zugangspfad. Zeige Ordnung, Trennung und Sicherheit in einer modernen Enterprise-Aesthetik. Farben: Blau #1976d2 als verbindendes System, unterschiedliche Akzentfarben pro Mandant. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**11 von 16**
|
||||||
|
|
||||||
|
## Seite 11 - USP: Datenkontrolle durch den Nutzer
|
||||||
|
*Sie steuern, welcher Kontext zaehlt – ohne stillschweigende Weitergabe.*
|
||||||
|
|
||||||
|
Mit **klaren Sichtbarkeitsstufen** entscheiden Nutzer und Rolleninhaber aktiv, welche Inhalte in welchem Kontext fuer die KI nutzbar sind. Es gibt **keine stillschweigende** Weitergabe: Was geteilt wird, wird bewusst eingestellt – beim Einbinden von Dateien, Ordnern oder Quellen.
|
||||||
|
|
||||||
|
**Die vier Stufen in Klartext** – eine vereinfachte Skizze folgt nach der Liste:
|
||||||
|
- **Persoenlich:** Nur fuer den anlegenden Nutzer sichtbar und nutzbar – ideal fuer Entwuerfe und persoenliche Arbeitsunterlagen.
|
||||||
|
- **Instanzbezogen:** Fuer alle, die Zugriff auf genau diese Workspace-Instanz haben – typisch fuer Projekt- oder Teamarbeitsraeume.
|
||||||
|
- **Mandatsweit:** Fuer die gesamte Mandantenorganisation freigegeben – etwa Richtlinien, die jeder mit Mandatszugang nutzen darf.
|
||||||
|
- **Global (kontrolliert):** Plattformweite Referenzinhalte, typischerweise **stark reglementiert** und oft nur lesend – z. B. offizielle Standards, die zentral gepflegt werden.
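
Eine rein illustrative Skizze, wie sich die vier Stufen technisch abbilden lassen koennten (Annahme, kein Produktivcode):

```python
from enum import Enum

class Visibility(Enum):
    PERSONAL = "personal"   # nur der anlegende Nutzer
    INSTANCE = "instance"   # alle mit Zugriff auf diese Workspace-Instanz
    MANDATE = "mandate"     # gesamte Mandantenorganisation
    GLOBAL = "global"       # plattformweit, stark reglementiert, meist nur lesend

def is_usable_for_ai(visibility: Visibility, owner: str, instance: str, mandate: str,
                     user: str, user_instance: str, user_mandate: str) -> bool:
    """Prueft, ob ein Inhalt im aktuellen Kontext fuer die KI nutzbar ist."""
    if visibility is Visibility.PERSONAL:
        return owner == user
    if visibility is Visibility.INSTANCE:
        return instance == user_instance
    if visibility is Visibility.MANDATE:
        return mandate == user_mandate
    return True  # GLOBAL: zentral gepflegt, lesend verfuegbar
```
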
|
||||||
|
|
||||||
|
**Zusaetzliche Hebel:** Inhalte koennen mit einer **Schutz-Option** markiert werden (Vorverarbeitung / Neutralisierung), bevor sie in die KI-Pipeline gehen. Aenderungen an Sichtbarkeit oder Schutz koennen eine Neu-Einordnung im Wissensindex erfordern – damit bleibt das System konsistent mit Ihren Regeln.
|
||||||
|
|
||||||
|
**Warum das Fuehrungskraefte interessiert:** Sie reduzieren **Fehlbedienung** und **Social Engineering** im weitesten Sinne – nicht jede Datei landet aus Versehen im falschen Kontext. Datenschutz und Informationsklassifikation werden **operationalisierbar**, nicht nur Policy-Papier.
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erstelle eine abstrakte Control-Panel-Illustration mit Scope-Umschaltern, Toggle-Elementen und klaren Zugriffsebenen. Fokus auf User-Kontrolle und Transparenz. Stil: reduziertes High-End SaaS Interface Concept, flach-isometrisch, aufgeraeumt. Primaerfarbe Blau #1976d2, Akzente in Tuerkis und Violett. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**12 von 16**
|
||||||
|
|
||||||
|
## Seite 12 - USP: Multi-Model AI Orchestrierung
|
||||||
|
*Flexibilitaet im Modellmarkt – unter Ihren Freigaben und Richtlinien.*
|
||||||
|
|
||||||
|
PowerOn Desktop ist **nicht** an einen einzelnen KI-Anbieter gebunden. Hinter den Kulissen waehlt die Plattform passend zur Aufgabe: mal ein Modell, das besonders gut bei **langer Textarbeit** ist, mal eines fuer **schnelle Antworten**, mal Spezialfaehigkeiten fuer **Bildanalyse** oder **Strukturierung** – immer im Rahmen Ihrer Freigaben und Richtlinien.
|
||||||
|
|
||||||
|
**Was das fuer Einkauf und IT bedeutet:** Sie vermeiden **Single-Source-Abhaengigkeiten** und behalten Verhandlungsmacht. Wenn ein Anbieter Preise aendert, Qualitaet schwankt oder Verfuegbarkeit leidet, ist die Plattform darauf vorbereitet, **auszuweichen** – ohne dass Endanwender sofort umlernen muessen.
|
||||||
|
|
||||||
|
**Betrieb und Risiko:** Ausfallsicherheit steigt, weil kritische Pfade nicht von einem einzigen Dienst abhaengen. Gleichzeitig laesst sich **Kosten und Leistung** feiner steuern: teurere Modelle dort, wo der Mehrwert hoch ist; sparsamere Varianten bei einfachen Routinefragen.
|
||||||
|
|
||||||
|
**Governance bleibt obenauf:** Welche Modelle wer nutzen darf, bleibt **rollen- und mandantenbezogen** steuerbar – Innovation ohne Kontrollverlust.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Strategische Flexibilitaet in einem sich schnell veraendernden KI-Markt
|
||||||
|
- Bessere Ergebnisqualitaet, weil Werkzeug und Aufgabe zusammenpassen
|
||||||
|
- Hoehere Verfuegbarkeit und Resilienz im Tagesbetrieb
|
||||||
|
- Transparente Kostenlogik statt undurchsichtiger Flatrates ohne Steuerung
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Visualisiere mehrere abstrakte KI-Modelle als unterschiedliche Rechenkerne, die in einen zentralen Orchestrator laufen. Zeige Lastverteilung, Routing und Ausfallsicherheit. Stil: futuristisch, aber business-tauglich, clean und nicht verspielt. Farbschema: Blau #1976d2, Cyan, Violett, dunkler Hintergrund mit sanften Highlights. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**13 von 16**
|
||||||
|
|
||||||
|
## Seite 13 - USP: Swiss Made, Governance und Compliance
|
||||||
|
*Innovation mit Leitplanken – fuer Vorstand, Aufsicht und Pruefer nachvollziehbar.*
|
||||||
|
|
||||||
|
PowerOn Desktop verbindet **Innovationsgeschwindigkeit** mit **klaren Governance-Leitplanken**. Als Schweizer Anbieter adressieren wir den Erwartungsstandard vieler mittelstaendischer und grosser Organisationen: Qualitaet, Verlaesslichkeit und ein angemessener Umgang mit Datenschutz (DSG) und – wo relevant – DSGVO.
|
||||||
|
|
||||||
|
**Was „Governance“ hier konkret heisst:**
|
||||||
|
- **Rollen und Rechte:** Wer darf welche Features, Datenquellen und KI-Modelle nutzen?
|
||||||
|
- **Nachvollziehbarkeit:** Welche Schritte und Quellen haben zu einem Ergebnis beigetragen – zumindest dort, wo es fuer interne Kontrolle noetig ist?
|
||||||
|
- **Mandanten- und Instanzlogik:** Klare Grenzen zwischen Organisationen, Projekten und persoenlichem Raum.
|
||||||
|
- **Betriebsreife:** Kein reines „Labor-Tool“, sondern eine Struktur, mit der sich KI **breit** ausrollen laesst.
|
||||||
|
|
||||||
|
**Fuer Vorstand und Aufsicht:** Sie erhalten eine erzaehlbare Geschichte: KI ist eingebettet in Regeln, Trennungen und Freigaben – nicht eine anonyme Chat-Box aus dem Internet. Das erleichtert Freigaben, Versicherungs- und Partnerfragen sowie die Zusammenarbeit mit externen Pruefern.
|
||||||
|
|
||||||
|
**Business-Nutzen:**
|
||||||
|
- Verlaessliche Grundlage fuer strategische KI-Programme und Budgetentscheide
|
||||||
|
- Bessere Auditierbarkeit in sensiblen Bereichen (Finance, Legal, HR, Kundenprojekte)
|
||||||
|
- Weniger Schatten-KI, weil der offizielle Weg attraktiv *und* sicher ist
|
||||||
|
- Staerkere Positionierung gegenueber Kunden und Partnern, die Compliance explizit einfordern
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erstelle eine hochwertige Compliance-Illustration mit Schweizer Referenz (abstrakte Alpenlinie oder dezente Swiss-Form), Security-Symbolen, Audit-Pfaden und Governance-Elementen. Stil: premium corporate, clean, vertrauensvoll, modern. Farben: Blau #1976d2, Weiss, dezentes Rot als kleiner Akzent. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**14 von 16**
|
||||||
|
|
||||||
|
## Seite 14 - Business Impact
|
||||||
|
*Zeit, Qualitaet, Skalierung, Compliance – messbar adressiert.*
|
||||||
|
|
||||||
|
PowerOn Desktop liefert Wirkung in vier strategischen Dimensionen:
|
||||||
|
|
||||||
|
**1) Zeitgewinn**
|
||||||
|
- Schnellere Informationssuche und Entscheidungsvorbereitung
|
||||||
|
- Weniger Tool-Wechsel und manuelle Zwischenschritte
|
||||||
|
|
||||||
|
**2) Qualitaet**
|
||||||
|
- Konsistentere Ergebnisse durch gemeinsamen Kontext
|
||||||
|
- Hoehere Nachvollziehbarkeit durch Quellen und Prozesssicht
|
||||||
|
|
||||||
|
**3) Skalierung**
|
||||||
|
- Wiederverwendbare Workflows statt Einzelfallarbeit
|
||||||
|
- Schnellere Uebertragung von Best Practices zwischen Teams
|
||||||
|
|
||||||
|
**4) Compliance**
|
||||||
|
- Strukturierte Datenkontrolle und klare Rollenlogik
|
||||||
|
- Bessere Grundlage fuer interne und externe Pruefungen
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Gestalte eine moderne Business-Impact-Illustration mit vier gleichwertigen Saeulen oder KPI-Kacheln: Geschwindigkeit, Qualitaet, Skalierung, Compliance. Zeige positive Dynamik, klare Struktur und Executive-Level-Aesthetik. Farben: Blau #1976d2 dominiert, Tuerkis und Violett als Sekundaerakzente, heller Hintergrund. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**15 von 16**
|
||||||
|
|
||||||
|
## Seite 15 - So starten Sie
|
||||||
|
*Vom ersten Gespraech bis zum messbaren Ergebnis – in vier Schritten.*
|
||||||
|
|
||||||
|
Ein erfolgreicher Einstieg folgt einem klaren, risikoarmen Vorgehen:
|
||||||
|
|
||||||
|
1. **Discovery Call (30 Min.)**
|
||||||
|
Ziele, Prioritaeten und kritische Use Cases abstimmen.
|
||||||
|
2. **Workspace Blueprint**
|
||||||
|
Datenquellen, Rollen und Governance-Rahmen definieren.
|
||||||
|
3. **MVP in kurzer Zeit**
|
||||||
|
Ein produktiver Kern-Use-Case mit messbarem Ergebnis.
|
||||||
|
4. **Scale-Up**
|
||||||
|
Weitere Teams, Prozesse und Automationen schrittweise ausrollen.
|
||||||
|
|
||||||
|
Dieser Ansatz schafft schnelle Erfolge ohne strategische Ueberdehnung.
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Erzeuge eine visuelle Roadmap mit vier klaren Etappen von links nach rechts: Discover, Blueprint, MVP, Scale. Nutze abstrakte Milestones, verbindende Linien und Fortschrittsdynamik. Stil: hochwertig, clean, enterprise consulting look. Farben: Blau #1976d2, Tuerkis-Akzente, viel Luft und Ordnung. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
**16 von 16**
|
||||||
|
|
||||||
|
## Seite 16 - Kontakt und Team
|
||||||
|
*Ihr Einstieg in PowerOn Desktop – persoenlich und strukturiert.*
|
||||||
|
|
||||||
|
**PowerOn AG**
|
||||||
|
Birmensdorferstrasse 94, 8003 Zuerich (CH)
|
||||||
|
[www.poweron.swiss](https://www.poweron.swiss)
|
||||||
|
|
||||||
|
**Team**
|
||||||
|
- Patrick Motsch - CEO/CTO
|
||||||
|
- Ida Dittrich - Product Architect
|
||||||
|
- Stephan Schellworth - Business Integration
|
||||||
|
|
||||||
|
Wenn Sie KI im Tagesgeschaeft produktiv und kontrolliert verankern wollen, starten wir mit einem klaren ersten Schritt.
|
||||||
|
|
||||||
|
> **BILD-PROMPT (Nano Banana Pro):**
|
||||||
|
> Gestalte ein minimalistisches, professionelles Abschlussvisual fuer ein B2B-Pitchdeck: abstraktes Team-/Unternehmensmotiv mit einem zentralen Hub, verbundenen Punkten und vertrauensvoller Corporate-Atmosphaere. Stil clean, modern, hochwertig, nicht verspielt. Farbpalette: Blau #1976d2, Weiss, dezentes Grau, leichter Tuerkis-Akzent. Kein Text im Bild. 16:9.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Keywords / Tags
|
||||||
|
|
||||||
|
PowerOn Desktop, AI Workspace, intelligente Wissenssuche, Datenschutz, Privacy Shield, Mandanten-Isolation, Datenkontrolle, Multi-Model AI, Workflow Automation, C-Level KI-Strategie, DSG, DSGVO, Swiss Made, Governance, Compliance, Datenquellen, Enterprise SaaS

docs/case-study-poweron-48h-agent.md · Normal file · @@ -0,0 +1,217 @@

# Case Study (Illustration / Template): PowerOn Launch48
|
||||||
|
|
||||||
|
**Wichtig:** Das **primaere Kundenangebot** zum Weitergeben ist **[poweron-launch48-offer.md](./poweron-launch48-offer.md)** – verstaendlich fuer Management und Fachbereiche.
|
||||||
|
**Dieses Dokument** dient **nicht** als erstes Verkaufs-PDF: Es ist ein **Beispiel-Verlauf / Pilot-Template** zur Vertiefung, sobald Sie Referenzgeschichten brauchen. Alle **Kundendaten, Branche und Kennzahlen** sind **fiktiv oder anonymisiert**, bis ein reales Projekt mit schriftlicher Freigabe vorliegt.
|
||||||
|
**Referenz fuer Liefermethodik:** AI-augmented Engineering (vergleichbar der dokumentierten **Abraxas DATA Hub Migration** – Kundennennung in oeffentlichen Materialien nur mit Freigabe).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Executive Summary
|
||||||
|
|
||||||
|
**Ausgangslage (Beispiel):** Ein **mittelstaendisches Dienstleistungsunternehmen** (anonymisiert) hatte wiederkehrende **Kundenanfragen zu Vertrags- und Leistungsinhalten**, die heute aus **PDF-Handbuechern, E-Mail-Templates und internen Notizen** manuell beantwortet werden. Die Bearbeitungszeit pro Anfrage war hoch, die Qualitaet von der Erfahrung der jeweiligen Person abhaengig.
|
||||||
|
|
||||||
|
**PowerOn** fuehrte mit dem Paket **Launch48** einen **48-Stunden-Block** auf der **PowerOn-Plattform** durch. Ergebnis (Zielbild des Templates): **ein produktiv einsetzbarer KI-Assistent** mit angebundenen **internen Quellen**, definierter **Pilotgruppe** und **vereinbarten Erfolgszielen** fuer die zweite Zahlungsstufe.
|
||||||
|
|
||||||
|
**Kernresultat (Illustration):** Von **Kickoff** bis **Uebergabe** **2 Arbeitstage** intensiver Umsetzung; schneller **Mehrwert im Pilot** statt monatelanger Vorlauf; laufende **Pruefung durch Ihre Fachexpertinnen und Experten** fuer Qualitaet und Compliance.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Projekteckdaten
|
||||||
|
|
||||||
|
| Aspekt | Detail (Template / anonymisiert) |
|
||||||
|
| --- | --- |
|
||||||
|
| **Kunde** | Anonymisiertes Dienstleistungsunternehmen, deutschsprachige Schweiz |
|
||||||
|
| **Plattform** | PowerOn (Power Desktop / AI Workspace, Datenquellen, Automation) |
|
||||||
|
| **Use-Case** | Erstbeantwortung und Strukturierung von wiederkehrenden Fachanfragen aus freigegebenen internen Unterlagen |
|
||||||
|
| **Sprint-Dauer** | 48 Stunden gebundene Umsetzung (plus Vorlauf fuer Gates) |
|
||||||
|
| **Umfang** | 1 Agent/Workflow, 3 Wissensquellen (Beispiel), 1 Integration (Beispiel: internes Ticket-Read) |
|
||||||
|
| **PowerOn Team** | Patrick Motsch (Technische Leitung), Ida Dittrich (Architektur), Stephan Schellworth (Projektsteuerung) |
|
||||||
|
| **Kunde Team** | Fach-Owner, IT-Ansprechpartner, 10 Pilotnutzer (Beispiel) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Die Herausforderung
|
||||||
|
|
||||||
|
### Fachliche und technische Ausgangslage
|
||||||
|
|
||||||
|
- **Wissen verteilt** in PDFs, geteilten Ablagen und persoenlichen Entwuerfen.
|
||||||
|
- **Kein einheitlicher Erstkontakt**: Mitarbeitende formulieren Antworten neu – Inkonsistenz und laengere Durchlaufzeiten.
|
||||||
|
- **Datenschutz**: Kundenbezogene Details duerfen nicht in generische KI-Tools ohne Kontrolle.
|
||||||
|
|
||||||
|
### Business Impact der Ausgangslage
|
||||||
|
|
||||||
|
- Hoher **Zeitaufwand** pro Standardanfrage.
|
||||||
|
- **Skalierungsbremse** bei Wachstum (Onboarding neuer Mitarbeitender).
|
||||||
|
- **Risiko** unterschiedlicher Antwortqualitaet und laengerer Reaktionszeiten.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Der PowerOn-Ansatz
|
||||||
|
|
||||||
|
### AI-Augmented Delivery auf der Plattform
|
||||||
|
|
||||||
|
PowerOn setzt auf **Human-in-the-Loop**: KI beschleunigt Aufbau und Iteration, **Architektur und Freigaben** bleiben beim erfahrenen Team und beim Kunden.
|
||||||
|
|
||||||
|
**Kernprinzip:** Schnelligkeit durch KI-gestuetzte Umsetzung, **Qualitaet** durch Reviews, **Governance** durch PowerOn-Faehigkeiten (Quellen, Rollen, nachvollziehbare Ablaeufe).
|
||||||
|
|
||||||
|
### Phase 1: Use-Case & Impact (Vorlauf + Sprint-Start)
|
||||||
|
|
||||||
|
**Aktivitaeten:**
|
||||||
|
|
||||||
|
- Priorisierung eines **einzigen** Kernprozesses.
|
||||||
|
- Definition von **3 KPIs** (z. B. Zeit pro Vorgang, Pilot-Zufriedenheit, Fehlerindikator).
|
||||||
|
- Scope-Freeze fuer den Fixpreis.
|
||||||
|
|
||||||
|
**Deliverables:**
|
||||||
|
|
||||||
|
- Schriftliche **Scope- und KPI-Spezifikation**.
|
||||||
|
- **Go/No-Go** nach Compliance-Freigabe.
|
||||||
|
|
||||||
|
### Phase 2: Wissensbasis
|
||||||
|
|
||||||
|
**Aktivitaeten:**
|
||||||
|
|
||||||
|
- Anbindung von **Handbuch-PDFs**, **FAQ-Dokument** und **freigegebenem SharePoint-Ordner** (Beispiel).
|
||||||
|
- Zuordnung zu **Instanz-/Mandantenlogik** gemaess Rollen.
|
||||||
|
|
||||||
|
**Deliverables:**
|
||||||
|
|
||||||
|
- Indexierte Quellen im PowerOn-Workspace.
|
||||||
|
- Kurz-Dokumentation, welche Inhalte **nicht** im Agent-Kontext liegen (Grenzen).
|
||||||
|
|
||||||
|
### Phase 3: Tools & Anbindung
|
||||||
|
|
||||||
|
**Aktivitaeten:**
|
||||||
|
|
||||||
|
- **Eine** Integration im vereinbarten Umfang: z. B. **Lesen** von Ticket-Metadaten fuer Kontext (kein Schreiben in Produktion im Template-Beispiel).
|
||||||
|
- Festlegung von **Freigaben** und Testfaellen.
|
||||||
|
|
||||||
|
**Deliverables:**
|
||||||
|
|
||||||
|
- Funktionsfaehiger Integrationspfad in der Pilotumgebung.
|
||||||
|
- Testprotokoll (Grundfaelle).
|
||||||
|
|
||||||
|
### Phase 4: 48h Build-Sprint
|
||||||
|
|
||||||
|
**Aktivitaeten:**
|
||||||
|
|
||||||
|
- Gemeinsame Umsetzung mit **Pairing** (Kunde + PowerOn).
|
||||||
|
- Iterative Tests mit **realistischen Anfragen**.
|
||||||
|
- **Runbook** und **Handover**.
|
||||||
|
|
||||||
|
**Deliverables:**
|
||||||
|
|
||||||
|
- **Einsatzbereiter Agent/Workflow** im Pilot.
|
||||||
|
- **Runbook** + Enablement-Session.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Execution – Das Herzstueck
|
||||||
|
|
||||||
|
1. **Strukturierte Zielvorgaben** aus Phase 1–3.
|
||||||
|
2. **Plattformnahe Umsetzung** (kein „einmaliger Skript-Hack“ ausserhalb des Betriebsmodells).
|
||||||
|
3. **Validierung** durch Fach-Owner und Architektur-Review.
|
||||||
|
4. **Test mit Pilotnutzern** vor KPI-Messfenster.
|
||||||
|
|
||||||
|
**Effizienzgewinn (Illustration):** Statt mehrwoechiger interner Experimentierphase entsteht in **48 Stunden** ein **abnahmefaehiger** Pilot mit klarer Messgroesse.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing & Uebergabe
|
||||||
|
|
||||||
|
- **Pilotgruppe** (z. B. 10 Nutzer) fuer **10 Arbeitstage** nach Uebergabe.
|
||||||
|
- **Sammelfeedback** und kleine Nachjustierungen im vereinbarten Rahmen (optional als Zusatzleistung klaeren).
|
||||||
|
- **Auswertung der Erfolgsziele** zum vertraglichen Stichtag → Basis fuer die **CHF 7’000**-Komponente von **Launch48**.
|
||||||
|
|
||||||
|
**Enablement:** Das Ziel ist **Autonomie**: Internes Team versteht Grenzen, Bedienung und Eskalationspfad – analog zur **Enablement-Philosophie** bei groesseren PowerOn-Projekten (vgl. Wissenstransfer in der **Abraxas**-Methodendokumentation, sofern intern referenziert).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Business Impact (Illustrationsbandbreiten)
|
||||||
|
|
||||||
|
*Hinweis: Zahlen erst mit echtem Projekt ersetzen.*
|
||||||
|
|
||||||
|
| Dimension | Illustrative Aussage |
|
||||||
|
| --- | --- |
|
||||||
|
| **Time-to-Value** | Produktiver Pilot in **Tagen** statt **Monaten** |
|
||||||
|
| **Zeit pro Vorgang** | Ziel z. B. **25–40 %** Reduktion nach Baseline |
|
||||||
|
| **Qualitaet** | Weniger Streuung durch einheitliche Wissensbasis |
|
||||||
|
| **Risiko** | Weniger Shadow-AI durch **freigegebene** Plattform |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Lessons Learned (generisch, aus Sprints dieser Art)
|
||||||
|
|
||||||
|
1. **Scope schlaegt Feature-Wunschliste** – ein scharfer Use-Case traegt KPIs.
|
||||||
|
2. **Gates sparen Zeit** – Compliance und Zugang vor dem Sprint klaeren.
|
||||||
|
3. **Human-in-the-Loop** – verhindert Halluzinationen im produktiven Kontext.
|
||||||
|
4. **Runbook ist Produkt** – ohne Dokumentation sinkt Adoption.
|
||||||
|
|
||||||
|
| Herausforderung | Loesung |
|
||||||
|
| --- | --- |
|
||||||
|
| Unklare Verantwortung Fach/IT | Zwei benannte Owner von Tag 1 |
|
||||||
|
| Zu grosse Wissensmenge | Priorisierte Quellen, spaetere Erweiterung |
|
||||||
|
| Integration komplexer als gedacht | Frueh Spike oder Scope auf „read-only“ reduzieren |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Technische Details (Beispiel-Stack)
|
||||||
|
|
||||||
|
**PowerOn:**
|
||||||
|
|
||||||
|
- Power Desktop / AI Workspace
|
||||||
|
- Datenquellen (z. B. SharePoint, Uploads)
|
||||||
|
- Automation / Workflow (je nach Use-Case)
|
||||||
|
- Rollen und Sichtbarkeit gemaess Organisationsmodell
|
||||||
|
|
||||||
|
**Optional erwaehnt im echten Case:**
|
||||||
|
|
||||||
|
- Spezifische Modelle/Provider nur nach Kundenfreigabe dokumentieren.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Projektorganisation
|
||||||
|
|
||||||
|
| Meilenstein | Zeit (Beispiel) |
|
||||||
|
| --- | --- |
|
||||||
|
| Kickoff & Gates | Woche -1 |
|
||||||
|
| Sprint Tag 1 | z. B. Do |
|
||||||
|
| Sprint Tag 2 | z. B. Fr |
|
||||||
|
| Handover | Ende Tag 2 |
|
||||||
|
| KPI-Messfenster | 10 Arbeitstage |
|
||||||
|
| Auswertung | Stichtag laut Vertrag |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Verbindung zur Abraxas-Methodik (interner Verweis)
|
||||||
|
|
||||||
|
Die **Abraxas DATA Hub Migration** zeigte: **strukturierte Analyse**, **Architekturentscheide mit Review**, **KI-gestuetzte Execution** und **Enablement** liefern **hohe Geschwindigkeit bei produktionsreifer Qualitaet**. **Launch48** uebertraegt diese Prinzipien auf **kleinere, scharf umrissene KI-Piloten** auf der **PowerOn-Plattform** – mit **Fixpreis** und **vereinbarten Erfolgszielen** fuer die zweite Zahlungsstufe.
|
||||||
|
|
||||||
|
*Oeffentliche Zitate oder Logos von Abraxas nur mit schriftlicher Freigabe.*
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Fazit
|
||||||
|
|
||||||
|
**Launch48** macht aus einem konkreten Alltags-Engpass einen **messbaren Piloten** auf **PowerOn** – schnell, mit klaren Leitplanken und ohne Monatsprojekt-Pflicht. Nach dem ersten echten Kundenprojekt: dieses Template durch **verifizierte Kennzahlen**, **Zitate** und **freigegebenen Namen** ersetzen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Freigaben (Checkliste – legal / kommerziell)
|
||||||
|
|
||||||
|
- [ ] Entscheid: Duerfen wir **Abraxas** als Referenz **namentlich** nennen?
|
||||||
|
- [ ] Entscheid: Duerfen wir **diesen** Pilot-Kunden nennen?
|
||||||
|
- [ ] Template-Kennzeichnung auf Website/PDF: **„Beispielszenario“** bis zur Finalversion.
|
||||||
|
- [ ] KPI-Formulierungen von Recht/Finance geprueft.
|
||||||
|
- [ ] Screenshots nur mit anonymisierten Daten.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Referenzen
|
||||||
|
|
||||||
|
- **Kundenangebot (primaer):** [poweron-launch48-offer.md](./poweron-launch48-offer.md)
|
||||||
|
- Konzept (intern): [concept-poweron-48h-agent-offer.md](./concept-poweron-48h-agent-offer.md)
|
||||||
|
- Flyer: [flyer-poweron-48h-agent.md](./flyer-poweron-48h-agent.md)
|
||||||
|
- Plattform-Ueberblick: [product-teaser-poweron.md](./product-teaser-poweron.md)
|
||||||
|
- PowerOn Desktop Story (Marketingtiefe): [case-study-power-desktop.md](./case-study-power-desktop.md)
|
||||||
|
|
||||||
246
docs/concept-poweron-48h-agent-offer.md
Normal file
|
|
@ -0,0 +1,246 @@
|
||||||
|
# PowerOn Launch48 – Konzeptdokument (intern)
|
||||||
|
*Produktisiertes Angebot: KI auf PowerOn in 48 Stunden (Fixpreis, Erfolgsziele gestaffelt)*
|
||||||
|
|
||||||
|
**Kundenfaehiges Angebot zum Teilen:** [poweron-launch48-offer.md](./poweron-launch48-offer.md)
|
||||||
|
|
||||||
|
**Version:** 1.1 (Entwurf zur internen Freigabe)
|
||||||
|
**Bezug:** [product-teaser-poweron.md](./product-teaser-poweron.md), [case-study-power-desktop.md](./case-study-power-desktop.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Elevator Pitch
|
||||||
|
|
||||||
|
Viele Teams verlieren Kapazitaet an wiederkehrende Routine, waehrend KI-Piloten in Einzeltools haengen bleiben – ohne klare Datenhoheit und ohne messbaren Betriebsmehrwert. **Launch48** ist ein **48-Stunden-Sprint** auf der **PowerOn Enterprise-KI-Orchestrierungsplattform**: Gemeinsam mit Ihren und unseren Entwickler\*innen entsteht **ein konkreter, produktiv nutzbarer KI-Agent** (inkl. definierter Wissensbasis und Systemanbindung im vereinbarten Umfang). **Fixpreis CHF 9’000**, aufgeteilt in **CHF 2’000** bei Vertragsstart und **CHF 7’000** bei Erreichen **vorab definierter KPIs** – Ergebnis und Scope sind schriftlich fixiert, nicht „Stunden ohne Ende“.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Category-Zeile (Marketing)
|
||||||
|
|
||||||
|
**„Launch48: In 48 Stunden von Use-Case zu produktivem KI-Agenten auf PowerOn – mit messbarem Erfolg.“**
|
||||||
|
|
||||||
|
Alternativ techniknaeher (IT-Persona): **„Orchestrierter 48h-Sprint: Agent, Datenkontext und Integration – auf Ihrer PowerOn-Instanz.“**
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Ideal Customer Profile (ICP) und Ausschluss
|
||||||
|
|
||||||
|
### 3.1 ICP
|
||||||
|
|
||||||
|
- **Organisationen** in der Schweiz / DACH: KMU, Mittelstand, Software-Haeuser, Dienstleister mit wiederkehrenden Wissens- oder Verarbeitungsprozessen.
|
||||||
|
- **Ein klarer Kern-Use-Case** mit greifbarem Input/Output (z. B. Anfragebeantwortung aus internen Unterlagen, Vorbereitung standardisierter Antworten, erste Stufe Qualitaets-/Plausibilitaetschecks, strukturierte Extraktion aus definierten Dokumenten).
|
||||||
|
- **Bereitschaft**, technische Ansprechpartner, Testdaten und **Zugang zu den vereinbarten Quellen/Systemen** waehrend des Sprints bereitzustellen.
|
||||||
|
- **PowerOn** als Zielplattform akzeptiert (oder Pilot-Instanz wird fuer den Sprint bereitgestellt).
|
||||||
|
|
||||||
|
### 3.2 Ausschluss (kein Launch48 ohne Anpassung)
|
||||||
|
|
||||||
|
- Reine **Strategie-Workshops** ohne System- und Datenzugang.
|
||||||
|
- **„KI fuer alles“** ohne priorisierten Use-Case.
|
||||||
|
- Erwartung einer **vollstaendigen Unternehmens-Transformation** in 48 Stunden.
|
||||||
|
- Use-Cases mit **hochreguliertem Alleingang** ohne vorherige Compliance-/Datenschutz-Freigaben (Sprint verschieben bis Gate erfuellt).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Differenzierung (Orientierung am Markt)
|
||||||
|
|
||||||
|
| Aspekt | Typischer „AI-Hackathon / One-Day-Agent“-Stil | **Launch48 (PowerOn)** |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| **Dauer** | Oft 1 Tag vor Ort / komprimiert | **48 Stunden** gebuendelter Sprint inkl. Vorbereitung und Uebergabe |
|
||||||
|
| **Traeger** | Oft generisch / tool-offen | **PowerOn-Plattform**: Workspace, Datenquellen, Automation, Governance |
|
||||||
|
| **Daten & Privacy** | haeufig implizit | **Explizit**: Mandantenlogik, Sichtbarkeitsstufen, Privacy-Shield-Ansatz (siehe Desktop-Story) |
|
||||||
|
| **Lieferobjekt** | „funktionale KI-Loesung“ (breit) | **Ein Agent/Workflow** im definierten Scope + Runbook + Enablement |
|
||||||
|
| **Preislogik** | variabel | **Fixpreis CHF 9’000**, **CHF 7’000** an **messbare KPIs** gekoppelt |
|
||||||
|
| **Beweis** | Referenzen variabel | **Methoden-Proof:** u. a. AI-augmented Delivery (z. B. Abraxas DATA Hub Migration – siehe separate Case Study; Nennung nur mit Kundenfreigabe) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Vier Phasen (PowerOn-spezifisch)
|
||||||
|
|
||||||
|
Die Phasen sind inhaltlich mit gaengigen „Ideation → Data → Integration → Build“-Modellen vergleichbar, aber **konkret auf PowerOn** gebaut:
|
||||||
|
|
||||||
|
1. **Use-Case & Impact** – Priorisierung eines Szenarios mit hohem Business-Impact; Definition von **Erfolgskriterien und KPIs**; Abgrenzung In-/Out-of-Scope.
|
||||||
|
2. **Wissensbasis** – Aufbau/Anbindung der vereinbarten **Dokumente und Datenquellen** im PowerOn-Kontext (persoenlich / Instanz / Mandat gemaess Rollenmodell).
|
||||||
|
3. **Tools & Anbindung** – Auswahl und Umsetzung der **optimalen Integrationen** (z. B. APIs, konfigurierte Quellen, Automation-Trigger) im vereinbarten Rahmen; Freigaben und Berechtigungen.
|
||||||
|
4. **Build-Sprint (48h)** – Gemeinsame Umsetzung mit **Pairing** zwischen Kunden- und PowerOn-Team; Reviews, Tests, Uebergabe.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Scope-Grenzen (Vorschlag zur internen Finalisierung)
|
||||||
|
|
||||||
|
*Die folgenden Groessen ermoeglichen einen verteidigbaren Fixpreis. Zahlen intern verbindlich festlegen und im Angebot/Vertrag ersetzen.*
|
||||||
|
|
||||||
|
### 6.1 Im Standard-Scope (empfohlen)
|
||||||
|
|
||||||
|
| Parameter | Vorschlag | Hinweis |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| **Hauptlieferobjekt** | 1 **Agent bzw. 1 klar definierter Workflow/Automation** auf PowerOn | Erweiterung = Change Request |
|
||||||
|
| **Workspace** | 1 **PowerOn-Instanz** bzw. 1 Mandanten-Workspace | Mehr Instanzen = Zusatz |
|
||||||
|
| **Wissensquellen** | Bis **3 Quellen** (z. B. SharePoint-Bibliothek, definierter Ordner, CSV/FAQ-Dokumente) | „Quelle“ = fachlich abgegrenztes Bundle |
|
||||||
|
| **Dokumentenvolumen (indikativ)** | Bis ca. **500 MB** indexierbarer Inhalt **oder** bis ca. **2’000** Seiten-Aequivalent | Grobmasstab; technische Validierung im Gate |
|
||||||
|
| **Integrationen** | **1** zusaetzliche Systemanbindung im vereinbarten Umfang (z. B. ein REST-Webhook, ein definierter Connector) | Komplexe ERP-Tiefenintegration oft ausserhalb |
|
||||||
|
| **Nutzer-Pilotgruppe** | Bis **15** aktive Testnutzer fuer KPI-Messung | Skalierung danach |
|
||||||
|
| **Enablement** | **1** Live-Handover (60–90 Min.) **oder** Kurzvideo (30 Min.) + **Runbook** (Markdown/PDF) | |
|
||||||
|
|
||||||
|
### 6.2 Ausserhalb Standard-Scope (Zusatzangebot)
|
||||||
|
|
||||||
|
- Mehrere unabhaengige Use-Cases parallel.
|
||||||
|
- Umfangreiche Individualentwicklung ausserhalb PowerOn-Standardfeatures.
|
||||||
|
- Produktions-HA/DR, rechtliche Due-Diligence, vollstaendige Penetrationstests.
|
||||||
|
- Schulung der gesamten Belegschaft.
|
||||||
|
|
||||||
|
### 6.3 Vor-Sprint-Gates (Go/No-Go)
|
||||||
|
|
||||||
|
Vor Start der **CHF 2’000**-Phase muessen erfuellt sein:
|
||||||
|
|
||||||
|
- [ ] Geschaeftlicher **Use-Case Owner** benannt.
|
||||||
|
- [ ] **Technischer Ansprechpartner** mit Berechtigung fuer Testsystem oder Pilot.
|
||||||
|
- [ ] **Liste der Quellen** und Freigabe durch Datenschutz/Compliance (falls noetig).
|
||||||
|
- [ ] **KPI-Set** schriftlich unterschrieben (siehe Kapitel 8).
|
||||||
|
- [ ] Zugang zu PowerOn-Umgebung (Kunde oder PowerOn-Hosting laut Vereinbarung).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. 48h-Ablauf (Kalender – Beispiel)
|
||||||
|
|
||||||
|
**Vorlauf (remote, typisch 3–5 Arbeitstage vor Sprint):** Kickoff 60 Min., Scope-Freeze, Zugriffe, Testdaten.
|
||||||
|
|
||||||
|
| Zeit | Tag 1 | Tag 2 |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| Vormittag | Phase 1–2 Abschluss: Use-Case frozen, Quellen angebunden/indexiert | Phase 4: Integration finalisieren, End-to-End-Tests |
|
||||||
|
| Nachmittag | Phase 3–4 Start: Tooling, erste Agent-/Workflow-Version | Pilotlauf mit Testnutzern, Runbook, Handover-Vorbereitung |
|
||||||
|
|
||||||
|
**Direkt nach Sprint:** Handover-Termin; **KPI-Messfenster** z. B. **10 Arbeitstage** nach Uebergabe (konfigurierbar).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. KPI-Framework fuer CHF 7’000
|
||||||
|
|
||||||
|
*Im Vertrag: **3–5 KPIs** waehlen, je eine klare **Messmethode**, **Zielwert**, **Messzeitpunkt**.*
|
||||||
|
|
||||||
|
### 8.1 KPI-Katalog (Auswahl)
|
||||||
|
|
||||||
|
| ID | KPI (Beispiel) | Messidee | Beispiel-Zielwert |
|
||||||
|
| --- | --- | --- | --- |
|
||||||
|
| K1 | **Zeit pro Vorgang** | Zeitstempel Start/Ende in Pilot (oder Ticket-Stichprobe) | ≥ **30 %** Reduktion vs. Baseline (4-Wochen-Durchschnitt) |
|
||||||
|
| K2 | **Anteil automatisierter Schritte** | Definierte Teilschritte ohne manuellen Eingriff | ≥ **70 %** der Schritte im definierten Prozess |
|
||||||
|
| K3 | **Fehlerquote / Nacharbeit** | Anzahl Eskalationen oder Korrekturloops pro 100 Vorgaenge | ≤ **X** (Baseline + Schwelle) |
|
||||||
|
| K4 | **Time-to-First-Answer** | Median bis erste brauchbare Agent-Antwort | ≤ **Y Minuten** |
|
||||||
|
| K5 | **Pilot-Akzeptanz** | SUS oder interne 1–5-Befragung nach 2 Wochen | Mittelwert ≥ **4.0** |
|
||||||
|
| K6 | **Verfuegbarkeit im Pilot** | Uptime der Agent-Instanz in Messfenster | ≥ **99 %** (ausser geplante Wartung) |
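
Zur Veranschaulichung von K1 aus der Tabelle oben eine minimale Rechen-Skizze (Beispielwerte und Funktionsname sind frei gewaehlt, keine vertraglichen Groessen):

```python
# Beispiel-Skizze (hypothetische Werte): Pruefung von K1 "Zeit pro Vorgang"
# gegen eine 4-Wochen-Baseline. Nur Illustration, keine Vertragslogik.

def k1_erfuellt(baseline_minuten: float, pilot_minuten: float,
                ziel_reduktion: float = 0.30) -> bool:
    """True, wenn die gemessene Reduktion das vereinbarte Ziel erreicht."""
    reduktion = (baseline_minuten - pilot_minuten) / baseline_minuten
    return reduktion >= ziel_reduktion

# Beispiel: Baseline 20 Min./Vorgang, Pilot 13 Min./Vorgang -> 35 % Reduktion
print(k1_erfuellt(20.0, 13.0))  # True
```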
|
||||||
|
|
||||||
|
### 8.2 Regeln
|
||||||
|
|
||||||
|
- **Baseline** vor Sprint dokumentieren (Stichprobe oder Kennzahl aus Reporting).
|
||||||
|
- Bei **Teilerreichung** optional interne Policy definieren (z. B. gestaffelte Zahlung oder Nachsprint-Paket – *nur wenn gewuenscht, rechtlich klaeren*).
|
||||||
|
- **CHF 7’000** faellig bei **Erreichen aller vertraglich definierten KPI-Ziele** zum Messzeitpunkt.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Deliverables (Checkliste)
|
||||||
|
|
||||||
|
- [ ] Funktionsfaehiger **Agent/Workflow** im vereinbarten Scope auf PowerOn.
|
||||||
|
- [ ] Konfigurierte **Wissensquellen** (laut Vertrag).
|
||||||
|
- [ ] **Integration** (laut Vertrag) inkl. Testnachweis.
|
||||||
|
- [ ] **Runbook**: Bedienung, Grenzen, Eskalation, bekannte Einschraenkungen.
|
||||||
|
- [ ] **Enablement**: Session oder Video laut Vertrag.
|
||||||
|
- [ ] **Uebergabeprotokoll** mit Link auf Testfaelle / Abnahme-Checkliste.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Commercials
|
||||||
|
|
||||||
|
| Position | Betrag | Faelligkeit |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| **Gesamt Fixpreis** | **CHF 9’000** (exkl. MWSt. je nach Vereinbarung) | |
|
||||||
|
| **Anzahlung** | **CHF 2’000** | Bei Vertragsunterzeichnung / Sprint-Freigabe |
|
||||||
|
| **Erfolgszahlung** | **CHF 7’000** | Bei Nachweis der **vereinbarten KPIs** zum Messzeitpunkt |
|
||||||
|
|
||||||
|
Zusaetzliche Leistungen: nach **Stunden- oder Paketsatz** gemaess Preisliste.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 11. Risiken und Mitigation
|
||||||
|
|
||||||
|
| Risiko | Mitigation |
|
||||||
|
| --- | --- |
|
||||||
|
| Schlechte Datenqualitaet | Gate vor Sprint: Stichprobe, Bereinigung, Scope reduzieren |
|
||||||
|
| Fehlende API-Dokumentation | Frueh Integrations-Spike; sonst manueller Uebergabe-Modus im Scope |
|
||||||
|
| Compliance verzoegert | Sprint startet erst nach Freigabe; keine parallele „Schatten-Produktion“ |
|
||||||
|
| Scope Creep | Aenderungen nur per Change Request; Product Owner auf Kundenseite |
|
||||||
|
| Erwartung „magische KI“ | KPIs und Grenzen im Runbook; Human-in-the-Loop explizit |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 12. Sales Playbook
|
||||||
|
|
||||||
|
### 12.1 Discovery-Fragen (Auszug)
|
||||||
|
|
||||||
|
1. Welcher **konkrete Vorgang** kostet heute am meisten Zeit pro Woche?
|
||||||
|
2. Wo liegen die **Quellen** (Systeme, Ordner, Tickets)?
|
||||||
|
3. Wer ist **Owner** fuer Inhalt und fuer Technik?
|
||||||
|
4. Welche **Compliance**-Grenzen gelten (DSG, Kundenvertraege)?
|
||||||
|
5. Wie messen Sie heute **Qualitaet** (Fehlerquote, SLA)?
|
||||||
|
6. Gibt es eine **Baseline-Zahl** fuer die letzten 4 Wochen?
|
||||||
|
7. Wie viele **Pilotnutzer** sind realistisch in 2 Wochen?
|
||||||
|
8. Ist **PowerOn** bereits im Einsatz oder kommt eine Pilot-Instanz?
|
||||||
|
9. Was passiert bei **Erfolg** – Rollout-Plan?
|
||||||
|
10. Was waere **nicht** im Scope (bewusst ausschliessen)?
|
||||||
|
|
||||||
|
### 12.2 Einwaende
|
||||||
|
|
||||||
|
- **„Zu schnell / zu guenstig.“** → Fixpreis gilt nur bei fixem Scope; Referenzmethodik AI-augmented Delivery; menschliche Validierung.
|
||||||
|
- **„Wir haben keine Daten.“** → Mindestens FAQs oder interne Vorlagen reichen oft; sonst kein Launch48.
|
||||||
|
- **„IT blockiert.“** → Gates und Pilot-Instanz-Option; kleinster sicherer Umfang.
|
||||||
|
|
||||||
|
### 12.3 Qualifikations-Scorecard (einfach)
|
||||||
|
|
||||||
|
| Kriterium | Punkte (0–2) |
|
||||||
|
| --- | --- |
|
||||||
|
| Klarer Use-Case | |
|
||||||
|
| Zugang zu Daten bis Sprint-Start | |
|
||||||
|
| Sponsor auf Fachseite | |
|
||||||
|
| Tech-Ansprechpartner | |
|
||||||
|
| KPI denkbar | |
|
||||||
|
**Summe ≥ 8** → hohe Prioritaet fuer Angebot
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 13. Marketing-Kit (Verweise)
|
||||||
|
|
||||||
|
| Artefakt | Datei |
|
||||||
|
| --- | --- |
|
||||||
|
| **Kundenangebot (primaer, teilbar)** | [poweron-launch48-offer.md](./poweron-launch48-offer.md) |
|
||||||
|
| **4-Folien-Deck (Praesentation)** | [launch48-deck-presentation.md](./launch48-deck-presentation.md) |
|
||||||
|
| Flyer (2 Seiten, Kurzfassung) | [flyer-poweron-48h-agent.md](./flyer-poweron-48h-agent.md) |
|
||||||
|
| Case Story (Illustration / Template, nicht Primaerverkauf) | [case-study-poweron-48h-agent.md](./case-study-poweron-48h-agent.md) |
|
||||||
|
|
||||||
|
### 13.1 LinkedIn-Posts (Kurzvarianten)
|
||||||
|
|
||||||
|
1. **Outcome:** „48 Stunden. Ein Agent. Messbarer Mehrwert. Launch48 auf PowerOn – Fixpreis, KPI-gestaffelt.“
|
||||||
|
2. **IT/Governance:** „KI ohne Daten-Chaos: Launch48 verankert Ihren Agent auf PowerOn – mit Quellen, Rollen und klarer Integration.“
|
||||||
|
3. **Social Proof (nur mit Freigabe):** „Wie bei komplexen Migrationen liefern wir **schnell und review-getrieben** – jetzt als 48h-Paket fuer Ihren ersten produktiven Agent.“
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 14. Rechtliches und Freigaben (Checkliste)
|
||||||
|
|
||||||
|
Siehe [case-study-poweron-48h-agent.md](./case-study-poweron-48h-agent.md) Abschnitt „Freigaben“. Insbesondere:
|
||||||
|
|
||||||
|
- [ ] Abraxas- und andere Kundennennung in Marketing freigegeben.
|
||||||
|
- [ ] AGB/Vertrag fuer die KPI-Zahlungsklauseln geprueft.
|
||||||
|
- [ ] Angebotsname **Launch48**: Markenpruefung (intern).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 15. Team (Ansprechpartner)
|
||||||
|
|
||||||
|
- **Patrick Motsch** – Technische Leitung, AI-Strategie
|
||||||
|
- **Ida Dittrich** – Architektur, Plattform, Qualitaet
|
||||||
|
- **Stephan Schellworth** – Projektsteuerung, Business Alignment
|
||||||
|
|
||||||
|
**PowerOn AG** – [www.poweron.swiss](https://www.poweron.swiss)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Keywords
|
||||||
|
|
||||||
|
Launch48, PowerOn, KI-Agent, 48h Sprint, Fixpreis, KPI, Enterprise AI, Orchestrierung, Datenhoheit, Governance, produktisierte Dienstleistung, Schweiz, DACH
|
||||||
BIN
docs/connections-ui-tests.xlsx
Normal file
Binary file not shown.
183
docs/feature-deck-ai-chat.md
Normal file
|
|
@ -0,0 +1,183 @@
|
||||||
|
# PORTO AI Chat — Feature Slide Deck
|
||||||
|
|
||||||
|
Struktur analog zu «20260408 Local LLM.pdf» (6 Folien). Text für PowerPoint / Keynote / PDF-Export.
|
||||||
|
|
||||||
|
**Produktname:** PORTO (von PowerOn)
|
||||||
|
**Feature:** AI Chat (Unified AI Workspace)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 1 — Titelfolie
|
||||||
|
|
||||||
|
**Hauptzeile (gross):**
|
||||||
|
Ihr sicherer KI-Arbeitsplatz.
|
||||||
|
Chatten, analysieren, automatisieren — in einer Oberfläche.
|
||||||
|
|
||||||
|
**Fliesstext:**
|
||||||
|
PORTO gibt Ihrem Team einen KI-Agenten, der Dokumente versteht, Quellen verbindet und Ergebnisse liefert — ohne Datenabfluss ins Ausland.
|
||||||
|
|
||||||
|
**Kernnutzen (3 Bullets):**
|
||||||
|
|
||||||
|
- KI-Chat mit Dateizugriff und Dokumentenverständnis (RAG)
|
||||||
|
- Verbindung zu SharePoint, OneDrive, Google Drive und weiteren Quellen
|
||||||
|
- Schweizer Datenhaltung; Private LLM optional
|
||||||
|
|
||||||
|
**Fusszeile / Zielgruppe:**
|
||||||
|
Für Treuhand, Legal, Finance und weitere vertrauenssensible Bereiche.
|
||||||
|
|
||||||
|
**Logo:** PORTO / PowerOn
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 2 — Problem
|
||||||
|
|
||||||
|
**Hauptzeile:**
|
||||||
|
Warum KI-Chat im Unternehmen heute oft an der Realität scheitert
|
||||||
|
|
||||||
|
**Fliesstext:**
|
||||||
|
Viele Organisationen wollen produktiv mit KI chatten — aber sensible Prozesse und fehlender Systemkontext bremsen die Umsetzung:
|
||||||
|
|
||||||
|
**Schmerzpunkte (Bullets):**
|
||||||
|
|
||||||
|
- Vertrauliche Inhalte landen in öffentlichen Chat-Tools (Copy/Paste-Risiko)
|
||||||
|
- Standard-Chats haben keinen sicheren Zugriff auf Unternehmensdokumente
|
||||||
|
- Ergebnisse müssen manuell in Dateien, Mails und Reports übertragen werden
|
||||||
|
- IT und Compliance verlangen Kontrolle über Modelle und Datenflüsse — Nutzer wollen Geschwindigkeit
|
||||||
|
|
||||||
|
**Zwischenüberschrift:**
|
||||||
|
Die Folgen im Alltag:
|
||||||
|
|
||||||
|
**Folgen (kurze Liste):**
|
||||||
|
|
||||||
|
- Compliance- und Reputationsrisiko
|
||||||
|
- Medienbrüche und Doppelarbeit
|
||||||
|
- Langsame Bearbeitung
|
||||||
|
- Verzögerte oder uneinheitliche KI-Nutzung
|
||||||
|
|
||||||
|
**Abschlusszeile:**
|
||||||
|
Das Problem ist nicht der Wille zur Innovation. Das Problem ist der fehlende sichere Rahmen für produktiven KI-Chat mit echtem Dokumentenkontext.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 3 — Lösung
|
||||||
|
|
||||||
|
**Hauptzeile:**
|
||||||
|
PORTO von PowerOn bringt sicheren KI-Chat in produktive Abläufe
|
||||||
|
|
||||||
|
**Fliesstext:**
|
||||||
|
PORTO AI Chat verbindet Konversation mit Datenhoheit und Agentenfähigkeit.
|
||||||
|
|
||||||
|
**Vier Säulen:**
|
||||||
|
|
||||||
|
| Säule | Inhalt |
|
||||||
|
|-------|--------|
|
||||||
|
| **Kontextbewusst** | Semantische Wissensabfrage (RAG) über Ihre Dokumente — nicht nur «blind» chatten |
|
||||||
|
| **Praktisch** | Eine Arbeitsfläche: Chats, Dateien, Datenquellen; Drag & Drop, Vorschau, klare Nachvollziehbarkeit |
|
||||||
|
| **Aktiv** | KI-Agent mit Tools: lesen, zusammenfassen, strukturierte Inhalte erstellen, Dateien vorschlagen — mit Ihrer Freigabe |
|
||||||
|
| **Kontrollierbar** | Modellwahl (z. B. OpenAI, Mistral, Private LLM), definierte Quellen, Daten in der Schweiz |
|
||||||
|
|
||||||
|
**Mit PORTO nutzen Teams KI so, wie sie gebraucht wird:**
|
||||||
|
|
||||||
|
- effizient
|
||||||
|
- nachvollziehbar
|
||||||
|
- geschützt
|
||||||
|
- geeignet für vertrauliche Informationen
|
||||||
|
|
||||||
|
**Abschlusszeile:**
|
||||||
|
So wird aus Zurückhaltung echte Umsetzung.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 4 — Typische Einsatzfelder
|
||||||
|
|
||||||
|
**Hauptzeile:**
|
||||||
|
Typische Einsatzfelder für PORTO AI Chat
|
||||||
|
|
||||||
|
**Untertitel:**
|
||||||
|
Wo PORTO im Alltag den grössten Nutzen bringt — schnell, verständlich, messbar.
|
||||||
|
|
||||||
|
**Vier Kacheln (je: Problem | Mit PORTO | Nutzen):**
|
||||||
|
|
||||||
|
### 1. Dokumentenanalyse & Prüfung
|
||||||
|
|
||||||
|
- **Problem:** PDFs, Verträge und Reports sind verteilt; manuelles Suchen und Quervergleiche kosten Zeit.
|
||||||
|
- **Mit PORTO:** Dateien hochladen oder aus dem Workspace wählen; gezielt fragen — die KI nutzt Dokumentenkontext und Struktur.
|
||||||
|
- **Nutzen:** Schnellere Einschätzung, weniger Suchaufwand, konsistente Antworten auf wiederkehrende Fragen.
|
||||||
|
|
||||||
|
### 2. Berichte & Ausarbeitungen
|
||||||
|
|
||||||
|
- **Problem:** Informationen aus mehreren Quellen zusammentragen und in saubere Dokumente bringen.
|
||||||
|
- **Mit PORTO:** Agent unterstützt bei Recherche, Strukturierung und Erstellung — verbunden mit Dateien und Datenquellen.
|
||||||
|
- **Nutzen:** Weniger manuelle Zusammenführung, höhere Durchsatzrate bei wiederkehrenden Deliverables.
|
||||||
|
|
||||||
|
### 3. Kommunikation & Übersetzung
|
||||||
|
|
||||||
|
- **Problem:** Entwürfe, Zusammenfassungen und Übersetzungen entstehen fragmentiert und ohne einheitlichen Leitfaden.
|
||||||
|
- **Mit PORTO:** KI formuliert, fasst zusammen und übersetzt — im geschützten Umfeld; Anbindung an E-Mail- und Cloud-Kontext wo vorgesehen.
|
||||||
|
- **Nutzen:** Schnellere, konsistentere Kommunikation bei gleichbleibender Governance.
|
||||||
|
|
||||||
|
### 4. Wissensabruf aus dem Unternehmen
|
||||||
|
|
||||||
|
- **Problem:** Wissen steckt in SharePoint, Drives, Ordnern und Alt-Dokumenten — Antworten dauern.
|
||||||
|
- **Mit PORTO:** Semantische Suche und Kontext über angebundene Quellen und indexierte Inhalte.
|
||||||
|
- **Nutzen:** Weniger «Wer hat das Dokument?» — mehr direkte, begründete Antworten.
|
||||||
|
|
||||||
|
**Fusszeile:**
|
||||||
|
PORTO von PowerOn macht Wissensarbeit einfacher — für Fachbereiche, Führung und Operations, nicht nur für Tech-Teams.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 5 — Warum PORTO? (CTA)
|
||||||
|
|
||||||
|
**Hauptzeile (gross):**
|
||||||
|
Warum PORTO?
|
||||||
|
|
||||||
|
**Kernbotschaften (3 Zeilen):**
|
||||||
|
|
||||||
|
- Ihre Daten bleiben in der Schweiz.
|
||||||
|
- Ihre Chats und Dokumente bleiben unter Kontrolle.
|
||||||
|
- Ihre Teams werden bei Wissensarbeit messbar schneller.
|
||||||
|
|
||||||
|
**Fliesstext:**
|
||||||
|
Wir zeigen Ihnen in einem kostenlosen Erstgespräch, wie PORTO AI Chat in Ihrem sensibelsten Prozess sinnvoll eingesetzt und wertschöpfend integriert werden kann.
|
||||||
|
|
||||||
|
**Claim:**
|
||||||
|
Ihr KI-Chat für sensible Daten und Dokumente — ohne Datenabfluss, ohne Kontrollverlust.
|
||||||
|
|
||||||
|
**Abschluss:**
|
||||||
|
Weil sensible KI Vertrauen braucht.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 6 — Team (unverändert zur Master-Präsentation)
|
||||||
|
|
||||||
|
**Überschrift:**
|
||||||
|
Wir kombinieren Strategie, Technologie und Umsetzungskraft.
|
||||||
|
|
||||||
|
**WER WIR SIND — Das PowerOn Team**
|
||||||
|
|
||||||
|
**Patrick Motsch** — Partner
|
||||||
|
Leitet erfolgreich komplexe IT-Implementierungsprojekte; langjährige Erfahrung in innovativer Softwareentwicklung.
|
||||||
|
*Mission: Nachhaltige KI-Integration für Schweizer KMUs.*
|
||||||
|
|
||||||
|
**Ida Dittrich** — Product Architect
|
||||||
|
Verbindet wissenschaftliches Know-how mit praktischer IT-Erfahrung und bringt innovative Ansätze in technische Projekte ein.
|
||||||
|
|
||||||
|
**Stephan Schellworth** — Business Integration
|
||||||
|
Verbindet strategisches Denken mit praxisnaher Projektsteuerung und gestaltet digitale Projekte erfolgreich.
|
||||||
|
|
||||||
|
**Rollen (kurz):**
|
||||||
|
Patrick Motsch: CEO/CTO · Ida Dittrich: Product Architect · Stephan Schellworth: Business Integration
|
||||||
|
|
||||||
|
**Kontakt:**
|
||||||
|
PowerOn AG
|
||||||
|
Birmensdorferstrasse 94, 8003 Zürich
|
||||||
|
www.poweron.swiss
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Hinweise für Design / PDF
|
||||||
|
|
||||||
|
- Typografie und Farben wie bei «Local LLM»-Deck übernehmen.
|
||||||
|
- Folie 4: Vier gleich breite Spalten oder 2×2-Raster; «Problem / Mit PORTO / Nutzen» visuell trennen (z. B. kleine Labels).
|
||||||
|
- Optional: Ein Screenshot der Workspace-Oberfläche (3-Spalten) als dezentes Hintergrund- oder Rand-Element auf Folie 3 — nur wenn markenkonform freigegeben.
|
||||||
22
docs/feature-pitch-automation.md
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
# Automation — One-Pager
|
||||||
|
|
||||||
|
**Layout:** Links Screenshot (Flow-Editor oder Workflow-Uebersicht), rechts Text. PowerOn-Logo rechts oben.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Automation
|
||||||
|
|
||||||
|
Wiederkehrende Aufgaben einmal einrichten — und nie wieder manuell erledigen.
|
||||||
|
|
||||||
|
Die Automation in PowerOn uebernimmt wiederkehrende Ablaeufe zuverlaessig fuer Sie. Stellen Sie Schritte visuell zusammen oder nutzen Sie fertige Vorlagen — die Plattform fuehrt sie auf Knopfdruck, nach Zeitplan oder ausgeloest durch eine E-Mail aus.
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
|
||||||
|
Ein Klick oder ein Zeitplan — der Ablauf erledigt den Rest.
|
||||||
|
|
||||||
|
- Ablaeufe visuell per Drag & Drop zusammenstellen und verbinden
|
||||||
|
- Fertige Vorlagen fuer gaengige Geschaeftsprozesse sofort einsetzbar
|
||||||
|
- Start per Zeitplan, Formular, E-Mail-Eingang oder manuell
|
||||||
|
- Freigaben, Formulare und Uploads als menschliche Zwischenschritte einbinden
|
||||||
|
|
||||||
|
> **Screenshot:** Flow-Editor mit verbundenen Schritten (z. B. Zeitplan → KI-Zusammenfassung → E-Mail) oder Workflow-Liste mit Status und naechster Ausfuehrung.
|
||||||
22
docs/feature-pitch-commcoach.md
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
# Kommunikations-Coach — One-Pager
|
||||||
|
|
||||||
|
**Layout:** Links Screenshot (Dashboard oder Coaching-Session), rechts Text. PowerOn-Logo rechts oben.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Kommunikations-Coach
|
||||||
|
|
||||||
|
Besser kommunizieren — mit KI als persoenlichem Sparringspartner.
|
||||||
|
|
||||||
|
Der Kommunikations-Coach in PowerOn trainiert gezielt Gespraechssituationen aus dem Berufsalltag. Waehlen Sie ein Thema, ueben Sie per Chat oder Sprache mit der KI — und verfolgen Sie Ihren Fortschritt ueber Sessions hinweg.
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
|
||||||
|
Von der ersten Uebung bis zum messbaren Fortschritt — alles in einem Dossier.
|
||||||
|
|
||||||
|
- Coaching-Themen fuer typische Fuehrungssituationen (Feedback, Konflikte, Verhandlung u. a.)
|
||||||
|
- Training per Chat oder Sprache — mit realistischen Rollenspielen
|
||||||
|
- Bewertung nach jeder Session mit konkreten Verbesserungshinweisen
|
||||||
|
- Fortschritt, Aufgaben und Erfolge (Streaks, Level, Auszeichnungen) auf einen Blick
|
||||||
|
|
||||||
|
> **Screenshot:** Dashboard mit Streak, Kompetenz-Score und aktiven Themen oder laufende Coaching-Session mit Chat-Verlauf und Sprachsteuerung.
|
||||||
BIN
docs/files-ui-tests.xlsx
Normal file
Binary file not shown.
105
docs/flyer-poweron-48h-agent.md
Normal file
|
|
@ -0,0 +1,105 @@
|
||||||
|
# Flyer: PowerOn Launch48
|
||||||
|
*Zweiseitige Kurzfassung zum Drucken – verweist auf das Kundenangebot*
|
||||||
|
|
||||||
|
**Vollstaendiges, teilbares Angebot:** [poweron-launch48-offer.md](./poweron-launch48-offer.md)
|
||||||
|
**4-Folien-Deck (Praesentation):** [launch48-deck-presentation.md](./launch48-deck-presentation.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Layout-Briefing (fuer Designer / Canva)
|
||||||
|
|
||||||
|
| Element | Vorgabe |
|
||||||
|
| --- | --- |
|
||||||
|
| **Format** | DIN A4 oder US Letter, **zweiseitig**; alternativ **A5 hoch** fuer Events |
|
||||||
|
| **Primaerfarbe** | Blau **#1976d2** |
|
||||||
|
| **Sekundaer** | Tuerkis-Akzente, Violett dezent (wie [case-study-power-desktop.md](./case-study-power-desktop.md)) |
|
||||||
|
| **Hintergrund** | Hell, viel Weissraum; Seite 1 „Hero“ |
|
||||||
|
| **Typo** | Serioes, gut lesbar |
|
||||||
|
| **Bilder** | Optional: abstrakte Workspace-Illustration |
|
||||||
|
| **Logo** | PowerOn oben links Seite 1 |
|
||||||
|
| **QR** | Seite 2: Link zum PDF/Web **Launch48** oder Kalender |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Seite 1
|
||||||
|
|
||||||
|
### Headline
|
||||||
|
|
||||||
|
**PowerOn Launch48**
|
||||||
|
**Ihre erste produktive KI-Loesung – in 48 Stunden.**
|
||||||
|
|
||||||
|
### Subline
|
||||||
|
|
||||||
|
**Ein klar abgegrenzter Anwendungsfall. Ihre Daten in geregeltem Rahmen. Ein Ergebnis, das sich im Pilot messen laesst.**
|
||||||
|
|
||||||
|
### Drei Kurz-Punkte (Problem)
|
||||||
|
|
||||||
|
- **Viel Routine** – Teams haengen in wiederkehrenden Schritten.
|
||||||
|
- **Wissen verteilt** – Antworten dauern, Qualitaet schwankt.
|
||||||
|
- **KI ohne Leitplanken** – unsichere Tools statt freigegebener Plattform.
|
||||||
|
|
||||||
|
### Vier Phasen (grafisch 1–4, wie Praesentation)
|
||||||
|
|
||||||
|
1. **Discovery** – Gemeinsame Analyse; Use-Case mit grossem Hebel, in 48h realistisch.
|
||||||
|
2. **Design und Architektur** – Daten, Integration auf PowerOn; Erfolgsziele schriftlich vor Start.
|
||||||
|
3. **Build und Integration** – Umsetzung, Tests; Fachseite prueft mit (parallel).
|
||||||
|
4. **Deploy und Handover** – Go-Live in vereinbarter Umgebung, Doku, Einweisung.
|
||||||
|
|
||||||
|
*Volltext und Zeitrahmen:* [poweron-launch48-offer.md](./poweron-launch48-offer.md) Abschnitt „Der Ablauf“.
|
||||||
|
|
||||||
|
### Angebot (Box)
|
||||||
|
|
||||||
|
| | |
|
||||||
|
| --- | --- |
|
||||||
|
| **Paket** | **CHF 9’000** Fixpreis |
|
||||||
|
| **Zu Beginn** | **CHF 2’000** |
|
||||||
|
| **Bei Erfolg** | **CHF 7’000** (wenn die vereinbarten Erfolgsziele im Pilot erreicht sind) |
|
||||||
|
|
||||||
|
*Der Preis steht fuer Transparenz und einen klaren Rahmen: kein offenes Beratungsprojekt, sondern ein fokussiertes Paket auf PowerOn.*
|
||||||
|
*Details und Grenzen: siehe Angebotsdokument Launch48.*
|
||||||
|
|
||||||
|
### Footer
|
||||||
|
|
||||||
|
**poweron.swiss** · PowerOn AG, Zuerich
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Seite 2
|
||||||
|
|
||||||
|
### Ueberschrift
|
||||||
|
|
||||||
|
**Was Sie bekommen · Was Sie mitbringen**
|
||||||
|
|
||||||
|
**Wir:**
|
||||||
|
|
||||||
|
- Funktionierende **KI-Loesung** auf PowerOn fuer **einen** definierten Fall
|
||||||
|
- **Datenquellen** und **eine** Anbindung im Standardrahmen (wie vereinbart)
|
||||||
|
- **Einweisung** und **kurze Dokumentation**
|
||||||
|
|
||||||
|
**Sie:**
|
||||||
|
|
||||||
|
- **Ansprechperson** Fach und IT
|
||||||
|
- **Zugriffe** und **Freigaben** rechtzeitig
|
||||||
|
- **Kleine Pilotgruppe** fuer die Messung der Erfolgsziele
|
||||||
|
|
||||||
|
### Team
|
||||||
|
|
||||||
|
Patrick Motsch · Ida Dittrich · Stephan Schellworth
|
||||||
|
Birmensdorferstrasse 94, 8003 Zuerich
|
||||||
|
|
||||||
|
### Call to Action
|
||||||
|
|
||||||
|
**15-Minuten-Gespraech:** Passt Launch48 zu Ihnen?
|
||||||
|
→ QR / Link / E-Mail
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Druck-Hinweise
|
||||||
|
|
||||||
|
PDF **CMYK**, **3 mm Beschnitt**, Schriften einbetten.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Keywords
|
||||||
|
|
||||||
|
Launch48, PowerOn, Flyer, KI, 48h, Fixpreis, Kundenangebot
|
||||||
138
docs/landing-billing-transparenz-poweron.md
Normal file
|
|
@ -0,0 +1,138 @@
|
||||||
|
# PowerOn – Kosten auf einen Blick (Landingpage)
|
||||||
|
|
||||||
|
Dieses Dokument fasst die **tatsächlich im Gateway implementierte** Abrechnungslogik in verständlicher Sprache zusammen – für transparente Darstellung auf der Website. Alle Zahlen und Regeln beziehen sich auf den Stand des Codes (siehe Abschnitt [Quellen im Repository](#quellen-im-repository)).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Was kostet PowerOn? (Kurzfassung)
|
||||||
|
|
||||||
|
- **Abonnement (Standard):** Sie zahlen **pro Abrechnungszeitraum** für jeden **aktiven Benutzer** und jede **aktive Feature-Instanz** – wahlweise **monatlich** oder **jährlich** (Preise in **CHF**).
|
||||||
|
- **Inkludiert im Abo:** Ein festes **KI-Budget in CHF pro Abrechnungsperiode** (z. B. monatlich 10 CHF bzw. jährlich 120 CHF beim Standard-Plan).
|
||||||
|
- **Darüber hinaus:** KI-Nutzung wird **verbrauchsbasiert** vom Guthaben abgebucht; der Endbetrag ergibt sich aus den **Provider-Einstandskosten** plus einem **definierten Aufschlag** im Backend.
|
||||||
|
- **Speicher:** Über dem im Plan enthaltenen Datenvolumen fällt **zusätzlicher Speicher** an (**CHF pro GB und Monat**).
|
||||||
|
- **Aufladen:** Zusätzliches Guthaben ist per **Stripe Checkout** in festen Stufen möglich (**10–500 CHF**).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## So setzt sich der Preis zusammen
|
||||||
|
|
||||||
|
### 1) Fixe Abo-Komponente (Benutzer + Instanzen)
|
||||||
|
|
||||||
|
Die wählbaren Pläne sind im Backend als **fester Katalog** hinterlegt. Maßgeblich sind:
|
||||||
|
|
||||||
|
| Plan (Schlüssel) | Zeitraum | Preis pro aktivem User | Preis pro aktiver Feature-Instanz | Inkl. Datenvolumen (Plan) | Inkl. KI-Budget (pro Periode) |
|
||||||
|
|------------------|----------|------------------------|-----------------------------------|---------------------------|-------------------------------|
|
||||||
|
| **Standard (Monatlich)** `STANDARD_MONTHLY` | Monat | **90 CHF** | **150 CHF** | **1024 MB** (1 GB) | **10 CHF** |
|
||||||
|
| **Standard (Jährlich)** `STANDARD_YEARLY` | Jahr | **1080 CHF** | **1800 CHF** | **1024 MB** (1 GB) | **120 CHF** |
|
||||||
|
|
||||||
|
**Hinweis:** Die Jahrespreise entsprechen **12 ×** den Monatsbeträgen (gleiche effektive Monatsrate).
|
||||||
|
|
||||||
|
**Hinweis zu Limits:** Bei den Standard-Plänen sind im Katalog **keine** `maxUsers` / `maxFeatureInstances` gesetzt (`None` = im Modell **keine Plan-Obergrenze**; nur das **Datenvolumen** ist mit 1024 MB pro Mandat als Plan-Limit hinterlegt). Der Trial-Plan hat explizit **1** User und **3** Instanzen max.
|
||||||
|
|
||||||
|
**Wichtig für das Verständnis:** Die Abrechnung erfolgt **nutzungsorientiert in dem Sinne**, dass sich die **Gesamtsumme** aus der **Anzahl aktiver User** und **aktiver Feature-Instanzen** ergibt. Änderungen (z. B. mehr Instanzen) können über Stripe mit **Proration** abgebildet werden (technische Umsetzung im Gateway).
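
Zur Einordnung eine vereinfachte Skizze, wie ein Eintrag dieses Katalogs aussehen könnte – Feldnamen und Struktur sind hier Annahmen zur Illustration; maßgeblich bleibt `BUILTIN_PLANS` in `modules/datamodels/datamodelSubscription.py`:

```python
# Illustrative Skizze eines Plan-Eintrags – Feldnamen/Struktur sind Annahmen,
# nicht der tatsächliche Gateway-Code. Werte entsprechen der Tabelle oben.
STANDARD_MONTHLY_BEISPIEL = {
    "planKey": "STANDARD_MONTHLY",
    "interval": "month",                 # Abrechnungszeitraum
    "pricePerActiveUserChf": 90,         # pro aktivem Benutzer
    "pricePerActiveInstanceChf": 150,    # pro aktiver Feature-Instanz
    "includedStorageMb": 1024,           # inkludiertes Datenvolumen (Soft-Limit)
    "includedAiBudgetChf": 10,           # KI-Budget pro Periode
    "maxUsers": None,                    # None = keine Plan-Obergrenze
    "maxFeatureInstances": None,
    "autoRenew": True,
}
```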
|
||||||
|
|
||||||
|
### 2) Testphase (Trial)
|
||||||
|
|
||||||
|
Der Plan **7-Tage-Test** (`TRIAL_7D`) ist **kein kostenpflichtiges Abo** im Katalog-Sinne, sondern eine begrenzte Phase:
|
||||||
|
|
||||||
|
- **Dauer:** 7 Tage
|
||||||
|
- **Limits:** max. **1** User, max. **3** Feature-Instanzen, **500 MB** Datenvolumen
|
||||||
|
- **Inkl. KI-Budget:** **5 CHF**
|
||||||
|
- Nach Ablauf ist laut Katalog ein Übergang zum **Standard (Monatlich)** vorgesehen (`successorPlanKey`).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Variable Kosten (über das Abo hinaus)
|
||||||
|
|
||||||
|
### KI-Nutzung (Pay-per-Use aus dem Guthaben)
|
||||||
|
|
||||||
|
- Vor KI-Aufrufen prüft das System u. a., ob ein **aktives Abonnement** (oder Trial / begrenzt überfälliger Status) vorliegt und ob **ausreichend Guthaben** vorhanden ist.
|
||||||
|
- Die bei einem Aufruf verbuchte Summe basiert auf einem **Basispreis** (vom KI-Provider / AICore geliefert) und wird im Gateway mit einem **Aufschlagsfaktor** multipliziert.
|
||||||
|
|
||||||
|
**Implementierter Aufschlag:** Konstante `BILLING_MARKUP_PERCENT = 400` → Faktor **\(1 + 400\% = 5{,}0\)** auf den übergebenen Basisbetrag.
|
||||||
|
|
||||||
|
> **Transparenz-Hinweis:** Im Code-Kommentar neben der Konstante steht eine andere Erläuterung („Faktor 2.0“). **Maßgeblich für die Verrechnung ist die Implementierung** (`400` → Faktor 5). Bei Änderungen der Konstante bitte Landingpage-Text anpassen.
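
Als Rechen-Skizze (vereinfachte Annahme, nicht der Original-Code von `calculatePriceWithMarkup`):

```python
# Skizze der Aufschlagslogik (vereinfachte Annahme, kein Original-Code):
# BILLING_MARKUP_PERCENT = 400  ->  Faktor 1 + 400/100 = 5.0
BILLING_MARKUP_PERCENT = 400

def preis_mit_aufschlag(basisbetrag_chf: float) -> float:
    """Verrechneter Endbetrag: Provider-Basispreis mal Aufschlagsfaktor."""
    faktor = 1 + BILLING_MARKUP_PERCENT / 100
    return round(basisbetrag_chf * faktor, 4)

# Beispiel: 0.02 CHF Provider-Einstandskosten -> 0.10 CHF verrechnet
print(preis_mit_aufschlag(0.02))  # 0.1
```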
|
||||||
|
|
||||||
|
### Speicher über dem Plan-Inklusivvolumen
|
||||||
|
|
||||||
|
- **Preis überschüssigen Speichers:** **0,50 CHF pro GB und Monat** (`STORAGE_PRICE_PER_GB_CHF`), soweit das Volumen über dem im Plan enthaltenen **Soft-Limit** liegt (Standard-Pläne: **1024 MB**).
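
Eine kleine Beispielrechnung zur Einordnung (Skizze; die tatsächliche Mess- und Rundungslogik liegt im Gateway):

```python
# Skizze: Speicher-Overage in CHF pro Monat (Annahme: lineare Abrechnung pro GB;
# tatsächliche Mess-/Rundungslogik siehe datamodelBilling.py).
STORAGE_PRICE_PER_GB_CHF = 0.50
INKLUSIV_MB_STANDARD = 1024

def overage_chf_pro_monat(genutzte_mb: float,
                          inklusiv_mb: float = INKLUSIV_MB_STANDARD) -> float:
    ueberschuss_gb = max(0.0, genutzte_mb - inklusiv_mb) / 1024
    return round(ueberschuss_gb * STORAGE_PRICE_PER_GB_CHF, 2)

# Beispiel: 3 GB genutzt bei 1 GB inklusive -> 2 GB Overage -> 1.00 CHF/Monat
print(overage_chf_pro_monat(3 * 1024))  # 1.0
```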
|
||||||
|
|
||||||
|
### Guthaben aufladen (optional)
|
||||||
|
|
||||||
|
Erlaubte **Einmal-Beträge** für Stripe-Top-up (serverseitig fix): **10, 25, 50, 100, 250, 500 CHF**.
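
Die serverseitige Prüfung lässt sich sinngemäß so skizzieren (Annahme zur Illustration; maßgeblich ist `ALLOWED_AMOUNTS_CHF` in `stripeCheckout.py`):

```python
# Skizze der serverseitigen Betragsprüfung für Top-ups (Annahme, kein Original-Code).
ALLOWED_AMOUNTS_CHF = (10, 25, 50, 100, 250, 500)

def validate_topup_amount(amount_chf: int) -> int:
    """Lehnt jeden Betrag ab, der nicht im festen Katalog steht."""
    if amount_chf not in ALLOWED_AMOUNTS_CHF:
        raise ValueError(f"Unzulässiger Top-up-Betrag: {amount_chf} CHF")
    return amount_chf

validate_topup_amount(50)    # ok
# validate_topup_amount(75)  # -> ValueError
```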
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Abrechnung & Zahlung (ohne Technik-Jargon)
|
||||||
|
|
||||||
|
1. **Abo:** Aktivierung läuft über **Stripe** (wiederkehrende Abrechnung). Mengen (User/Instanzen) werden mit Stripe synchronisiert; Rechnungsstellung erfolgt über Stripe entsprechend der gewählten Periode.
|
||||||
|
2. **Guthaben:** KI-Verbrauch und ggf. Speicher-Overage belasten das **Prepaid-Guthaben** des Mandats (bzw. die kontextabhängige Kontoführung im Billing-Modul).
|
||||||
|
3. **Top-up:** Mandats-Admins können per **Stripe Checkout** Guthaben kaufen; die Gutschrift erfolgt über **Webhooks** / Bestätigung – serverseitig nur erlaubte Beträge.
|
||||||
|
4. **Nachvollziehbarkeit:** Transaktionen und Auswertungen (z. B. nach Zeitraum, Provider, Modell, Feature) sind über die **Billing-API** abrufbar (für eingeloggte Nutzer je nach Rolle).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Währung, Steuern, Laufzeit
|
||||||
|
|
||||||
|
- **Währung:** Durchgängig **CHF** (Plan-Katalog, Speicher-Overage, Top-up-Stufen).
|
||||||
|
- **Intervalle:** **Monatlich** oder **jährlich** für die Standard-Pläne; Trial ohne Abo-Intervall.
|
||||||
|
- **MwSt. / Steuerlogik:** Im Gateway-Code ist **keine** automatische Umsatzsteuerberechnung für Stripe-Checkout der Abos erkennbar; die **Unternehmens-/MwSt.-Angaben** des Betreibers können aus der Konfiguration für Kommunikation genutzt werden – **finanzrechtliche Texte** auf der Landingpage sollten mit Buchhaltung/Legal abgestimmt werden.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## FAQ (für die Website)
|
||||||
|
|
||||||
|
**Ist dies ein Abo?**
|
||||||
|
Ja — die reguläre Nutzung von PowerOn ist ein Abonnement. Sie zahlen in einem festen Rhythmus (**monatlich oder jährlich**) für Ihre aktiven Benutzer und aktiven Funktionsbereiche (Feature-Instanzen). Die Zahlung verlängert sich automatisch, bis Sie kündigen oder Ihren Tarif anpassen. Im Abo-Preis ist bereits ein **KI-Budget** enthalten, das Sie jeden Monat bzw. jedes Jahr nutzen können. Für intensive KI-Nutzung oder zusätzlichen Speicher über dem inkludierten Volumen kann separat Guthaben aufgeladen werden — so bleibt das Basis-Abo planbar, während Mehrverbrauch fair nach tatsächlicher Nutzung abgerechnet wird. Die **kostenlose Testphase** (7 Tage) ist kein bezahltes Abo und endet automatisch; über den Wechsel zu einem Standard-Tarif werden Sie rechtzeitig informiert.
|
||||||
|
|
||||||
|
<!-- LEGAL-REVIEW: Formulierungen "verlängert sich automatisch, bis Sie kündigen" und
|
||||||
|
"über den Wechsel zu einem Standard-Tarif werden Sie rechtzeitig informiert"
|
||||||
|
vor Veröffentlichung mit Legal/AGB abstimmen (autoRenew=true im Gateway-Katalog,
|
||||||
|
successorPlanKey=STANDARD_MONTHLY beim Trial). -->
|
||||||
|
|
||||||
|
**Zahle ich nur das Abo?**
|
||||||
|
Nein. Das Abo deckt **Lizenzen** (User + Instanzen) und ein **inkludiertes KI-Budget** pro Periode. Darüber hinaus zählen **zusätzliche KI-Kosten** (verbrauchsbasiert mit Aufschlag) und ggf. **Speicher über dem Planlimit**.
|
||||||
|
|
||||||
|
**Wie transparent sind die KI-Kosten?**
|
||||||
|
Jede belastbare Nutzung wird als **Transaktion** geführt (u. a. Provider, Modell, Feature-Kontext). Der Endbetrag enthält den im Code konfigurierten **Aufschlag** auf die Provider-Basis.
|
||||||
|
|
||||||
|
**Kann ich Guthaben nachladen?**
|
||||||
|
Ja, in festen Paketen (**10–500 CHF**) über **Stripe**.
|
||||||
|
|
||||||
|
**Was passiert, wenn das Guthaben nicht reicht?**
|
||||||
|
Die Plattform blockiert entsprechende **KI-Aufrufe**, sobald die Prüfung (Abo + Guthaben) nicht mehr erfüllt ist – Details siehe `BillingService.checkBalance` im Gateway.
|
||||||
|
|
||||||
|
**Gibt es eine Testphase?**
|
||||||
|
Ja: **7 Tage** mit klaren Grenzen (User, Instanzen, Volumen, **5 CHF** KI-Budget).
|
||||||
|
|
||||||
|
**Ändern sich die Preise ohne Ankündigung?**
|
||||||
|
Die öffentlich kommunizierten Beträge sollten mit dem **Deploy-Stand** des Gateways übereinstimmen: Die Standardpreise liegen im **Python-Plan-Katalog** (`BUILTIN_PLANS`), nicht in einer Marketing-Datei.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Quellen im Repository
|
||||||
|
|
||||||
|
| Thema | Datei (Gateway) |
|
||||||
|
|--------|------------------|
|
||||||
|
| Plan-Katalog, CHF-Preise, Limits, KI-Budget | `modules/datamodels/datamodelSubscription.py` (`BUILTIN_PLANS`) |
|
||||||
|
| Speicher-Overage CHF/GB/Monat | `modules/datamodels/datamodelBilling.py` (`STORAGE_PRICE_PER_GB_CHF`) |
|
||||||
|
| KI-Aufschlag / Verbrauchsbuchung | `modules/serviceCenter/services/serviceBilling/mainServiceBilling.py` (`BILLING_MARKUP_PERCENT`, `calculatePriceWithMarkup`, `recordUsage`, `checkBalance`) |
|
||||||
|
| Top-up-Beträge, Stripe Checkout | `modules/serviceCenter/services/serviceBilling/stripeCheckout.py` (`ALLOWED_AMOUNTS_CHF`) |
|
||||||
|
| Billing- & Abo-Routen (API) | `modules/routes/routeBilling.py`, `modules/routes/routeSubscription.py`, `modules/routes/routeStore.py` (`/api/store/subscription-info`) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Mini-Übersicht als Fluss (optional für Diagramm auf der Seite)
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
flowchart LR
|
||||||
|
visitor[Besucher] --> plans[Plan_User_und_Instanz_CHF]
|
||||||
|
plans --> included[Inkl_KI_Budget_pro_Periode]
|
||||||
|
included --> usage[Verbrauch_KI_und_Speicher]
|
||||||
|
usage --> topup[Optional_Stripe_TopUp]
|
||||||
|
topup --> insight[Transaktionen_und_Statistik_in_App]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Letzte inhaltliche Abstimmung mit dem Gateway-Code: Dokument erzeugt für Landingpage-Transparenz; bei Code-Änderungen bitte Tabelle und FAQ aktualisieren.*
|
||||||
233
docs/launch48-deck-presentation.md
Normal file
|
|
@ -0,0 +1,233 @@
|
||||||
|
# PowerOn 48h AI Sprint – Praesentationsdeck (4 Folien)
|
||||||
|
*Fertiger Copy-Stand fuer Canva / PowerPoint / PDF-Export. Ersetzt die fruehere Arbeitsversion `20260320_AI_Hackathon.pdf` inhaltlich.*
|
||||||
|
|
||||||
|
**Kundenangebot (Fliesstext):** [poweron-launch48-offer.md](./poweron-launch48-offer.md)
|
||||||
|
|
||||||
|
**Schreibweise:** durchgaengig **PowerOn** (nicht PowerON). Produktname im Kundenfacing: **48h AI Sprint** (nicht mehr „Launch48“ als Markenname).
|
||||||
|
|
||||||
|
**Optional – Zusatzfolie Architektur (16:9, HTML):** [poweron-ki-betriebssystem-slide.html](./poweron-ki-betriebssystem-slide.html) – im Browser oeffnen, Ansicht **1920×1080** (z. B. DevTools-Geraetemodus), **Screenshot** oder Druck als PDF fuer PowerPoint/Keynote. Prompts fuer Bild-KI: [poweron-ki-betriebssystem-prompts.md](./poweron-ki-betriebssystem-prompts.md).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 1 von 4 – Einstieg (Hero)
|
||||||
|
|
||||||
|
### Haupttitel
|
||||||
|
**48h AI Sprint**
|
||||||
|
|
||||||
|
### Nutzenversprechen (ein Satz, sales-stark)
|
||||||
|
**Von der Idee zum pilotfaehigen MVP in 48 Stunden – gebaut mit KI-gestuetztem Engineering durch PowerOn, mit Fixpreis und Erfolgsanteil.**
|
||||||
|
|
||||||
|
### Outcome (ein Satz, greifbar – keine Wiederholung von „klar abgegrenzt“)
|
||||||
|
**Sie erhalten einen funktionierenden Software-Piloten im vereinbarten Umfang – spezifiziert, umgesetzt, getestet und dokumentiert – plus schriftlich fixierte Erfolgsziele fuer Abnahme und die zweite Zahlungsstufe.**
|
||||||
|
|
||||||
|
### Badge / Meta
|
||||||
|
**PowerOn · 48h AI Sprint · 2026**
|
||||||
|
|
||||||
|
### Drei Kurz-Pills (horizontal)
|
||||||
|
| Pill 1 | Pill 2 | Pill 3 |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| **48 Stunden** Umsetzungsblock | **Fixpreis CHF 9'000** | **CHF 7'000** erfolgsgebunden |
|
||||||
|
|
||||||
|
*Hinweis fuer Layout (optional, klein unter Pills):* `CHF 7'000` = **78%** des Paketpreises – faellig bei nachgewiesenen, vorab vereinbarten Erfolgszielen im Pilot.
|
||||||
|
|
||||||
|
### Zahlungslogik (eine Zeile, zentral)
|
||||||
|
**CHF 2'000 zu Projektstart · CHF 7'000 bei erfuellten, schriftlich definierten Pilot-Zielen.**
|
||||||
|
|
||||||
|
### Vertrauen / Governance (eine Zeile)
|
||||||
|
**Gemeinsame Entscheidungsbasis: fester Leistungsrahmen, keine offene Stundenhonorarspirale. Betrieb, Datenfluesse und Freigaben stimmen wir vor dem 48h-Block mit Ihrer IT und Compliance ab.**
|
||||||
|
|
||||||
|
### Micro-CTA
|
||||||
|
**15-Minuten-Check: Passt Ihr Software-Vorhaben zum 48h AI Sprint?**
|
||||||
|
|
||||||
|
### CTA-Kanaele (konkret eintragen / im PDF verlinken)
|
||||||
|
- **Web:** [www.poweron.swiss](https://www.poweron.swiss)
|
||||||
|
- **E-Mail:** info@poweron.swiss *(Betreff-Vorschlag: „48h AI Sprint – 15-Minuten-Check“)*
|
||||||
|
- **Kalender:** *(Link zum Buchungstool hier einfuegen, sobald vorhanden)*
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Was auf Folie 1 weg soll (Redundanzen aus alter Version)
|
||||||
|
- Keine doppelte **FIXPREIS**-Box.
|
||||||
|
- Kein paralleler Marken-Mix: durchgaengig **48h AI Sprint** als Produktname auf der Folie.
|
||||||
|
- Kein Mix aus **CHF 9'000** und **CHF 9k** auf derselben Folie.
|
||||||
|
- Pill 3 nicht nur **„78% bei Erfolg“** ohne Kontext – **CHF 7'000 erfolgsgebunden** plus optionaler Hinweis auf 78%.
|
||||||
|
- **„messbarer ROI“** nur, wenn ihr ihn operationalisiert; sonst: **„messbare Erfolgsziele im Pilot“**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 2 von 4 – Der Ablauf
|
||||||
|
|
||||||
|
### Titel
|
||||||
|
**DER ABLAUF**
|
||||||
|
|
||||||
|
### Untertitel (korrigiert)
|
||||||
|
**Vier Phasen · 48 Stunden · gemeinsam mit Ihrem Team**
|
||||||
|
|
||||||
|
### Subline
|
||||||
|
**KI-gestuetztes Engineering durch erfahrene Architektinnen und Architekten – auf der PowerOn-Plattform, mit Ihren Daten und Systemen.**
|
||||||
|
|
||||||
|
### Phase 1 – Discovery
|
||||||
|
Gemeinsame Analyse Ihres Vorhabens. Wir identifizieren den Scope mit dem groessten Nutzen – **messbar** und in **48 Stunden** realistisch umsetzbar.
|
||||||
|
|
||||||
|
### Phase 2 – Design und Architektur
|
||||||
|
Software-Architektur, Datenmodell und Integration auf der **PowerOn**-Plattform. Definition der **Erfolgsziele**, die **vor dem Start schriftlich fixiert** werden und ueber die **Erfolgszahlung** (CHF 7'000) entscheiden.
|
||||||
|
|
||||||
|
### Phase 3 – Build und Integration
|
||||||
|
Umsetzung der Loesung, Anbindung an Ihre Systeme **im Vereinbarten**, Testing. **Mensch prueft mit:** Validierung durch Ihre Fachanwenderinnen und -anwender – parallel zum Build.
|
||||||
|
|
||||||
|
### Phase 4 – Deploy und Handover
|
||||||
|
**Go-Live in Ihrer vereinbarten PowerOn-Umgebung** (Pilot oder Produktion je nach Vereinbarung), Wissenstransfer, Dokumentation. Ihr Team kann die Loesung **im vereinbarten Rahmen** vom ersten Tag an **selbststaendig weiterbetreiben und ausbauen**.
|
||||||
|
|
||||||
|
### Optional: Zeit-Splits (Beispiel-Verteilung, nicht vertraglich)
|
||||||
|
*Hinweis intern: Anpassen, wenn euer echtes Modell anders ist.*
|
||||||
|
|
||||||
|
| Block | Dauer (Beispiel) |
|
||||||
|
| --- | --- |
|
||||||
|
| Discovery / Vorbereitung | 4 h |
|
||||||
|
| Design / Architektur | 8 h |
|
||||||
|
| Build / Integration | 28 h |
|
||||||
|
| Deploy / Handover | 8 h |
|
||||||
|
|
||||||
|
### Fussbereich Folie 2
|
||||||
|
**Gebaut auf PowerOn – Ihrer AI-Augmented-Engineering-Plattform.**
|
||||||
|
|
||||||
|
PowerOn ist nicht nur ein Projektrahmen: Hier entsteht Ihre Loesung mit **KI als Produktivitaetshebel** des Teams – mit Monitoring, Auditierbarkeit, Rollen- und Rechteverwaltung und Skalierbarkeit.
|
||||||
|
|
||||||
|
**Stichwoerter (Tags):** AI-augmented Engineering · Hosting nach Vorgabe · Software-Qualitaet · Nachvollziehbarkeit
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 3 von 4 – Warum PowerOn
|
||||||
|
|
||||||
|
### Kennzahl-Band (qualifiziert, nicht als Garantie)
|
||||||
|
**Deutlich schneller als klassische Monatsprojekte** – je nach Ausgangslage und Vorhaben.
|
||||||
|
|
||||||
|
*(Die fruehere Formulierung „10x“ nur verwenden, wenn ihr sie pro Kunde belegen koennt.)*
|
||||||
|
|
||||||
|
### Titel
|
||||||
|
**WARUM POWERON**
|
||||||
|
|
||||||
|
### Untertitel
|
||||||
|
**Nicht nur schnell – strukturell besser.**
|
||||||
|
|
||||||
|
### Kurzzeile
|
||||||
|
KI-gestuetzt bauen – mit nachvollziehbaren Ergebnissen und klarem Scope.
|
||||||
|
|
||||||
|
### Block 1 – AI-Augmented-Engineering-Plattform
|
||||||
|
PowerOn ist keine Ad-hoc-Einzelloesung: eine **Plattform fuer AI-augmented Engineering**. Ihre **Software-Loesung** laeuft in einer Umgebung, die fuer **Skalierung** und **Sicherheit** ausgelegt ist; die KI steigert die **Liefergeschwindigkeit** des Teams, nicht das Chat-Erlebnis allein.
|
||||||
|
|
||||||
|
### Block 2 – Erfolgsgebundene Verguetung
|
||||||
|
**CHF 7'000** (78% von CHF 9'000) zahlen Sie bei **nachgewiesenen, vorab schriftlich vereinbarten Erfolgszielen** im Pilot. So teilen wir das Ergebnisrisiko transparent.
|
||||||
|
|
||||||
|
### Block 3 – Sicherheit und Compliance fuer Ihr Projekt
|
||||||
|
**Datenhaltung nach Vorgabe:** Schweizer Hosting, Cloud nach Wahl oder andere Modelle – wir stimmen das **im Erstgespraech** mit Ihrer IT und Compliance ab.
|
||||||
|
|
||||||
|
### Block 4 – Enablement statt Abhaengigkeit
|
||||||
|
Wissenstransfer ist **fester Bestandteil**. Ihr Team versteht Bedienung, Grenzen und Weiterentwicklung **im vereinbarten Rahmen**.
|
||||||
|
|
||||||
|
### Messbare Ergebnisse (als Zielgroessen, nicht als Versprechen)
|
||||||
|
*Formulierung fuer Folie:*
|
||||||
|
|
||||||
|
**Im Pilot messen wir gemeinsam – typische Zielgroessen (je nach Vorhaben):**
|
||||||
|
- Funktionalitaet und Stabilitaet der Loesung im Alltag
|
||||||
|
- Zeit bis zur **ersten pilotfaehigen** Nutzung: **48 Stunden** Umsetzungsblock (plus vereinbarter Pilot)
|
||||||
|
- Wirtschaftlichkeit: Break-even haengt von internem Aufwand und Volumen ab – **kein fixer Monatswert ohne Daten**
|
||||||
|
|
||||||
|
*(Die frueheren harten Zahlen **-70%** und **<6 Monate** nur nutzen, wenn ihr sie durch Pilotdaten oder Rechnungsbeispiele stuetzt; sonst weglassen oder als „Illustration“ kennzeichnen.)*
|
||||||
|
|
||||||
|
### Proof-Box – Abraxas *(nur bei schriftlicher Kundenfreigabe nennen)*
|
||||||
|
|
||||||
|
**Variante A – mit Namensnennung (bei Freigabe durch Abraxas):**
|
||||||
|
|
||||||
|
> **Referenz (Methodik):** PowerOn hat die **DATA-Hub-Backend-Migration** fuer die **Abraxas Informatik AG** in **11 Tagen** umgesetzt (Node.js/TypeScript zu .NET/C#) – mit klaren Phasen, Reviews und Wissenstransfer.
|
||||||
|
> **Botschaft:** Dieselbe **Lieferdisziplin** nutzen wir, um Ihren **Software-Piloten** im Rahmen des **48h AI Sprint** schnell und kontrolliert zu bringen.
|
||||||
|
> **Details:** Case Study auf Anfrage.
|
||||||
|
|
||||||
|
**Variante B – ohne Namen (wenn keine Freigabe), ausfuehrlich fuer Proof-Folie / HTML:**
|
||||||
|
|
||||||
|
**Referenzcase (anonymisiert) – Backend-Migration unter Zeitdruck**
|
||||||
|
|
||||||
|
*Folie / Layout: optional zwei Spalten „Ausgangslage | Vorgehen“ oder vier Zeilen unten als Timeline.*
|
||||||
|
|
||||||
|
- **Ausgangslage**
|
||||||
|
- Fuehrendes **Schweizer Softwarehaus**; **geschaeftskritisches** Plattform-Backend (DATA-Hub-Umfeld)
|
||||||
|
- Jahre gewachsen, mehrere fruehere Partner, **hohe technische Schulden** und **Pentest-relevante Security-Themen**
|
||||||
|
- **Wissensluecken:** **10** Themenbereiche, **49** Klaerungsfragen vor Migration (strukturiert beantwortet)
|
||||||
|
- Klassische Groessenordnung: **3–6 Monate** statt Wochen
|
||||||
|
|
||||||
|
- **Vorgehen (4 Phasen, KI-gestuetzt, Human-in-the-Loop)**
|
||||||
|
1. **Analyse & Dokumentation** (ca. 2–3 Tage): **47** TypeScript-Dateien inventarisiert und dokumentiert
|
||||||
|
2. **Technische Spezifikation** (ca. 2–3 Tage): Ziel **.NET/C#**, Architekturentscheide, **Reviews** mit Kundenteam, **Acceptance Criteria**
|
||||||
|
3. **Execution** (ca. **1 Tag** Migration): **Node.js/TypeScript → .NET/C#**, Modul fuer Modul **architektonisch validiert**, Tests parallel
|
||||||
|
4. **Testing & Uebergabe** (ca. 2–3 Tage): Validierung, automatisierte Tests, **Wissenstransfer** (Training on the Job, Video)
|
||||||
|
|
||||||
|
- **Ergebnis**
|
||||||
|
- **11 Kalendertage** Gesamt (Kickoff bis uebergabefaehige Basis)
|
||||||
|
- **~1 Tag** fuer eigentliche Code-Migration; Tech Debt und Security-Punkte **adressiert**; Team **befaehigt**
|
||||||
|
- Relativ zu klassisch **mehrmonatiger** Migration: **ca. 10x schneller** in diesem Fall *(Indikator, keine Garantie fuer andere Vorhaben)*
|
||||||
|
|
||||||
|
- **Transfer zum 48h AI Sprint**
|
||||||
|
- Gleiche Logik: **fester Scope**, Phasen, messbare Abnahme, **Enablement** als Kern – nicht als Zusatz
|
||||||
|
|
||||||
|
*Hinweis:* Kein Garantieversprechen pro Use Case. **Vollstaendige Case Study / namentliche Referenz** nur auf Anfrage und mit **schriftlicher Kundenfreigabe**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Geeignet fuer (Software-Vorhaben im Paketrahmen)
|
||||||
|
- **MVP-** und Pilotbau, Prototyping
|
||||||
|
- **Backend-Migration** und Stack-Wechsel
|
||||||
|
- **Systemintegration** (APIs, Daten, Identity)
|
||||||
|
- **Prozessautomatisierung** und interne Tools
|
||||||
|
- **Legacy-Modernisierung** in abgegrenztem Schnitt
|
||||||
|
- Dokumentenverarbeitung, Reporting, Freigabe-Workflows – im vereinbarten Umfang
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 4 von 4 – Wer wir sind
|
||||||
|
|
||||||
|
### Titelzeile
|
||||||
|
**Wir kombinieren Strategie, Technologie und Umsetzungskraft.**
|
||||||
|
|
||||||
|
### Ueberschrift
|
||||||
|
**WER WIR SIND – Das PowerOn-Team**
|
||||||
|
|
||||||
|
### Lieferfaehigkeit 48h AI Sprint (eine Zeile, fuer Erstleser)
|
||||||
|
**Dieses Team fuehrt Discovery, Architektur, Build und Handover im 48h AI Sprint – End-to-End, mit klaren Meilensteinen.**
|
||||||
|
|
||||||
|
### Patrick Motsch
|
||||||
|
**CEO/CTO** – Steuert technische Umsetzung und komplexe IT-Projekte; sorgt dafuer, dass Ihr Software-Pilot in PowerOn produktiv wird und betreibbar bleibt.
|
||||||
|
**Mission:** Schneller, nachvollziehbarer **Softwarebau** fuer Schweizer Unternehmen – mit KI als Engineering-Hebel.
|
||||||
|
|
||||||
|
### Ida Dittrich
|
||||||
|
**Product Architect** – Verantwortet Architektur, Qualitaet und Machbarkeit auf der PowerOn-Plattform – damit Scope, Daten und Integration im 48h-Rahmen stimmig bleiben.
|
||||||
|
|
||||||
|
### Stephan Schellworth
|
||||||
|
**Business Integration** – Verbindet Vorhaben, Stakeholder und Projektsteuerung – damit Erfolgsziele vor dem Start klar sind und der Pilot messbar bleibt.
|
||||||
|
|
||||||
|
### Rollen (einzeilig, Fusszeile / Karten)
|
||||||
|
- **Patrick Motsch** – CEO/CTO
|
||||||
|
- **Ida Dittrich** – Product Architect
|
||||||
|
- **Stephan Schellworth** – Business Integration
|
||||||
|
|
||||||
|
### Kontakt und naechster Schritt
|
||||||
|
**PowerOn AG**
|
||||||
|
Birmensdorferstrasse 94, 8003 Zuerich
|
||||||
|
|
||||||
|
**15-Minuten-Check buchen:** [www.poweron.swiss](https://www.poweron.swiss) · **E-Mail:** info@poweron.swiss
|
||||||
|
*(Betreff: „48h AI Sprint – 15-Minuten-Check“; Kalenderlink ergaenzen, sobald verfuegbar.)*
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Checkliste vor PDF-Export
|
||||||
|
|
||||||
|
- [ ] Referenz: **Variante A (Abraxas namentlich)** oder **Variante B (anonym, ausfuehrlich)** gewaehlt; Freigabe fuer Namensnennung liegt vor?
|
||||||
|
- [ ] Alle **PowerOn**-Schreibweisen vereinheitlicht; Produktname **48h AI Sprint** konsistent
|
||||||
|
- [ ] Keine **doppelten** Preisboxen auf Folie 1
|
||||||
|
- [ ] **Kennzahlen** nur in der gewaehlten Strenge (hart vs. qualifiziert)
|
||||||
|
- [ ] **CTA** mit realem Kalenderlink oder zentraler E-Mail belegt
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Hinweis zur Datei in Downloads
|
||||||
|
|
||||||
|
Die bisherige Datei `20260320_AI_Hackathon.pdf` bitte **inhaltlich** an dieses Dokument anpassen (Design kann gleich bleiben). Diese Markdown-Datei ist die **autoritative Textfassung**.
|
||||||
494
docs/launch48-offer-page.html
Normal file
494
docs/launch48-offer-page.html
Normal file
|
|
@ -0,0 +1,494 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="de-CH">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
|
<meta name="description" content="PowerOn 48h AI Sprint: pilotfaehiger MVP oder Software-Pilot in 48 Stunden, KI-gestuetztes Engineering, Fixpreis CHF 9'000, CHF 7'000 erfolgsgebunden.">
|
||||||
|
<title>PowerOn 48h AI Sprint | MVP in 48 Stunden</title>
|
||||||
|
<style>
|
||||||
|
:root {
|
||||||
|
--po-blue: #1976d2;
|
||||||
|
--po-blue-dark: #12579b;
|
||||||
|
--po-teal: #00897b;
|
||||||
|
--text: #1a1a2e;
|
||||||
|
--text-muted: #5c5c6f;
|
||||||
|
--bg: #f8fafc;
|
||||||
|
--card: #ffffff;
|
||||||
|
--border: #e2e8f0;
|
||||||
|
--radius: 12px;
|
||||||
|
--shadow: 0 4px 24px rgba(25, 118, 210, 0.08);
|
||||||
|
--max: 1080px;
|
||||||
|
}
|
||||||
|
*, *::before, *::after { box-sizing: border-box; }
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
font-family: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
|
||||||
|
font-size: 1.0625rem;
|
||||||
|
line-height: 1.55;
|
||||||
|
color: var(--text);
|
||||||
|
background: var(--bg);
|
||||||
|
}
|
||||||
|
a { color: var(--po-blue); text-decoration: none; }
|
||||||
|
a:hover { text-decoration: underline; }
|
||||||
|
.wrap { max-width: var(--max); margin: 0 auto; padding: 0 1.25rem; }
|
||||||
|
/* Header */
|
||||||
|
header.site {
|
||||||
|
background: linear-gradient(135deg, var(--po-blue-dark) 0%, var(--po-blue) 48%, #1565c0 100%);
|
||||||
|
color: #fff;
|
||||||
|
padding: 2.5rem 0 3.25rem;
|
||||||
|
}
|
||||||
|
.badge {
|
||||||
|
display: inline-block;
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 600;
|
||||||
|
letter-spacing: 0.06em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
opacity: 0.92;
|
||||||
|
margin-bottom: 1rem;
|
||||||
|
}
|
||||||
|
header h1 {
|
||||||
|
margin: 0 0 0.75rem;
|
||||||
|
font-size: clamp(1.85rem, 4.5vw, 2.5rem);
|
||||||
|
font-weight: 700;
|
||||||
|
line-height: 1.15;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
}
|
||||||
|
.hero-lead {
|
||||||
|
font-size: 1.2rem;
|
||||||
|
max-width: 38rem;
|
||||||
|
opacity: 0.96;
|
||||||
|
margin: 0 0 0.5rem;
|
||||||
|
}
|
||||||
|
.hero-outcome {
|
||||||
|
font-size: 1.02rem;
|
||||||
|
font-weight: 600;
|
||||||
|
line-height: 1.45;
|
||||||
|
max-width: 40rem;
|
||||||
|
opacity: 0.98;
|
||||||
|
margin: 0 0 1.35rem;
|
||||||
|
}
|
||||||
|
.pills {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: 0.65rem;
|
||||||
|
margin-bottom: 1.25rem;
|
||||||
|
}
|
||||||
|
.pill {
|
||||||
|
background: rgba(255,255,255,0.14);
|
||||||
|
border: 1px solid rgba(255,255,255,0.28);
|
||||||
|
padding: 0.5rem 1rem;
|
||||||
|
border-radius: 999px;
|
||||||
|
font-size: 0.9rem;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
.pill strong { font-weight: 700; }
|
||||||
|
.payment-line {
|
||||||
|
font-size: 0.95rem;
|
||||||
|
opacity: 0.95;
|
||||||
|
margin-bottom: 1rem;
|
||||||
|
max-width: 40rem;
|
||||||
|
}
|
||||||
|
.trust-line {
|
||||||
|
font-size: 0.88rem;
|
||||||
|
opacity: 0.85;
|
||||||
|
max-width: 42rem;
|
||||||
|
margin-bottom: 1.75rem;
|
||||||
|
}
|
||||||
|
header.site .pill-hint {
|
||||||
|
font-size: 0.8rem;
|
||||||
|
opacity: 0.82;
|
||||||
|
max-width: 42rem;
|
||||||
|
margin: 0.5rem 0 1rem;
|
||||||
|
}
|
||||||
|
.btn {
|
||||||
|
display: inline-block;
|
||||||
|
background: #fff;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
font-weight: 600;
|
||||||
|
padding: 0.85rem 1.5rem;
|
||||||
|
border-radius: var(--radius);
|
||||||
|
box-shadow: 0 2px 12px rgba(0,0,0,0.12);
|
||||||
|
border: none;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 1rem;
|
||||||
|
}
|
||||||
|
.btn:hover { text-decoration: none; opacity: 0.95; }
|
||||||
|
.btn-secondary {
|
||||||
|
background: transparent;
|
||||||
|
color: #fff;
|
||||||
|
border: 2px solid rgba(255,255,255,0.55);
|
||||||
|
box-shadow: none;
|
||||||
|
margin-left: 0.5rem;
|
||||||
|
}
|
||||||
|
@media (max-width: 560px) {
|
||||||
|
.btn-secondary { margin-left: 0; margin-top: 0.65rem; display: inline-block; }
|
||||||
|
}
|
||||||
|
/* Sections */
|
||||||
|
section {
|
||||||
|
padding: 3rem 0;
|
||||||
|
}
|
||||||
|
section.alt { background: #fff; }
|
||||||
|
h2 {
|
||||||
|
margin: 0 0 0.35rem;
|
||||||
|
font-size: clamp(1.35rem, 3vw, 1.65rem);
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.section-intro {
|
||||||
|
color: var(--text-muted);
|
||||||
|
margin: 0 0 1.75rem;
|
||||||
|
max-width: 38rem;
|
||||||
|
}
|
||||||
|
/* Pain grid */
|
||||||
|
.grid-3 {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(240px, 1fr));
|
||||||
|
gap: 1.25rem;
|
||||||
|
}
|
||||||
|
.card {
|
||||||
|
background: var(--card);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: var(--radius);
|
||||||
|
padding: 1.35rem 1.5rem;
|
||||||
|
box-shadow: var(--shadow);
|
||||||
|
}
|
||||||
|
.card h3 {
|
||||||
|
margin: 0 0 0.5rem;
|
||||||
|
font-size: 1.05rem;
|
||||||
|
color: var(--po-blue);
|
||||||
|
}
|
||||||
|
.card p { margin: 0; color: var(--text-muted); font-size: 0.98rem; }
|
||||||
|
/* Phases */
|
||||||
|
.phases-head { text-align: center; margin-bottom: 2rem; }
|
||||||
|
.phases-head h2 { margin-bottom: 0.35rem; }
|
||||||
|
.phases-head .sub { color: var(--text-muted); margin: 0; font-size: 1rem; }
|
||||||
|
.steps {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
|
||||||
|
gap: 1rem;
|
||||||
|
counter-reset: step;
|
||||||
|
}
|
||||||
|
.step {
|
||||||
|
position: relative;
|
||||||
|
background: var(--card);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: var(--radius);
|
||||||
|
padding: 1.35rem 1.25rem 1.25rem 1.35rem;
|
||||||
|
border-top: 4px solid var(--po-blue);
|
||||||
|
}
|
||||||
|
.step:nth-child(2) { border-top-color: #e65100; }
|
||||||
|
.step:nth-child(3) { border-top-color: #7b1fa2; }
|
||||||
|
.step:nth-child(4) { border-top-color: #c2185b; }
|
||||||
|
.step-num {
|
||||||
|
font-size: 0.75rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--po-blue);
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
margin-bottom: 0.35rem;
|
||||||
|
}
|
||||||
|
.step:nth-child(2) .step-num { color: #e65100; }
|
||||||
|
.step:nth-child(3) .step-num { color: #7b1fa2; }
|
||||||
|
.step:nth-child(4) .step-num { color: #c2185b; }
|
||||||
|
.step h3 { margin: 0 0 0.5rem; font-size: 1.05rem; }
|
||||||
|
.step p { margin: 0; font-size: 0.92rem; color: var(--text-muted); line-height: 1.5; }
|
||||||
|
/* Why */
|
||||||
|
.why-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
|
||||||
|
gap: 1.25rem;
|
||||||
|
}
|
||||||
|
.why-card h3 { margin: 0 0 0.5rem; font-size: 1.05rem; }
|
||||||
|
.why-card p { margin: 0; color: var(--text-muted); font-size: 0.95rem; }
|
||||||
|
/* Proof */
|
||||||
|
.proof {
|
||||||
|
background: linear-gradient(180deg, #f0f7fc 0%, #fff 100%);
|
||||||
|
border: 1px solid #cfe8fc;
|
||||||
|
border-radius: var(--radius);
|
||||||
|
padding: 1.5rem 1.75rem;
|
||||||
|
margin-top: 2rem;
|
||||||
|
}
|
||||||
|
.proof h3 { margin: 0 0 0.5rem; font-size: 1.05rem; color: var(--po-blue-dark); }
|
||||||
|
.proof h4 {
|
||||||
|
margin: 1.1rem 0 0.4rem;
|
||||||
|
font-size: 0.82rem;
|
||||||
|
font-weight: 700;
|
||||||
|
text-transform: uppercase;
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
color: var(--po-blue);
|
||||||
|
}
|
||||||
|
.proof h4:first-of-type { margin-top: 0; }
|
||||||
|
.proof p { margin: 0 0 0.55rem; font-size: 0.95rem; color: var(--text); line-height: 1.55; }
|
||||||
|
.proof .fine { font-size: 0.8rem; color: var(--text-muted); margin-top: 0.85rem; margin-bottom: 0; }
|
||||||
|
.deliverables-strip {
|
||||||
|
margin-top: 1.75rem;
|
||||||
|
padding: 1.25rem 1.5rem;
|
||||||
|
background: #f1f5f9;
|
||||||
|
border-radius: var(--radius);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
}
|
||||||
|
.deliverables-strip h3 {
|
||||||
|
margin: 0 0 0.65rem;
|
||||||
|
font-size: 0.95rem;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.deliverables-strip ul {
|
||||||
|
margin: 0;
|
||||||
|
padding-left: 1.2rem;
|
||||||
|
color: var(--text-muted);
|
||||||
|
font-size: 0.92rem;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
.deliverables-strip li { margin-bottom: 0.35rem; }
|
||||||
|
/* Use cases */
|
||||||
|
ul.check {
|
||||||
|
list-style: none;
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
|
||||||
|
gap: 0.5rem 1.5rem;
|
||||||
|
}
|
||||||
|
ul.check li {
|
||||||
|
padding-left: 1.35rem;
|
||||||
|
position: relative;
|
||||||
|
font-size: 0.95rem;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
ul.check li::before {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
top: 0.45rem;
|
||||||
|
width: 0.5rem;
|
||||||
|
height: 0.5rem;
|
||||||
|
background: var(--po-teal);
|
||||||
|
border-radius: 2px;
|
||||||
|
}
|
||||||
|
/* CTA footer */
|
||||||
|
.cta-band {
|
||||||
|
background: var(--po-blue);
|
||||||
|
color: #fff;
|
||||||
|
text-align: center;
|
||||||
|
padding: 2.75rem 1.25rem;
|
||||||
|
}
|
||||||
|
.cta-band h2 { color: #fff; margin-bottom: 0.5rem; }
|
||||||
|
.cta-band p { opacity: 0.92; margin: 0 0 1.25rem; }
|
||||||
|
.cta-band .btn { color: var(--po-blue); }
|
||||||
|
.cta-band .btn.btn-secondary {
|
||||||
|
color: #fff;
|
||||||
|
border-color: rgba(255, 255, 255, 0.55);
|
||||||
|
}
|
||||||
|
footer.legal {
|
||||||
|
padding: 1.5rem 1.25rem;
|
||||||
|
text-align: center;
|
||||||
|
font-size: 0.85rem;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
.skip-link {
|
||||||
|
position: absolute;
|
||||||
|
left: -9999px;
|
||||||
|
}
|
||||||
|
.skip-link:focus { left: 1rem; top: 1rem; z-index: 100; background: #fff; padding: 0.5rem; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<a class="skip-link" href="#main">Zum Inhalt</a>
|
||||||
|
|
||||||
|
<header class="site">
|
||||||
|
<div class="wrap">
|
||||||
|
<p class="badge">PowerOn · 48h AI Sprint · 2026</p>
|
||||||
|
<h1>48h AI Sprint</h1>
|
||||||
|
<p class="hero-lead">Von der Idee zum <strong>pilotfaehigen MVP</strong> in <strong>48 Stunden</strong> – gebaut mit <strong>KI-gestuetztem Engineering</strong> durch das PowerOn-Team, zum <strong>Fixpreis</strong> und mit Erfolgsanteil. Die KI ist unser Produktivitaetshebel – Ihr Lieferobjekt ist <strong>funktionierende Software</strong>.</p>
|
||||||
|
<p class="hero-outcome">Sie erhalten einen <strong>funktionierenden Software-Piloten</strong> im vereinbarten Umfang: spezifiziert, umgesetzt, getestet, dokumentiert – plus <strong>schriftlich fixierte Erfolgsziele</strong> fuer Abnahme und die zweite Zahlungsstufe.</p>
|
||||||
|
<div class="pills" role="list">
|
||||||
|
<span class="pill" role="listitem"><strong>48 Stunden</strong> Umsetzungsblock</span>
|
||||||
|
<span class="pill" role="listitem"><strong>Fixpreis CHF 9’000</strong></span>
|
||||||
|
<span class="pill" role="listitem"><strong>CHF 7’000</strong> erfolgsgebunden</span>
|
||||||
|
</div>
|
||||||
|
<p class="pill-hint">CHF 7’000 entspricht 78% des Pakets – wird faellig, sobald die <strong>vorab vereinbarten Erfolgsziele</strong> im Pilot nachgewiesen sind.</p>
|
||||||
|
<p class="payment-line"><strong>Zahlungslogik:</strong> CHF 2’000 zu Projektstart · CHF 7’000 bei erfuellten, schriftlich definierten Pilot-Zielen.</p>
|
||||||
|
<p class="trust-line"><strong>Gemeinsame Entscheidungsbasis:</strong> fester Leistungsrahmen, keine offene Stundenhonorarspirale. Betrieb, Datenfluesse und Freigaben stimmen wir <strong>vor</strong> dem 48h-Block mit Ihrer IT und Compliance ab.</p>
|
||||||
|
<p>
|
||||||
|
<a class="btn" href="https://www.poweron.swiss" target="_blank" rel="noopener">15-Minuten-Check – passt Ihr Vorhaben?</a>
|
||||||
|
<a class="btn btn-secondary" href="mailto:info@poweron.swiss?subject=48h%20AI%20Sprint%20%E2%80%93%2015-Minuten-Check">E-Mail</a>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<main id="main">
|
||||||
|
<section class="alt" aria-labelledby="pain-title">
|
||||||
|
<div class="wrap">
|
||||||
|
<h2 id="pain-title">Warum viele bei Software-Vorhaben zoegern</h2>
|
||||||
|
<p class="section-intro">MVPs und Piloten rutschen oft in lange Vorlaufphasen – Scope wabert, Budget bleibt offen, der erste echte Nutzen kommt zu spaet. Der <strong>48h AI Sprint</strong> verbindet <strong>Tempo im Umsetzungsblock</strong>, einen <strong>festen Leistungsrahmen</strong> und eine <strong>messbare Pilot-Abnahme</strong>.</p>
|
||||||
|
<div class="grid-3">
|
||||||
|
<div class="card">
|
||||||
|
<h3>Scope ohne Schärfe</h3>
|
||||||
|
<p>Ohne klare Grenzen wächst das Vorhaben ständig – und Ende offen statt Lieferdatum. Sie brauchen einen <strong>abgeschlossenen Pilot-Schnitt</strong>, der sich bewerten lässt.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card">
|
||||||
|
<h3>Budget ohne Plan</h3>
|
||||||
|
<p>Stundensätze und offene Schätzungen machen Einkauf und Führung nervös. <strong>Fixpreis plus erfolgsgebundener Anteil</strong> schafft eine gemeinsame Entscheidungsbasis.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card">
|
||||||
|
<h3>Zu lange bis zum MVP</h3>
|
||||||
|
<p>Klassische Monatsprojekte versanden leicht zwischen Workshops und Spezifikationen. Sie wollen <strong>schnell sehen</strong>, ob Architektur, Integration und Nutzen im Alltag tragen.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section aria-labelledby="outcome-title">
|
||||||
|
<div class="wrap">
|
||||||
|
<h2 id="outcome-title">Was Sie am Ende haben</h2>
|
||||||
|
<p class="section-intro">Konkrete Software-Lieferobjekte statt Folienstapel: alles, was Sie brauchen, um im Alltag zu testen, zu messen und intern zu entscheiden – ob und wie Sie skalieren.</p>
|
||||||
|
<div class="grid-3">
|
||||||
|
<div class="card">
|
||||||
|
<h3>Funktionierender Software-Pilot</h3>
|
||||||
|
<p>Eine <strong>pilotfaehige Loesung</strong> im vereinbarten Umfang – z. B. MVP, Integrations-Schnittstelle, Automatisierung oder Migrationsschritt – mit realistischen Testfaellen aus Ihrem Alltag, nicht als reines Konzeptpapier.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card">
|
||||||
|
<h3>Daten, Integration, Abnahme</h3>
|
||||||
|
<p><strong>Freigegebene</strong> Datenquellen und <strong>eine</strong> Systemanbindung wie vereinbart. <strong>Erfolgsziele und Abnahmekriterien</strong> sind vor dem 48h-Block schriftlich fixiert – dieselbe Sprache fuer Fachbereich, IT und Einkauf.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card">
|
||||||
|
<h3>Betrieb & Wissenstransfer</h3>
|
||||||
|
<p>Kurze Dokumentation, Einweisung und Uebergabe, damit Ihr Team die Loesung im Paketrahmen <strong>selbststaendig weiterbetreiben oder ausbauen</strong> kann – inklusive Grenzen, Rollen und naechster sinnvoller Schritte.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="deliverables-strip">
|
||||||
|
<h3>Zeitlicher Rahmen (Orientierung)</h3>
|
||||||
|
<ul>
|
||||||
|
<li><strong>Vorbereitung</strong> vor dem Block: Discovery, Architektur, Freigaben – typischerweise einige Arbeitstage.</li>
|
||||||
|
<li><strong>48 Stunden</strong> intensiver Umsetzungsblock gemeinsam mit Ihrem Team.</li>
|
||||||
|
<li><strong>Pilotphase</strong> danach: Messfenster fuer die vereinbarten Erfolgsziele (Dauer wie im Angebot festgelegt).</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="alt" aria-labelledby="flow-title">
|
||||||
|
<div class="wrap">
|
||||||
|
<div class="phases-head">
|
||||||
|
<h2 id="flow-title">Der Ablauf</h2>
|
||||||
|
<p class="sub"><strong>Vier Phasen</strong> · <strong>48 Stunden</strong> Umsetzung · <strong>gemeinsam</strong> mit Ihrem Team</p>
|
||||||
|
<p class="sub" style="margin-top:0.5rem">Ihre freigegebenen Systeme und Daten – umgesetzt durch <strong>KI-gestuetztes Engineering</strong> erfahrener Architektinnen und Architekten auf der PowerOn-Plattform, mit nachvollziehbaren Abläufen und wachsender Skalierbarkeit.</p>
|
||||||
|
</div>
|
||||||
|
<div class="steps">
|
||||||
|
<article class="step">
|
||||||
|
<div class="step-num">Phase 1</div>
|
||||||
|
<h3>Discovery</h3>
|
||||||
|
<p>Gemeinsame Analyse: Wir waehlen den Use-Case mit dem groessten Hebel – messbar und in 48 Stunden realistisch umsetzbar.</p>
|
||||||
|
</article>
|
||||||
|
<article class="step">
|
||||||
|
<div class="step-num">Phase 2</div>
|
||||||
|
<h3>Design & Architektur</h3>
|
||||||
|
<p>Software-Architektur, Datenmodell und Integration auf PowerOn. <strong>Erfolgsziele</strong> werden vor dem Start schriftlich fixiert – sie steuern die Erfolgszahlung.</p>
|
||||||
|
</article>
|
||||||
|
<article class="step">
|
||||||
|
<div class="step-num">Phase 3</div>
|
||||||
|
<h3>Build & Integration</h3>
|
||||||
|
<p>Build der Loesung, Anbindung im Vereinbarten, Tests. Ihre Fachseite prueft mit – parallel zum Build, mit Alltags-Beispielen.</p>
|
||||||
|
</article>
|
||||||
|
<article class="step">
|
||||||
|
<div class="step-num">Phase 4</div>
|
||||||
|
<h3>Deploy & Handover</h3>
|
||||||
|
<p>Go-Live in Ihrer <strong>vereinbarten PowerOn-Umgebung</strong>, Wissenstransfer und Dokumentation für den laufenden Betrieb.</p>
|
||||||
|
</article>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section aria-labelledby="why-title">
|
||||||
|
<div class="wrap">
|
||||||
|
<h2 id="why-title">Warum PowerOn</h2>
|
||||||
|
<p class="section-intro">Nicht nur schnell – strukturell besser: <strong>AI-augmented Engineering</strong> auf einer Plattform mit klaren Rollen und nachvollziehbaren Ergebnissen.</p>
|
||||||
|
<div class="why-grid">
|
||||||
|
<div class="card why-card">
|
||||||
|
<h3>AI-Augmented-Engineering-Plattform</h3>
|
||||||
|
<p>PowerOn ist keine Ad-hoc-Einzelloesung. Wir bauen Ihre Loesung auf einer Umgebung, auf der <strong>KI-gestuetzte Produktivitaet</strong> und klassische Softwarequalitaet zusammenkommen – Skalierung, Rechte, Nachvollziehbarkeit inbegriffen.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card why-card">
|
||||||
|
<h3>Erfolg teilen</h3>
|
||||||
|
<p><strong>CHF 7’000</strong> (78% von CHF 9’000) werden erst faellig, wenn die <strong>vorab schriftlich vereinbarten Erfolgsziele</strong> im Pilot nachgewiesen sind.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card why-card">
|
||||||
|
<h3>Daten nach Vorgabe</h3>
|
||||||
|
<p>Sicherheit und Compliance fuer Ihr Softwareprojekt: Hosting- und Verarbeitungsmodell stimmen wir mit Ihrer IT ab – vom Schweizer Rechenzentrum bis zu definierten Cloud-Szenarien.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card why-card">
|
||||||
|
<h3>Enablement</h3>
|
||||||
|
<p>Wissenstransfer ist fester Bestandteil. Ihr Team versteht Bedienung, Grenzen und naechste Schritte im vereinbarten Rahmen.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<aside class="proof" aria-labelledby="proof-title">
|
||||||
|
<h3 id="proof-title">Referenzcase (anonymisiert) – Backend-Migration unter Zeitdruck</h3>
|
||||||
|
<!-- Anonym: keine Kundennamen. Variante mit Namensnennung: siehe launch48-deck-presentation.md Variante A. -->
|
||||||
|
<h4>Ausgangslage</h4>
|
||||||
|
<p>Ein fuehrendes <strong>Schweizer Softwarehaus</strong> modernisierte ein <strong>geschaeftskritisches Plattform-Backend</strong> (DATA-Hub-Umfeld): ueber Jahre gewachsen, mehrere fruehere Partner, <strong>erhebliche technische Schulden</strong> und <strong>Sicherheitsbefunde</strong> (u.a. aus Pentests). Zusaetzlich <strong>Wissensluecken</strong> im Bestand – strukturiert geklaert in <strong>10 Themenbereichen</strong> mit <strong>49 Detailfragen</strong> vor der Umsetzung. Ein vergleichbares Vorhaben waere klassisch oft mit <strong>3–6 Monaten</strong> und hohem internen Aufwand geplant worden.</p>
|
||||||
|
<h4>Vorgehen (4 Phasen, KI-gestuetzt, Human-in-the-Loop)</h4>
|
||||||
|
<p><strong>1. Analyse & Dokumentation</strong> (ca. 2–3 Tage): Inventur und strukturierte Analyse von <strong>47 TypeScript-Dateien</strong>, Geschaeftslogik und Abhaengigkeiten; Ergebnis in dokumentierter Form fuer Entscheid und Migration.</p>
|
||||||
|
<p><strong>2. Technische Spezifikation</strong> (ca. 2–3 Tage): Zielarchitektur <strong>.NET / C#</strong> (u.a. ORM, APIs, Anbindungen an Messaging, Object Storage, Identity); <strong>Review-Sessions</strong> mit dem Kundenteam; feste <strong>Acceptance Criteria</strong>.</p>
|
||||||
|
<p><strong>3. Execution</strong> (ca. <strong>1 Tag</strong> fuer die eigentliche Code-Migration): KI unterstuetzt die Uebersetzung <strong>Node.js/TypeScript → .NET/C#</strong>; erfahrene Architektinnen und Architekten <strong>validieren jedes Modul</strong>; Tests und Integration laufen parallel.</p>
|
||||||
|
<p><strong>4. Testing & Uebergabe</strong> (ca. 2–3 Tage): Mock- und End-to-End-Validierung, Testdaten, automatisierte Tests; <strong>Wissenstransfer</strong> (Training on the Job, aufgezeichnete Session) – damit das interne Team den Ansatz <strong>eigenstaendig fortsetzen</strong> kann.</p>
|
||||||
|
<h4>Ergebnis</h4>
|
||||||
|
<p>Gesamtprojekt <strong>11 Kalendertage</strong> vom Kickoff bis zur uebergabefaehigen .NET/C#-Basis – bei gleichzeitiger <strong>Bereinigung von Tech Debt</strong>, Adressierung relevanter <strong>Security-Punkte</strong> und <strong>vollstaendigem Enablement</strong> des Kundenteams. Relativ zur typischen Planungsgroessenordnung <strong>mehrmonatiger</strong> klassischer Migration: <strong>ca. 10x schnellere</strong> Time-to-Result in diesem Fall (kein Uebertragungsversprechen fuer jedes Projekt).</p>
|
||||||
|
<h4>Transfer zum 48h AI Sprint</h4>
|
||||||
|
<p>Dieselbe Lieferdisziplin nutzen wir fuer Ihren <strong>Software-Piloten</strong>: <strong>klarer Scope</strong>, feste Phasen, nachvollziehbare Zwischenresultate, messbare Abnahme – plus <strong>Wissenstransfer</strong> als fester Bestandteil, nicht als Zusatz.</p>
|
||||||
|
<p class="fine">Kein Garantieversprechen pro Use Case; Dauer und Aufwand haengen von Ausgangslage und Freigaben ab. <strong>Vollstaendige Case Study und namentliche Referenz</strong> auf Anfrage und nur mit Kundenfreigabe.</p>
|
||||||
|
</aside>
|
||||||
|
|
||||||
|
<h2 style="margin-top:2.5rem;">Geeignet fuer</h2>
|
||||||
|
<p class="section-intro">Typische Einstiege – immer im vereinbarten Paketrahmen:</p>
|
||||||
|
<ul class="check">
|
||||||
|
<li><strong>MVP-</strong> und Pilotbau (neue Produkte, Schnittstellen, Prozesse)</li>
|
||||||
|
<li><strong>Backend-Migration</strong> und Stack-Wechsel (wie im Referenzcase)</li>
|
||||||
|
<li><strong>Systemintegration</strong> (APIs, Messaging, Identity, Datenpipelines)</li>
|
||||||
|
<li><strong>Prozessautomatisierung</strong> und interne Tools</li>
|
||||||
|
<li><strong>Prototyping</strong> und Machbarkeitsnachweis vor groesserem Budget</li>
|
||||||
|
<li><strong>Legacy-Modernisierung</strong> in abgegrenztem Schnitt</li>
|
||||||
|
<li>Dokumentenverarbeitung, Reporting, Freigabe-Workflows – im vereinbarten Umfang</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<section class="alt" aria-labelledby="team-title">
|
||||||
|
<div class="wrap">
|
||||||
|
<h2 id="team-title">Das PowerOn-Team</h2>
|
||||||
|
<p class="section-intro">Wir kombinieren Strategie, Technologie und Umsetzungskraft. Dieses Team fuehrt Discovery, Architektur, Build und Handover im <strong>48h AI Sprint</strong> – End-to-End, mit klaren Meilensteinen.</p>
|
||||||
|
<div class="grid-3">
|
||||||
|
<div class="card">
|
||||||
|
<h3>Patrick Motsch</h3>
|
||||||
|
<p><strong>CEO/CTO</strong> – Steuert technische Umsetzung und komplexe IT-Projekte; sorgt dafuer, dass Ihr Software-Pilot in PowerOn produktiv wird und betreibbar bleibt.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card">
|
||||||
|
<h3>Ida Dittrich</h3>
|
||||||
|
<p><strong>Product Architect</strong> – Verantwortet Architektur, Qualitaet und Machbarkeit auf der PowerOn-Plattform – damit Scope, Daten und Integration im 48h-Rahmen stimmig bleiben.</p>
|
||||||
|
</div>
|
||||||
|
<div class="card">
|
||||||
|
<h3>Stephan Schellworth</h3>
|
||||||
|
<p><strong>Business Integration</strong> – Verbindet Use Case, Stakeholder und Projektsteuerung – damit Erfolgsziele vor dem Start klar sind und der Pilot messbar bleibt.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
</main>
|
||||||
|
|
||||||
|
<div class="cta-band">
|
||||||
|
<div class="wrap">
|
||||||
|
<h2>Bereit fuer den 15-Minuten-Check?</h2>
|
||||||
|
<p>Wir sagen ehrlich, ob Ihr Vorhaben zum <strong>48h AI Sprint</strong> passt – ohne Druck.</p>
|
||||||
|
<a class="btn" href="https://www.poweron.swiss" target="_blank" rel="noopener">15-Minuten-Check – poweron.swiss</a>
|
||||||
|
<a class="btn btn-secondary" href="mailto:info@poweron.swiss?subject=48h%20AI%20Sprint%20%E2%80%93%2015-Minuten-Check">E-Mail</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<footer class="legal">
|
||||||
|
<div class="wrap">
|
||||||
|
<p><strong>PowerOn AG</strong> · Birmensdorferstrasse 94, 8003 Zuerich · <a href="https://www.poweron.swiss">www.poweron.swiss</a></p>
|
||||||
|
<p>Fliesstext und Vertragsdetails: siehe <a href="./poweron-launch48-offer.md">poweron-launch48-offer.md</a> (Markdown).</p>
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
52
docs/poweron-ki-betriebssystem-prompts.md
Normal file
52
docs/poweron-ki-betriebssystem-prompts.md
Normal file
|
|
@ -0,0 +1,52 @@
|
||||||
|
# Prompts: KI-Betriebssystem-Infografik (Google Bild-KI / Gemini)
|
||||||
|
|
||||||
|
Für die Generierung einer alternativen oder verfeinerten Visualisierung in **Google AI Studio**, der **Gemini-App** (Bildfunktion) oder einem vergleichbaren Angebot (z. B. Imagen). Modell im UI wählen (intern manchmal anders benannt).
|
||||||
|
|
||||||
|
**Zugehörige Code-Folie (HTML, 1920×1080):** [poweron-ki-betriebssystem-slide.html](./poweron-ki-betriebssystem-slide.html)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Master-Prompt (ein Bild, gesamte Folie)
|
||||||
|
|
||||||
|
```
|
||||||
|
Professional German B2B infographic slide, 16:9 landscape, 1920x1080. Title at top: "Das moderne KI-Betriebssystem" with subtitle "PowerOn". Light grey background (#f8fafc).
|
||||||
|
|
||||||
|
Left: flat-design rocket pointing up, five horizontal colored segments from top to bottom: dark blue, medium blue, light blue-grey, cream, orange flame at bottom; small dark fins on sides of cream section. White line icons centered in each segment: dashboard gauge; server with nodes; hand holding gear; crossed wrench and screwdriver; document with magnifying glass. Labels on rocket segments in German: Interface, Orchestrierung, Skills, Modelle, Daten.
|
||||||
|
|
||||||
|
Center: five stacked white rounded cards with left color tabs matching rocket segments. Each card: bold German title, subtitle, two bullet lines with arrow symbols. Content exactly:
|
||||||
|
(1) Interface Layer (Interaktion) — Einstiegspunkt für User & Systeme — Chat, Spracheingabe, Oberflächen; API & Webhooks
|
||||||
|
(2) Orchestrierung & Agenten — Entscheiden & Abläufe planen — Aufgaben delegieren; Skills & Tools koordinieren
|
||||||
|
(3) Skill- & Tool-Layer — Ausführung konkreter Aufgaben — Prozesse, Aktionen, Integration; API-Aufrufe, Funktionen & Automationen
|
||||||
|
(4) KI-Modelle — Spezialisierte Modelle — Generierung, Analyse & Klassifikation; Ausführung einzelner Denkschritte
|
||||||
|
(5) Daten- & Kontextschicht — Dokumente & Wissen — Vektordatenbanken; Retrieval & Historien
|
||||||
|
|
||||||
|
Ribbon connectors between rocket segments and cards with subtle folded ribbon 3D effect.
|
||||||
|
|
||||||
|
Far left vertical bar: vertical text "Regeln & Steuerung", subtext horizontal small: Zugriffsrechte & Rollen, Entscheidungsgrenzen, Validierung & Freigaben. Far right vertical bar: vertical text "Transparenz & Kontrolle", subtext: Nachvollziehbarkeit, Qualitätssicherung, Kosten- und Nutzungsübersicht. Dark blue caps on bars.
|
||||||
|
|
||||||
|
Typography: clean geometric sans-serif, high legibility, no watermark, no stock photo people, no clutter.
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Kurz-Prompt (Iteration / Stil-Fix)
|
||||||
|
|
||||||
|
```
|
||||||
|
Same layout as a McKinsey-style architecture infographic: rocket left, five layered segments, five matching explanation cards center, two slim governance columns with vertical German labels. Colors: corporate blues #12579b and #1976d2, light grey background, orange accent only for data layer flame. Flat vector, crisp edges, presentation-ready.
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Negativ / Vermeiden (an den Bild-Prompt anhängen)
|
||||||
|
|
||||||
|
```
|
||||||
|
Avoid: 3D photorealistic rocket, cartoon style, low resolution, illegible micro-text, English-only text, logos of OpenAI/Google/Microsoft, busy backgrounds, isometric clutter, more than five main layers.
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Text-Prompt für Gemini (Copy-Feinschliff, kein Bild)
|
||||||
|
|
||||||
|
```
|
||||||
|
Du bist Redakteur für ein deutschsprachiges Enterprise-Pitchdeck. Überprüfe die fünf Schichten eines "KI-Betriebssystems" (Interface, Orchestrierung/Agenten, Skills/Tools, Modelle, Daten/Kontext) plus die beiden Querschnittsthemen Regeln & Steuerung und Transparenz & Kontrolle. Schlage je Schicht maximal zwei prägnante Bulletpoints vor (jeweils unter 90 Zeichen), konsistent mit PowerOn-Messaging (Plattform, Governance, schnelle Lieferung, Auditierbarkeit). Gib nur die optimierte Liste aus, keine Einleitung.
|
||||||
|
```
|
||||||
467
docs/poweron-ki-betriebssystem-slide.html
Normal file
467
docs/poweron-ki-betriebssystem-slide.html
Normal file
|
|
@ -0,0 +1,467 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="de-CH">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta name="viewport" content="width=1920">
|
||||||
|
<title>PowerOn – Das moderne KI-Betriebssystem (16:9)</title>
|
||||||
|
<style>
|
||||||
|
:root {
|
||||||
|
--po-blue: #1976d2;
|
||||||
|
--po-blue-dark: #12579b;
|
||||||
|
--po-seg-light: #b8d4f0;
|
||||||
|
--po-seg-cream: #f0ebe3;
|
||||||
|
--po-flame: #f57c00;
|
||||||
|
--po-flame-light: #ff9800;
|
||||||
|
--text: #1a1a2e;
|
||||||
|
--text-muted: #5c5c6f;
|
||||||
|
--bg: #f8fafc;
|
||||||
|
--card: #ffffff;
|
||||||
|
--icon: rgba(255, 255, 255, 0.95);
|
||||||
|
--icon-dark: #12579b;
|
||||||
|
}
|
||||||
|
*, *::before, *::after { box-sizing: border-box; }
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
min-height: 100vh;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: #e2e8f0;
|
||||||
|
font-family: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.stage {
|
||||||
|
width: 1920px;
|
||||||
|
height: 1080px;
|
||||||
|
background: var(--bg);
|
||||||
|
overflow: hidden;
|
||||||
|
flex-shrink: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
padding: 28px 36px 32px;
|
||||||
|
box-shadow: 0 8px 40px rgba(0, 0, 0, 0.12);
|
||||||
|
}
|
||||||
|
.slide-header {
|
||||||
|
text-align: center;
|
||||||
|
margin-bottom: 18px;
|
||||||
|
}
|
||||||
|
.slide-header h1 {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 2.05rem;
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
}
|
||||||
|
.slide-header p {
|
||||||
|
margin: 6px 0 0;
|
||||||
|
font-size: 1.15rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--po-blue);
|
||||||
|
}
|
||||||
|
.slide-grid {
|
||||||
|
flex: 1;
|
||||||
|
min-height: 0;
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 78px 210px 46px minmax(0, 1fr) 78px;
|
||||||
|
grid-template-rows: repeat(5, minmax(0, 1fr));
|
||||||
|
gap: 0 0;
|
||||||
|
column-gap: 0;
|
||||||
|
}
|
||||||
|
/* Governance columns */
|
||||||
|
.gov {
|
||||||
|
grid-row: 1 / -1;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: var(--card);
|
||||||
|
border-radius: 10px;
|
||||||
|
border: 1px solid #e2e8f0;
|
||||||
|
box-shadow: 0 2px 12px rgba(18, 87, 155, 0.06);
|
||||||
|
padding: 12px 8px;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.gov::before,
|
||||||
|
.gov::after {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 4px;
|
||||||
|
right: 4px;
|
||||||
|
height: 10px;
|
||||||
|
background: var(--po-blue-dark);
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
.gov::before { top: 8px; }
|
||||||
|
.gov::after { bottom: 8px; }
|
||||||
|
.gov-left { grid-column: 1; }
|
||||||
|
.gov-right { grid-column: 5; }
|
||||||
|
.gov-title {
|
||||||
|
writing-mode: vertical-rl;
|
||||||
|
transform: rotate(180deg);
|
||||||
|
font-size: 0.95rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
text-align: center;
|
||||||
|
flex: 0 0 auto;
|
||||||
|
max-height: 62%;
|
||||||
|
}
|
||||||
|
.gov-sub {
|
||||||
|
font-size: 0.62rem;
|
||||||
|
line-height: 1.35;
|
||||||
|
color: var(--text-muted);
|
||||||
|
text-align: center;
|
||||||
|
padding: 10px 2px 0;
|
||||||
|
writing-mode: horizontal-tb;
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
/* Per-row cells: col 2 = rocket tier, col 3 = ribbon, col 4 = card */
|
||||||
|
.rocket-tier {
|
||||||
|
grid-column: 2;
|
||||||
|
position: relative;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
margin: 0 8px;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
.rocket-tier .tier-body {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
min-height: 72px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-interface .tier-body {
|
||||||
|
background: var(--po-blue-dark);
|
||||||
|
border-radius: 14px 14px 0 0;
|
||||||
|
margin-top: 38px;
|
||||||
|
}
|
||||||
|
.rocket-nose {
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
left: 50%;
|
||||||
|
transform: translateX(-50%);
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-left: 52px solid transparent;
|
||||||
|
border-right: 52px solid transparent;
|
||||||
|
border-bottom: 42px solid var(--po-blue-dark);
|
||||||
|
z-index: 1;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-interface { align-items: flex-end; padding-top: 0; }
|
||||||
|
.rocket-tier.t-interface .wrap { position: relative; width: 100%; height: 100%; display: flex; flex-direction: column; align-items: center; }
|
||||||
|
.rocket-tier.t-orch .tier-body { background: var(--po-blue); }
|
||||||
|
.rocket-tier.t-skills .tier-body { background: var(--po-seg-light); }
|
||||||
|
.rocket-tier.t-skills .tier-label { color: var(--po-blue-dark); text-shadow: none; }
|
||||||
|
.rocket-tier.t-models .tier-body {
|
||||||
|
background: var(--po-seg-cream);
|
||||||
|
border-radius: 0 0 6px 6px;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .fin {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 8px;
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-style: solid;
|
||||||
|
z-index: 0;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .fin-l {
|
||||||
|
left: -20px;
|
||||||
|
border-width: 0 22px 56px 0;
|
||||||
|
border-color: transparent var(--po-blue-dark) transparent transparent;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .fin-r {
|
||||||
|
right: -20px;
|
||||||
|
border-width: 0 0 56px 22px;
|
||||||
|
border-color: transparent transparent transparent var(--po-blue-dark);
|
||||||
|
}
|
||||||
|
.rocket-tier.t-data .tier-body {
|
||||||
|
background: linear-gradient(180deg, var(--po-flame-light) 0%, var(--po-flame) 100%);
|
||||||
|
clip-path: polygon(15% 0%, 85% 0%, 100% 100%, 50% 85%, 0% 100%);
|
||||||
|
min-height: 64px;
|
||||||
|
margin-top: 2px;
|
||||||
|
}
|
||||||
|
.tier-label {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 6px;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
text-align: center;
|
||||||
|
font-size: 0.65rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: rgba(255, 255, 255, 0.92);
|
||||||
|
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2);
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .tier-label,
|
||||||
|
.rocket-tier.t-data .tier-label { color: var(--po-blue-dark); text-shadow: none; }
|
||||||
|
.rocket-tier.t-data .tier-label { color: #fff; bottom: 10px; }
|
||||||
|
/* Ribbons */
|
||||||
|
.ribbon {
|
||||||
|
grid-column: 3;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: flex-start;
|
||||||
|
padding-left: 2px;
|
||||||
|
}
|
||||||
|
.ribbon-inner {
|
||||||
|
width: 100%;
|
||||||
|
height: 72%;
|
||||||
|
min-height: 48px;
|
||||||
|
position: relative;
|
||||||
|
background: linear-gradient(180deg, rgba(255, 255, 255, 0.5) 0%, rgba(0, 0, 0, 0.04) 100%);
|
||||||
|
transform: skewY(-2deg);
|
||||||
|
border-radius: 0 4px 4px 0;
|
||||||
|
box-shadow: inset -2px 0 4px rgba(0, 0, 0, 0.06), 2px 2px 6px rgba(18, 87, 155, 0.08);
|
||||||
|
}
|
||||||
|
.ribbon-inner::after {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
top: 0;
|
||||||
|
bottom: 0;
|
||||||
|
width: 6px;
|
||||||
|
border-radius: 2px;
|
||||||
|
}
|
||||||
|
.ribbon.row-1 .ribbon-inner::after { background: var(--po-blue-dark); }
|
||||||
|
.ribbon.row-2 .ribbon-inner::after { background: var(--po-blue); }
|
||||||
|
.ribbon.row-3 .ribbon-inner::after { background: var(--po-seg-light); }
|
||||||
|
.ribbon.row-4 .ribbon-inner::after { background: #c4b8a8; }
|
||||||
|
.ribbon.row-5 .ribbon-inner::after { background: var(--po-flame); }
|
||||||
|
/* Cards */
|
||||||
|
.layer-card {
|
||||||
|
display: flex;
|
||||||
|
align-items: stretch;
|
||||||
|
margin: 4px 0 4px 10px;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
.layer-card .card-shell {
|
||||||
|
flex: 1;
|
||||||
|
display: flex;
|
||||||
|
background: var(--card);
|
||||||
|
border-radius: 12px;
|
||||||
|
border: 1px solid #e2e8f0;
|
||||||
|
box-shadow: 0 2px 14px rgba(25, 118, 210, 0.07);
|
||||||
|
overflow: hidden;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
.layer-card .tab {
|
||||||
|
width: 14px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.layer-card.row-1 .tab { background: var(--po-blue-dark); }
|
||||||
|
.layer-card.row-2 .tab { background: var(--po-blue); }
|
||||||
|
.layer-card.row-3 .tab { background: var(--po-seg-light); }
|
||||||
|
.layer-card.row-4 .tab { background: #c4b8a8; }
|
||||||
|
.layer-card.row-5 .tab { background: linear-gradient(180deg, var(--po-flame-light), var(--po-flame)); }
|
||||||
|
.layer-card .card-body {
|
||||||
|
padding: 10px 16px 10px 14px;
|
||||||
|
flex: 1;
|
||||||
|
min-width: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
.layer-card h3 {
|
||||||
|
margin: 0 0 2px;
|
||||||
|
font-size: 0.98rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
line-height: 1.2;
|
||||||
|
}
|
||||||
|
.layer-card .sub {
|
||||||
|
margin: 0 0 6px;
|
||||||
|
font-size: 0.78rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
.layer-card ul {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
.layer-card li {
|
||||||
|
font-size: 0.76rem;
|
||||||
|
line-height: 1.4;
|
||||||
|
color: var(--text);
|
||||||
|
padding-left: 0.85em;
|
||||||
|
text-indent: -0.85em;
|
||||||
|
}
|
||||||
|
.layer-card li + li { margin-top: 2px; }
|
||||||
|
/* Row placement (row-N on same element as component) */
|
||||||
|
.rocket-tier.row-1, .ribbon.row-1, .layer-card.row-1 { grid-row: 1; }
|
||||||
|
.rocket-tier.row-2, .ribbon.row-2, .layer-card.row-2 { grid-row: 2; }
|
||||||
|
.rocket-tier.row-3, .ribbon.row-3, .layer-card.row-3 { grid-row: 3; }
|
||||||
|
.rocket-tier.row-4, .ribbon.row-4, .layer-card.row-4 { grid-row: 4; }
|
||||||
|
.rocket-tier.row-5, .ribbon.row-5, .layer-card.row-5 { grid-row: 5; }
|
||||||
|
.rocket-tier { grid-column: 2; }
|
||||||
|
.ribbon { grid-column: 3; }
|
||||||
|
.layer-card { grid-column: 4; }
|
||||||
|
/* SVG icons */
|
||||||
|
.tier-icon { width: 44px; height: 44px; color: var(--icon); }
|
||||||
|
.rocket-tier.t-models .tier-icon,
|
||||||
|
.rocket-tier.t-skills .tier-icon { color: var(--icon-dark); }
|
||||||
|
.rocket-tier.t-data .tier-icon { color: #fff; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="stage" role="img" aria-label="Infografik: Das moderne KI-Betriebssystem PowerOn mit fünf Schichten und Governance-Säulen">
|
||||||
|
<header class="slide-header">
|
||||||
|
<h1>Das moderne KI-Betriebssystem</h1>
|
||||||
|
<p>PowerOn</p>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<div class="slide-grid">
|
||||||
|
<aside class="gov gov-left">
|
||||||
|
<div class="gov-title">Regeln & Steuerung</div>
|
||||||
|
<div class="gov-sub">Zugriffsrechte & Rollen, Entscheidungsgrenzen, Validierung & Freigaben</div>
|
||||||
|
</aside>
|
||||||
|
|
||||||
|
<!-- Row 1: Interface -->
|
||||||
|
<div class="row-1 rocket-tier t-interface">
|
||||||
|
<div class="wrap" style="width:100%;height:100%;">
|
||||||
|
<div class="rocket-nose" aria-hidden="true"></div>
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<circle cx="24" cy="24" r="16" stroke-opacity="0.35"/>
|
||||||
|
<path d="M24 12 v8 M24 28 v8 M12 24 h8 M28 24 h8"/>
|
||||||
|
<circle cx="24" cy="24" r="6"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Interface</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-1 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-1 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Interface Layer (Interaktion)</h3>
|
||||||
|
<p class="sub">Einstiegspunkt für User & Systeme</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Chat, Spracheingabe, Oberflächen</li>
|
||||||
|
<li>➔ API & Webhooks</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 2: Orchestrierung -->
|
||||||
|
<div class="row-2 rocket-tier t-orch">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<rect x="8" y="22" width="18" height="16" rx="2"/>
|
||||||
|
<path d="M26 26 h10 M26 30 h10 M26 34 h10"/>
|
||||||
|
<circle cx="38" cy="18" r="5"/>
|
||||||
|
<path d="M33 22 L36 20 M26 22 L22 18"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Orchestrierung</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-2 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-2 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Orchestrierung & Agenten</h3>
|
||||||
|
<p class="sub">Entscheiden & Abläufe planen</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Aufgaben delegieren</li>
|
||||||
|
<li>➔ Skills & Tools koordinieren</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 3: Skills -->
|
||||||
|
<div class="row-3 rocket-tier t-skills">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<path d="M14 32 c8 -4 12 -12 12 -20 c0 -4 -2 -6 -5 -6 c-4 0 -7 4 -7 10 c0 6 4 10 10 10 z"/>
|
||||||
|
<circle cx="30" cy="22" r="9"/>
|
||||||
|
<path d="M30 16 v12 M24 22 h12"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Skills</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-3 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-3 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Skill- & Tool-Layer</h3>
|
||||||
|
<p class="sub">Ausführung konkreter Aufgaben</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Prozesse, Aktionen, Integration</li>
|
||||||
|
<li>➔ API-Aufrufe, Funktionen & Automationen</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 4: Modelle -->
|
||||||
|
<div class="row-4 rocket-tier t-models">
|
||||||
|
<span class="fin fin-l" aria-hidden="true"></span>
|
||||||
|
<span class="fin fin-r" aria-hidden="true"></span>
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<path d="M12 38 L22 10 L38 38 Z M18 28 h14"/>
|
||||||
|
<line x1="26" y1="18" x2="32" y2="32"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Modelle</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-4 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-4 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>KI-Modelle</h3>
|
||||||
|
<p class="sub">Spezialisierte Modelle</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Generierung, Analyse & Klassifikation</li>
|
||||||
|
<li>➔ Ausführung einzelner „Denkschritte“</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 5: Daten -->
|
||||||
|
<div class="row-5 rocket-tier t-data">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<rect x="10" y="8" width="22" height="28" rx="2"/>
|
||||||
|
<line x1="14" y1="16" x2="28" y2="16"/>
|
||||||
|
<line x1="14" y1="22" x2="26" y2="22"/>
|
||||||
|
<circle cx="34" cy="30" r="9"/>
|
||||||
|
<line x1="40" y1="36" x2="44" y2="40" stroke-linecap="round"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Daten</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-5 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-5 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Daten- & Kontextschicht</h3>
|
||||||
|
<p class="sub">Dokumente & Wissen</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Vektordatenbanken</li>
|
||||||
|
<li>➔ Retrieval & Historien</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<aside class="gov gov-right">
|
||||||
|
<div class="gov-title">Transparenz & Kontrolle</div>
|
||||||
|
<div class="gov-sub">Nachvollziehbarkeit, Qualitätssicherung, Kosten- und Nutzungsübersicht</div>
|
||||||
|
</aside>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
176
docs/poweron-launch48-offer.md
Normal file
176
docs/poweron-launch48-offer.md
Normal file
|
|
@ -0,0 +1,176 @@
|
||||||
|
# PowerOn Launch48
|
||||||
|
**Ihre erste produktive KI-Loesung auf der PowerOn-Plattform – in 48 Stunden.**
|
||||||
|
|
||||||
|
*Zum Weitergeben an Kundinnen und Kunden. Verstaendlich fuer Geschaeftsfuehrung, Fachbereiche und IT.*
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Warum viele Unternehmen bei KI noch zoegern
|
||||||
|
|
||||||
|
Daten liegen heute oft **verteilt**: in Ordnern, Mails, Ticketsystemen und Fachapplikationen. Gleichzeitig wuenschen sich Teams **schnellere Antworten** und **weniger manuelle Routine**.
|
||||||
|
|
||||||
|
Viele erste KI-Versuche scheitern nicht an der Technik allein, sondern daran, dass
|
||||||
|
|
||||||
|
- **kein klarer Anwendungsfall** im Fokus steht,
|
||||||
|
- **wichtige Unterlagen** nicht sicher und gezielt genutzt werden,
|
||||||
|
- **generische Chat-Tools** ohne Freigaben genutzt werden – mit Risiko fuer Datenschutz und Qualitaet,
|
||||||
|
- **lange Vorprojekte** geplant werden, bevor ueberhaupt etwas Greifbares entsteht.
|
||||||
|
|
||||||
|
**PowerOn Launch48** ist das Gegenteil davon: ein **fokussiertes Paket** mit klarem Ablauf, **Fixpreis** und einem **konkreten Ergebnis** auf **Ihrer** PowerOn-Umgebung.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Was PowerOn ist – in einem Satz
|
||||||
|
|
||||||
|
**PowerOn** ist eine **Unternehmensplattform fuer kuenstliche Intelligenz**: Teams arbeiten mit KI **dort, wo Ihre Informationen und Prozesse ohnehin sind** – mit klaren Rollen, nachvollziehbaren Ablaeufen und ohne dass Sie die Kontrolle ueber sensible Inhalte verlieren.
|
||||||
|
|
||||||
|
Mehr zur Plattform: [product-teaser-poweron.md](./product-teaser-poweron.md) (interne Vertiefung).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Was Sie mit Launch48 bekommen
|
||||||
|
|
||||||
|
**Am Ende steht keine Theorie, sondern etwas Greifbares:** eine **einsatznahe KI-Loesung** auf der **PowerOn-Plattform** – typischerweise Ihr erster **KI-Assistent** fuer **einen** klar abgegrenzten Prozess, mit **Ihren freigegebenen Daten** und – im vereinbarten Rahmen – einer **Systemanbindung**. Details zum Ablauf folgen im naechsten Abschnitt.
|
||||||
|
|
||||||
|
Damit koennen Sie **realistisch einschaetzen**, welchen Nutzen KI in **Ihrem** Unternehmen bringt – und darauf aufbauen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Der Ablauf: vier Phasen, 48 Stunden, ein gemeinsames Team
|
||||||
|
|
||||||
|
**Kopfzeile (z. B. fuer Praesentation / Folie):** *Vier Phasen · 48 Stunden · gemeinsam mit Ihrem Team*
|
||||||
|
|
||||||
|
**Subline:** *Ein Workspace. Ihre Daten. Die passenden KI-Faehigkeiten. Gebündelt auf PowerOn.*
|
||||||
|
|
||||||
|
Die **48 Stunden** sind der **konzentrierte Umsetzungsblock**. Davor liegt eine **kurze Vorbereitung** (Discovery, Architektur, Freigaben); danach ein **Pilot** mit ausgewaehlten Nutzerinnen und Nutzern, in dem wir die **vereinbarten Erfolgsziele** messen.
|
||||||
|
|
||||||
|
### Phase 1: Discovery
|
||||||
|
|
||||||
|
**Gemeinsame Analyse Ihrer Prozesse.** Wir identifizieren den Anwendungsfall mit dem groessten **Automatisierungs- bzw. Entlastungspotenzial** – so, dass er **messbar** und **in 48 Stunden realistisch** umsetzbar ist (z. B. wiederkehrende Anfragen, Erstbearbeitungen, standardisierte Pruefschritte).
|
||||||
|
|
||||||
|
**Ergebnis:** Ein **scharf umrissener Use-Case** und klare Erwartungen.
|
||||||
|
|
||||||
|
### Phase 2: Design und Architektur
|
||||||
|
|
||||||
|
**KI-Architektur, Datenmodell und Integration auf der PowerOn-Plattform.** Wir legen fest, **welche Datenquellen** und **welche Anbindung** im Paketrahmen vorgesehen sind. Zentral: die **messbaren Erfolgsziele**, die **vor dem Start schriftlich fixiert** werden und ueber die **zweite Zahlungsstufe** (CHF 7’000) entscheiden – damit Einkauf, Fachbereich und IT dieselbe Sprache sprechen.
|
||||||
|
|
||||||
|
**Ergebnis:** **Fester Plan** fuer Umsetzung und Abnahme.
|
||||||
|
|
||||||
|
### Phase 3: Build und Integration
|
||||||
|
|
||||||
|
**Entwicklung des KI-Assistenten, Anbindung an Ihre Systeme (im Vereinbarten), Testing.** Ihre **Fachanwenderinnen und -anwender** pruefen parallel zum Build mit (**„Mensch prueft mit“** statt reiner Black-Box) – mit realistischen **Testfaellen aus dem Alltag**.
|
||||||
|
|
||||||
|
**Ergebnis:** Stabile, einsatznahe Loesung vor dem Go-Live.
|
||||||
|
|
||||||
|
### Phase 4: Deploy und Handover
|
||||||
|
|
||||||
|
**Go-Live in Ihrer vereinbarten PowerOn-Umgebung** (Pilot- oder Produktions-Instanz je nach Vereinbarung – kein generisches „Internet-KI-Experiment“), **Wissenstransfer** und **Dokumentation**, damit Ihr Team den Assistenten **vom ersten Tag an** im vereinbarten Rahmen **selbst betreiben** kann.
|
||||||
|
|
||||||
|
**Ergebnis:** Uebergabe mit kurzer **Einweisung** und **Nachschlagewerk** fuer den Betrieb.
|
||||||
|
|
||||||
|
### Zeitlicher Ablauf auf einen Blick
|
||||||
|
|
||||||
|
| Phase | Inhalt |
|
||||||
|
| --- | --- |
|
||||||
|
| **Vorbereitung** | Discovery, Design/Architektur, Freigaben – in der Regel **einige Arbeitstage** vor dem 48h-Block |
|
||||||
|
| **Umsetzung** | **48 Stunden** intensiv gemeinsam |
|
||||||
|
| **Pilot** | z. B. **ca. 10 Arbeitstage** Messfenster fuer die vereinbarten Erfolgsziele (wie im Angebot festgelegt) |
|
||||||
|
|
||||||
|
### Am Ende liegen fuer Sie vor (Kernlieferobjekte)
|
||||||
|
|
||||||
|
- eine **funktionierende KI-Loesung** auf PowerOn fuer den **definierten** Anwendungsfall,
|
||||||
|
- **konfigurierte Datenquellen** und **eine** Systemanbindung **im vereinbarten Umfang**,
|
||||||
|
- **kurze Dokumentation** und **Einweisung** fuer Ihr Team.
|
||||||
|
|
||||||
|
### Rollen: Sie und wir
|
||||||
|
|
||||||
|
| | **Sie** | **Wir** |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| **Verantwortung** | Fach-Owner, IT-Zugang, Freigaben (Datenschutz/Compliance nach Bedarf), **Pilotgruppe** | Architektur, Umsetzung, Qualitaet, Begleitung im 48h-Block |
|
||||||
|
|
||||||
|
**Vertrauen in einem Satz:** Kein undurchsichtiges Einzel-Tool im Browser – sondern **PowerOn** mit **Ihren freigegebenen Daten** und **klaren Grenzen**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Fuer wen ist Launch48 gedacht?
|
||||||
|
|
||||||
|
Launch48 richtet sich an Organisationen, die **wissen, dass KI relevant ist**, aber **noch keinen einfachen Weg** gefunden haben, **schnell und kontrolliert** zu starten – oder die **bereits eine Idee** haben und diese **in Wochen, nicht Monaten** greifbar machen wollen.
|
||||||
|
|
||||||
|
**Typische Situationen:**
|
||||||
|
|
||||||
|
- Viele **wiederkehrende Anfragen** (Kundenservice, interne Support-Themen, Fachfragen).
|
||||||
|
- **Wissen in Dokumenten**, das immer wieder neu gesucht und zusammengefasst wird.
|
||||||
|
- **Bedarf an Geschwindigkeit** ohne monatelange Evaluationsprojekte.
|
||||||
|
- **Wuensche nach Kontrolle** ueber Daten und Rollen statt „KI irgendwo im Browser“.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Investition – einfach und planbar
|
||||||
|
|
||||||
|
| | Betrag | Wann |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| **Gesamtpaket** | **CHF 9’000** (zzgl. MWSt. falls anwendbar) | Fixpreis fuer das vereinbarte Paket |
|
||||||
|
| **Zu Beginn** | **CHF 2’000** | Wenn Sie starten und wir die Umsetzung freigeben |
|
||||||
|
| **Nach dem Pilot** | **CHF 7’000** | Wenn die **gemeinsam festgelegten Erfolgsziele** im vereinbarten Messzeitraum erreicht sind |
|
||||||
|
|
||||||
|
**Was bedeutet das fuer Sie?** Der Preis ist bewusst **frueh transparent**, weil Launch48 kein offenes Beratungsprojekt ist, sondern ein **klar abgegrenztes Paket**. Sie investieren zu Beginn einen **kleineren Teil**. Der groessere Teil ist an **messbare, vorab beschriebene Ziele** geknuepft – z. B. Zeitersparnis pro typischem Vorgang, Zufriedenheit der Pilotgruppe oder Fehlerquote. **Genau diese Ziele** legen wir **vor dem Start** schriftlich fest, damit alle dasselbe verstehen.
|
||||||
|
|
||||||
|
So wird aus der Zahl kein Risikozeichen, sondern ein **Vertrauenssignal**: klarer Rahmen, klares Ergebnis, klare Abnahme. Details und Grenzen des Pakets besprechen wir **transparent** im Erstgespraech (Umfang der Datenquellen, eine Systemanbindung im Standardrahmen, Groesse der Pilotgruppe).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Was Sie von uns erwarten koennen
|
||||||
|
|
||||||
|
- **Erfahrene Begleitung** von Anfang bis Pilotende
|
||||||
|
- **Klare Kommunikation** – wenig Buzzwords, viel Nutzen
|
||||||
|
- **PowerOn als Plattform** – skalierbar, wenn Sie verlaengern moechten
|
||||||
|
- **Respekt vor Ihren Freigaben** – Datenschutz und IT-Security ernst nehmen
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Ihr naechster Schritt
|
||||||
|
|
||||||
|
**Kurzes Erstgespraech (ca. 15–30 Minuten):** Passt Ihr Thema zu Launch48? Wir sagen Ihnen ehrlich **ja, nein oder noch nicht**.
|
||||||
|
|
||||||
|
**Kontakt**
|
||||||
|
|
||||||
|
- **Web:** [www.poweron.swiss](https://www.poweron.swiss)
|
||||||
|
- **Adresse:** PowerOn AG, Birmensdorferstrasse 94, 8003 Zuerich, Schweiz
|
||||||
|
|
||||||
|
**Ansprechpartner**
|
||||||
|
|
||||||
|
- Patrick Motsch
|
||||||
|
- Ida Dittrich
|
||||||
|
- Stephan Schellworth
|
||||||
|
|
||||||
|
*Bitte ersetzen Sie die Ansprechpartner bei Bedarf durch eine zentrale E-Mail-Adresse oder einen Buchungslink fuer Ihr Vertriebsteam.*
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Haeufige Fragen (kurz)
|
||||||
|
|
||||||
|
**Brauchen wir schon PowerOn?**
|
||||||
|
Wir klaeren mit Ihnen, ob eine **Pilot-Umgebung** oder Ihre bestehende Instanz passt.
|
||||||
|
|
||||||
|
**Ist das nur ein Prototyp?**
|
||||||
|
Nein – Ziel ist eine **einsatznahe Loesung** fuer einen **definierten** Anwendungsfall. Was **nicht** im Paket liegt (z. B. Rollout auf die ganze Firma), sagen wir klar dazu.
|
||||||
|
|
||||||
|
**Was, wenn unsere IT Zeit braucht?**
|
||||||
|
Dann verschieben wir den Start – **Zugang und Freigaben** muessen passen, sonst wird niemand gluecklich.
|
||||||
|
|
||||||
|
**Duerfen wir das Dokument weitergeben?**
|
||||||
|
Ja. Es ist dafuer gedacht, intern weiterzureichen (Geschaeftsfuehrung, Fachbereich, IT).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Weitere Unterlagen (optional)
|
||||||
|
|
||||||
|
- **Onepager im Browser (HTML, teilbar):** [launch48-offer-page.html](./launch48-offer-page.html)
|
||||||
|
- **4-Folien-Deck (Copy fuer PDF/Canva/PPT):** [launch48-deck-presentation.md](./launch48-deck-presentation.md)
|
||||||
|
- **Kurzfassung zum Drucken:** [flyer-poweron-48h-agent.md](./flyer-poweron-48h-agent.md)
|
||||||
|
- **Technisches Vertiefungs- und Lieferkonzept (intern):** [concept-poweron-48h-agent-offer.md](./concept-poweron-48h-agent-offer.md)
|
||||||
|
- **Beispiel-Verlauf (Illustration, kein Echt-Kunde):** [case-study-poweron-48h-agent.md](./case-study-poweron-48h-agent.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*PowerOn Launch48 – strukturiert vorbereitet, in 48 Stunden umgesetzt, messbar abgeschlossen.*
|
||||||
529
docs/poweron-plattform-layer-schaubild.html
Normal file
|
|
@ -0,0 +1,529 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="de-CH">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta name="viewport" content="width=1920">
|
||||||
|
<title>PowerOn – Die KI-Plattform (Layer-Schaubild, 16:9)</title>
|
||||||
|
<style>
|
||||||
|
:root {
|
||||||
|
--po-blue: #1976d2;
|
||||||
|
--po-blue-dark: #12579b;
|
||||||
|
--po-industry: #4a8ec8;
|
||||||
|
--po-seg-light: #b8d4f0;
|
||||||
|
--po-seg-cream: #f0ebe3;
|
||||||
|
--po-flame: #f57c00;
|
||||||
|
--po-flame-light: #ff9800;
|
||||||
|
--text: #1a1a2e;
|
||||||
|
--text-muted: #5c5c6f;
|
||||||
|
--bg: #f8fafc;
|
||||||
|
--card: #ffffff;
|
||||||
|
--icon: rgba(255, 255, 255, 0.95);
|
||||||
|
--icon-dark: #12579b;
|
||||||
|
}
|
||||||
|
*, *::before, *::after { box-sizing: border-box; }
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
min-height: 100vh;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: #e2e8f0;
|
||||||
|
font-family: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
|
||||||
|
color: var(--text);
|
||||||
|
}
|
||||||
|
.stage {
|
||||||
|
width: 1920px;
|
||||||
|
height: 1080px;
|
||||||
|
background: var(--bg);
|
||||||
|
overflow: hidden;
|
||||||
|
flex-shrink: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
padding: 20px 32px 24px;
|
||||||
|
box-shadow: 0 8px 40px rgba(0, 0, 0, 0.12);
|
||||||
|
}
|
||||||
|
.slide-header {
|
||||||
|
text-align: center;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
.brand-row {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
gap: 16px;
|
||||||
|
margin-bottom: 4px;
|
||||||
|
}
|
||||||
|
.brand-mark {
|
||||||
|
font-size: 0.95rem;
|
||||||
|
font-weight: 800;
|
||||||
|
letter-spacing: 0.22em;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
text-transform: uppercase;
|
||||||
|
border: 2px solid var(--po-blue-dark);
|
||||||
|
padding: 6px 14px 6px 18px;
|
||||||
|
border-radius: 4px;
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
.slide-header h1 {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 1.85rem;
|
||||||
|
font-weight: 700;
|
||||||
|
letter-spacing: -0.02em;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
}
|
||||||
|
.slide-header .tagline {
|
||||||
|
margin: 4px 0 0;
|
||||||
|
font-size: 0.98rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--po-blue);
|
||||||
|
max-width: 920px;
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
line-height: 1.35;
|
||||||
|
}
|
||||||
|
.slide-grid {
|
||||||
|
flex: 1;
|
||||||
|
min-height: 0;
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: 82px 200px 42px minmax(0, 1fr) 82px;
|
||||||
|
grid-template-rows: repeat(6, minmax(0, 1fr));
|
||||||
|
gap: 0;
|
||||||
|
}
|
||||||
|
.gov {
|
||||||
|
grid-row: 1 / -1;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
background: var(--card);
|
||||||
|
border-radius: 10px;
|
||||||
|
border: 1px solid #e2e8f0;
|
||||||
|
box-shadow: 0 2px 12px rgba(18, 87, 155, 0.06);
|
||||||
|
padding: 10px 6px;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.gov::before,
|
||||||
|
.gov::after {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 4px;
|
||||||
|
right: 4px;
|
||||||
|
height: 10px;
|
||||||
|
background: var(--po-blue-dark);
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
.gov::before { top: 8px; }
|
||||||
|
.gov::after { bottom: 8px; }
|
||||||
|
.gov-left { grid-column: 1; }
|
||||||
|
.gov-right { grid-column: 5; }
|
||||||
|
.gov-title {
|
||||||
|
writing-mode: vertical-rl;
|
||||||
|
transform: rotate(180deg);
|
||||||
|
font-size: 0.88rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
letter-spacing: 0.04em;
|
||||||
|
text-align: center;
|
||||||
|
flex: 0 0 auto;
|
||||||
|
max-height: 58%;
|
||||||
|
}
|
||||||
|
.gov-sub {
|
||||||
|
font-size: 0.6rem;
|
||||||
|
line-height: 1.34;
|
||||||
|
color: var(--text-muted);
|
||||||
|
text-align: center;
|
||||||
|
padding: 8px 2px 0;
|
||||||
|
writing-mode: horizontal-tb;
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
.rocket-tier {
|
||||||
|
grid-column: 2;
|
||||||
|
position: relative;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
margin: 0 6px;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
.rocket-tier .tier-body {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
min-height: 52px;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-interface .tier-body {
|
||||||
|
background: var(--po-blue-dark);
|
||||||
|
border-radius: 12px 12px 0 0;
|
||||||
|
margin-top: 32px;
|
||||||
|
}
|
||||||
|
.rocket-nose {
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
left: 50%;
|
||||||
|
transform: translateX(-50%);
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-left: 48px solid transparent;
|
||||||
|
border-right: 48px solid transparent;
|
||||||
|
border-bottom: 36px solid var(--po-blue-dark);
|
||||||
|
z-index: 1;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-interface { align-items: flex-end; padding-top: 0; }
|
||||||
|
.rocket-tier.t-interface .wrap {
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-orch .tier-body { background: var(--po-blue); }
|
||||||
|
.rocket-tier.t-industry .tier-body { background: var(--po-industry); }
|
||||||
|
.rocket-tier.t-skills .tier-body { background: var(--po-seg-light); }
|
||||||
|
.rocket-tier.t-skills .tier-label { color: var(--po-blue-dark); text-shadow: none; }
|
||||||
|
.rocket-tier.t-models .tier-body {
|
||||||
|
background: var(--po-seg-cream);
|
||||||
|
border-radius: 0 0 6px 6px;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .fin {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 6px;
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
border-style: solid;
|
||||||
|
z-index: 0;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .fin-l {
|
||||||
|
left: -18px;
|
||||||
|
border-width: 0 20px 48px 0;
|
||||||
|
border-color: transparent var(--po-blue-dark) transparent transparent;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-models .fin-r {
|
||||||
|
right: -18px;
|
||||||
|
border-width: 0 0 48px 20px;
|
||||||
|
border-color: transparent transparent transparent var(--po-blue-dark);
|
||||||
|
}
|
||||||
|
.rocket-tier.t-data .tier-body {
|
||||||
|
background: linear-gradient(180deg, var(--po-flame-light) 0%, var(--po-flame) 100%);
|
||||||
|
clip-path: polygon(15% 0%, 85% 0%, 100% 100%, 50% 85%, 0% 100%);
|
||||||
|
min-height: 52px;
|
||||||
|
margin-top: 2px;
|
||||||
|
}
|
||||||
|
.tier-label {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 4px;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
text-align: center;
|
||||||
|
font-size: 0.58rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: rgba(255, 255, 255, 0.92);
|
||||||
|
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.2);
|
||||||
|
pointer-events: none;
|
||||||
|
line-height: 1.1;
|
||||||
|
padding: 0 2px;
|
||||||
|
}
|
||||||
|
.rocket-tier.t-skills .tier-label,
|
||||||
|
.rocket-tier.t-models .tier-label { color: var(--po-blue-dark); text-shadow: none; }
|
||||||
|
.rocket-tier.t-data .tier-label { color: #fff; bottom: 8px; }
|
||||||
|
.ribbon {
|
||||||
|
grid-column: 3;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: flex-start;
|
||||||
|
padding-left: 2px;
|
||||||
|
}
|
||||||
|
.ribbon-inner {
|
||||||
|
width: 100%;
|
||||||
|
height: 70%;
|
||||||
|
min-height: 40px;
|
||||||
|
position: relative;
|
||||||
|
background: linear-gradient(180deg, rgba(255, 255, 255, 0.5) 0%, rgba(0, 0, 0, 0.04) 100%);
|
||||||
|
transform: skewY(-2deg);
|
||||||
|
border-radius: 0 4px 4px 0;
|
||||||
|
box-shadow: inset -2px 0 4px rgba(0, 0, 0, 0.06), 2px 2px 6px rgba(18, 87, 155, 0.08);
|
||||||
|
}
|
||||||
|
.ribbon-inner::after {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
top: 0;
|
||||||
|
bottom: 0;
|
||||||
|
width: 6px;
|
||||||
|
border-radius: 2px;
|
||||||
|
}
|
||||||
|
.ribbon.row-1 .ribbon-inner::after { background: var(--po-blue-dark); }
|
||||||
|
.ribbon.row-2 .ribbon-inner::after { background: var(--po-blue); }
|
||||||
|
.ribbon.row-3 .ribbon-inner::after { background: var(--po-industry); }
|
||||||
|
.ribbon.row-4 .ribbon-inner::after { background: var(--po-seg-light); }
|
||||||
|
.ribbon.row-5 .ribbon-inner::after { background: #c4b8a8; }
|
||||||
|
.ribbon.row-6 .ribbon-inner::after { background: var(--po-flame); }
|
||||||
|
.layer-card {
|
||||||
|
display: flex;
|
||||||
|
align-items: stretch;
|
||||||
|
margin: 2px 0 2px 8px;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
.layer-card .card-shell {
|
||||||
|
flex: 1;
|
||||||
|
display: flex;
|
||||||
|
background: var(--card);
|
||||||
|
border-radius: 10px;
|
||||||
|
border: 1px solid #e2e8f0;
|
||||||
|
box-shadow: 0 2px 12px rgba(25, 118, 210, 0.07);
|
||||||
|
overflow: hidden;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
.layer-card .tab {
|
||||||
|
width: 12px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
.layer-card.row-1 .tab { background: var(--po-blue-dark); }
|
||||||
|
.layer-card.row-2 .tab { background: var(--po-blue); }
|
||||||
|
.layer-card.row-3 .tab { background: var(--po-industry); }
|
||||||
|
.layer-card.row-4 .tab { background: var(--po-seg-light); }
|
||||||
|
.layer-card.row-5 .tab { background: #c4b8a8; }
|
||||||
|
.layer-card.row-6 .tab { background: linear-gradient(180deg, var(--po-flame-light), var(--po-flame)); }
|
||||||
|
.layer-card .card-body {
|
||||||
|
padding: 6px 12px 6px 10px;
|
||||||
|
flex: 1;
|
||||||
|
min-width: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
.layer-card h3 {
|
||||||
|
margin: 0 0 1px;
|
||||||
|
font-size: 0.92rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: var(--po-blue-dark);
|
||||||
|
line-height: 1.2;
|
||||||
|
}
|
||||||
|
.layer-card .sub {
|
||||||
|
margin: 0 0 4px;
|
||||||
|
font-size: 0.72rem;
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--text-muted);
|
||||||
|
line-height: 1.25;
|
||||||
|
}
|
||||||
|
.layer-card ul {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
list-style: none;
|
||||||
|
}
|
||||||
|
.layer-card li {
|
||||||
|
font-size: 0.71rem;
|
||||||
|
line-height: 1.35;
|
||||||
|
color: var(--text);
|
||||||
|
padding-left: 0.85em;
|
||||||
|
text-indent: -0.85em;
|
||||||
|
}
|
||||||
|
.layer-card li + li { margin-top: 1px; }
|
||||||
|
.rocket-tier.row-1, .ribbon.row-1, .layer-card.row-1 { grid-row: 1; }
|
||||||
|
.rocket-tier.row-2, .ribbon.row-2, .layer-card.row-2 { grid-row: 2; }
|
||||||
|
.rocket-tier.row-3, .ribbon.row-3, .layer-card.row-3 { grid-row: 3; }
|
||||||
|
.rocket-tier.row-4, .ribbon.row-4, .layer-card.row-4 { grid-row: 4; }
|
||||||
|
.rocket-tier.row-5, .ribbon.row-5, .layer-card.row-5 { grid-row: 5; }
|
||||||
|
.rocket-tier.row-6, .ribbon.row-6, .layer-card.row-6 { grid-row: 6; }
|
||||||
|
.rocket-tier { grid-column: 2; }
|
||||||
|
.ribbon { grid-column: 3; }
|
||||||
|
.layer-card { grid-column: 4; }
|
||||||
|
.tier-icon { width: 36px; height: 36px; color: var(--icon); }
|
||||||
|
.rocket-tier.t-industry .tier-icon { color: var(--icon); }
|
||||||
|
.rocket-tier.t-skills .tier-icon,
|
||||||
|
.rocket-tier.t-models .tier-icon { color: var(--icon-dark); }
|
||||||
|
.rocket-tier.t-data .tier-icon { color: #fff; }
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="stage" role="img" aria-label="Infografik: PowerOn KI-Plattform in sechs verständlichen Schichten für Entscheider">
|
||||||
|
<header class="slide-header">
|
||||||
|
<div class="brand-row">
|
||||||
|
<span class="brand-mark" aria-hidden="true">PowerOn</span>
|
||||||
|
</div>
|
||||||
|
<h1>Die PowerOn KI-Plattform</h1>
|
||||||
|
<p class="tagline">Eine Plattform für KI im Unternehmen – mit Kontrolle, klaren Kosten und Lösungen für echte Fachfragen.</p>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<div class="slide-grid">
|
||||||
|
<aside class="gov gov-left">
|
||||||
|
<div class="gov-title">Sicherheit & Regeln</div>
|
||||||
|
<div class="gov-sub">Wer darf was?<br>Getrennt pro Kunde / Mandant<br>Sensible Daten schützen<br>DSGVO: Auskunft & Löschen</div>
|
||||||
|
</aside>
|
||||||
|
|
||||||
|
<!-- Row 1: Interface -->
|
||||||
|
<div class="row-1 rocket-tier t-interface">
|
||||||
|
<div class="wrap" style="width:100%;height:100%;">
|
||||||
|
<div class="rocket-nose" aria-hidden="true"></div>
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<circle cx="24" cy="24" r="16" stroke-opacity="0.35"/>
|
||||||
|
<path d="M24 12 v8 M24 28 v8 M12 24 h8 M28 24 h8"/>
|
||||||
|
<circle cx="24" cy="24" r="6"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Zugang</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-1 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-1 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Zugang & Bedienung</h3>
|
||||||
|
<p class="sub">So arbeiten Menschen und Systeme mit PowerOn</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Chat, Arbeitsfläche, Sprache</li>
|
||||||
|
<li>➔ Im Browser, als App, Anbindung an Ihre IT</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 2: Orchestrierung -->
|
||||||
|
<div class="row-2 rocket-tier t-orch">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<rect x="8" y="22" width="18" height="16" rx="2"/>
|
||||||
|
<path d="M26 26 h10 M26 30 h10 M26 34 h10"/>
|
||||||
|
<circle cx="38" cy="18" r="5"/>
|
||||||
|
<path d="M33 22 L36 20 M26 22 L22 18"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Steuerung</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-2 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-2 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Steuerung & KI-Helfer</h3>
|
||||||
|
<p class="sub">Die KI plant Schritte und koordiniert das Weitere</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Gespräche, Aufgaben und Abläufe im Griff</li>
|
||||||
|
<li>➔ Übergibt Arbeit an Programme und Schnittstellen</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 3: Branchen -->
|
||||||
|
<div class="row-3 rocket-tier t-industry">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<rect x="10" y="12" width="28" height="26" rx="2"/>
|
||||||
|
<path d="M10 20 h28 M18 12 v8 M30 12 v8"/>
|
||||||
|
<circle cx="18" cy="30" r="3"/>
|
||||||
|
<circle cx="30" cy="30" r="3"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Branchen</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-3 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-3 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Fachlösungen</h3>
|
||||||
|
<p class="sub">Vorgefertigt für konkrete Berufsfelder</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Treuhand & Buchhaltung, Immobilien & Grundstücke</li>
|
||||||
|
<li>➔ Coaching, Schulung, Unterstützung in Microsoft Teams</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 4: Skills & Automation -->
|
||||||
|
<div class="row-4 rocket-tier t-skills">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<path d="M14 32 c8 -4 12 -12 12 -20 c0 -4 -2 -6 -5 -6 c-4 0 -7 4 -7 10 c0 6 4 10 10 10 z"/>
|
||||||
|
<circle cx="30" cy="22" r="9"/>
|
||||||
|
<path d="M30 16 v12 M24 22 h12"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Aktionen</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-4 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-4 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Automatisierung & Aktionen</h3>
|
||||||
|
<p class="sub">Routine läuft, ohne dass alles manuell geklickt wird</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Abläufe starten nach Zeitplan oder Ereignis (z. B. E-Mail)</li>
|
||||||
|
<li>➔ Verbindet Microsoft, Google und weitere Tools</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 5: KI-Modelle -->
|
||||||
|
<div class="row-5 rocket-tier t-models">
|
||||||
|
<span class="fin fin-l" aria-hidden="true"></span>
|
||||||
|
<span class="fin fin-r" aria-hidden="true"></span>
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<path d="M12 38 L22 10 L38 38 Z M18 28 h14"/>
|
||||||
|
<line x1="26" y1="18" x2="32" y2="32"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Modelle</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-5 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-5 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>KI-Modelle</h3>
|
||||||
|
<p class="sub">Sie wählen – nicht an einen einzigen Anbieter gebunden</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Einsatz führender KI-Anbieter nach Bedarf</li>
|
||||||
|
<li>➔ Eigene KI im eigenen Rechenzentrum möglich</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<!-- Row 6: Unified Data Bar -->
|
||||||
|
<div class="row-6 rocket-tier t-data">
|
||||||
|
<div class="tier-body">
|
||||||
|
<svg class="tier-icon" viewBox="0 0 48 48" fill="none" stroke="currentColor" stroke-width="1.8" aria-hidden="true">
|
||||||
|
<rect x="8" y="14" width="32" height="22" rx="2"/>
|
||||||
|
<line x1="12" y1="20" x2="36" y2="20"/>
|
||||||
|
<line x1="12" y1="25" x2="28" y2="25"/>
|
||||||
|
<line x1="12" y1="30" x2="32" y2="30"/>
|
||||||
|
<circle cx="38" cy="10" r="6"/>
|
||||||
|
<path d="M40 12 l4 4" stroke-linecap="round"/>
|
||||||
|
</svg>
|
||||||
|
<span class="tier-label">Daten</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row-6 ribbon"><div class="ribbon-inner"></div></div>
|
||||||
|
<article class="row-6 layer-card">
|
||||||
|
<div class="card-shell">
|
||||||
|
<div class="tab" aria-hidden="true"></div>
|
||||||
|
<div class="card-body">
|
||||||
|
<h3>Datenleiste & Wissen</h3>
|
||||||
|
<p class="sub">Alle wichtigen Quellen an einem Ort für die KI</p>
|
||||||
|
<ul>
|
||||||
|
<li>➔ Dateien und Ablagen – sichtbar wie eine gemeinsame Leiste</li>
|
||||||
|
<li>➔ Antworten mit Bezug zu Ihren Unterlagen & Gesprächen</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
|
||||||
|
<aside class="gov gov-right">
|
||||||
|
<div class="gov-title">Kosten & Nachvollziehbarkeit</div>
|
||||||
|
<div class="gov-sub">Zahlen nach tatsächlicher Nutzung<br>Wer hat was gemacht?<br>Kosten pro Kunde / Mandant<br>Nachvollziehbare Entscheidungen</div>
|
||||||
|
</aside>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
174
docs/product-teaser-billing-poweron.md
Normal file
|
|
@ -0,0 +1,174 @@
|
||||||
|
# PowerOn Billing Product Teaser - Recherche & Analyse
|
||||||
|
|
||||||
|
## Zusammenfassung der Recherche-Ergebnisse
|
||||||
|
|
||||||
|
### Abrechnungsmodelle
|
||||||
|
PowerOn bietet **4 flexible Abrechnungsmodelle**, die auf unterschiedliche Unternehmensanforderungen zugeschnitten sind (eine kurze Datenmodell-Skizze folgt nach der Liste):
|
||||||
|
|
||||||
|
1. **PREPAY_MANDATE** - Gemeinsames Prepaid-Guthaben fuer das gesamte Mandat
|
||||||
|
2. **PREPAY_USER** - Individuelles Prepaid-Guthaben pro Benutzer (Standard-Startguthaben: 10 CHF)
|
||||||
|
3. **CREDIT_POSTPAY** - Kreditrahmen mit monatlicher Abrechnung (erfordert Rechnungsadresse)
|
||||||
|
4. **UNLIMITED** - Unbegrenzt (nur fuer interne Mandate)
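
Zur Veranschaulichung eine minimale Datenmodell-Skizze der vier Modelle (Python). Klassen-, Feld- und Methodennamen sind Annahmen fuer dieses Beispiel und entsprechen nicht zwingend der tatsaechlichen Gateway-Implementierung:

```python
from dataclasses import dataclass
from decimal import Decimal
from enum import Enum


class BillingModel(str, Enum):
    """Die vier Abrechnungsmodelle aus der Recherche."""
    PREPAY_MANDATE = "PREPAY_MANDATE"    # gemeinsames Guthaben pro Mandat
    PREPAY_USER = "PREPAY_USER"          # individuelles Guthaben pro Benutzer
    CREDIT_POSTPAY = "CREDIT_POSTPAY"    # Kreditrahmen, monatliche Rechnung
    UNLIMITED = "UNLIMITED"              # nur fuer interne Mandate


@dataclass
class BillingAccount:
    mandate_id: str
    model: BillingModel
    balance_chf: Decimal = Decimal("10.00")   # Standard-Startguthaben bei PREPAY_USER
    invoice_address: str | None = None        # Pflicht bei CREDIT_POSTPAY

    def validate(self) -> None:
        # CREDIT_POSTPAY erfordert laut Recherche eine Rechnungsadresse
        if self.model == BillingModel.CREDIT_POSTPAY and not self.invoice_address:
            raise ValueError("CREDIT_POSTPAY benoetigt eine Rechnungsadresse")
```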
|
||||||
|
|
||||||
|
### Preisstruktur
|
||||||
|
- **Pay-per-Use**: Abrechnung nach tatsaechlicher KI-Nutzung
|
||||||
|
- **Transparente Aufschlaege**: 100% Markup auf Provider-Kosten (Faktor 2.0; Rechenbeispiel siehe Skizze nach der Liste)
|
||||||
|
  - 50% fuer Infrastruktur und Platform Service
|
||||||
|
  - 50% fuer Waehrungsrisiko
|
||||||
|
- **Waehrung**: Schweizer Franken (CHF)
|
||||||
|
- **Aufladungsbetraege**: 10, 25, 50, 100, 250, 500 CHF
|
||||||
|
- **Zahlungsmethode**: Stripe Checkout (Kreditkarte)
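
Ein kleines Rechenbeispiel zur Preisstruktur (vereinfachte Skizze): Der Markup-Faktor 2.0 stammt aus den Punkten oben; Wechselkurs und Provider-Kosten sind frei gewaehlte Beispielwerte, keine produktiven Werte.

```python
from decimal import Decimal, ROUND_HALF_UP

MARKUP_FACTOR = Decimal("2.0")   # 100% Aufschlag auf die Provider-Kosten
USD_TO_CHF = Decimal("0.90")     # Annahme: Beispiel-Wechselkurs


def billed_amount_chf(provider_cost_usd: Decimal) -> Decimal:
    """Provider-Kosten eines AI-Aufrufs -> verrechneter Betrag in CHF (Pay-per-Use)."""
    cost_chf = provider_cost_usd * USD_TO_CHF
    return (cost_chf * MARKUP_FACTOR).quantize(Decimal("0.0001"), rounding=ROUND_HALF_UP)


# Beispiel: ein Aufruf kostet beim Provider 0.05 USD
print(billed_amount_chf(Decimal("0.05")))  # -> 0.0900 CHF
```

Im Beispiel wird ein Provider-Aufruf von 0.05 USD also mit 0.09 CHF verrechnet; je die Haelfte des Aufschlags entfaellt gemaess obiger Aufteilung auf Infrastruktur/Platform Service und auf das Waehrungsrisiko.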
|
||||||
|
|
||||||
|
### Kernmerkmale
|
||||||
|
- **Mandanten-basierte Abrechnung**: Isolierte Konten pro Mandat
|
||||||
|
- **Echtzeit-Transparenz**: Sofortige Kostenzuordnung nach jedem KI-Aufruf
|
||||||
|
- **Detaillierte Statistiken**: Nach Provider, Modell, Feature, Zeitraum
|
||||||
|
- **Warnungen**: Konfigurierbare Schwellenwerte (Standard: 10%)
|
||||||
|
- **Flexible Kontrolle**: Blockierung bei Nullsaldo optional
|
||||||
|
- **RBAC-Integration**: Feingranulare Zugriffskontrolle auf AI-Provider
|
||||||
|
|
||||||
|
### Technische Details
|
||||||
|
- **Keine Abonnements**: One-time Payments, keine wiederkehrenden Gebuehren
|
||||||
|
- **Webhook-Integration**: Automatische Gutschrift nach Zahlung (Handler-Skizze nach der Liste)
|
||||||
|
- **API-First**: Vollstaendige REST-API fuer Billing-Operationen
|
||||||
|
- **Audit-Trail**: Vollstaendige Transaktionshistorie
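
Zur Illustration der Webhook-Integration eine minimale Handler-Skizze mit FastAPI und dem Stripe-SDK. Endpoint-Pfad, Event-Felder (metadata.mandate_id) und die Funktion credit_account sind Annahmen fuer dieses Beispiel, nicht die reale Gateway-API:

```python
import os

import stripe
from fastapi import FastAPI, Header, HTTPException, Request

app = FastAPI()
WEBHOOK_SECRET = os.environ["STRIPE_WEBHOOK_SECRET"]


def credit_account(mandate_id: str, amount_chf: float) -> None:
    """Platzhalter (Annahme): Gutschrift auf dem Billing-Konto des Mandats verbuchen."""
    ...


@app.post("/billing/webhooks/stripe")  # Pfad ist eine Annahme
async def stripe_webhook(request: Request, stripe_signature: str = Header(...)):
    payload = await request.body()
    try:
        # Signatur pruefen, bevor irgendetwas gutgeschrieben wird
        event = stripe.Webhook.construct_event(payload, stripe_signature, WEBHOOK_SECRET)
    except Exception:
        raise HTTPException(status_code=400, detail="Ungueltige Payload oder Signatur")

    if event["type"] == "checkout.session.completed":
        session = event["data"]["object"]
        # Annahme: Die Mandats-ID wird beim Checkout als Metadatum mitgegeben;
        # amount_total liefert Stripe in Rappen.
        credit_account(session["metadata"]["mandate_id"], session["amount_total"] / 100)

    return {"received": True}
```

Entscheidend ist die Signaturpruefung vor der Gutschrift; erst nach erfolgreicher Verifikation wird das Guthaben verbucht.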
|
||||||
|
|
||||||
|
## Product Teaser fuer Homepage
|
||||||
|
|
||||||
|
Der folgende Text ist **Copy & Paste ready** und fuer die Homepage optimiert.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Transparente Abrechnung. Volle Kostenkontrolle.
|
||||||
|
|
||||||
|
## Bezahlen Sie nur, was Sie nutzen - fair, transparent und flexibel
|
||||||
|
|
||||||
|
PowerOn bietet ein modernes, nutzungsbasiertes Abrechnungssystem, das sich Ihren Geschaeftsanforderungen anpasst. Keine versteckten Kosten, keine ueberraschenden Rechnungen - nur klare, nachvollziehbare Preise fuer die KI-Leistungen, die Sie tatsaechlich nutzen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Unsere Abrechnungsmodelle
|
||||||
|
|
||||||
|
### Prepaid fuer volle Kontrolle
|
||||||
|
**Prepaid Mandant** - Gemeinsames Guthaben fuer Ihr gesamtes Team. Ideal fuer Organisationen, die zentrale Budgetkontrolle bevorzugen.
|
||||||
|
|
||||||
|
**Prepaid Benutzer** - Individuelles Guthaben pro Mitarbeiter. Perfekt fuer dezentrale Teams mit eigenstaendiger Kostenverwaltung.
|
||||||
|
|
||||||
|
- Startguthaben von 10 CHF fuer neue Benutzer
|
||||||
|
- Flexible Aufladung: 10, 25, 50, 100, 250 oder 500 CHF
|
||||||
|
- Einfache Zahlung per Kreditkarte
|
||||||
|
- Sofortige Gutschrift nach Zahlung
|
||||||
|
|
||||||
|
### Kreditrahmen fuer etablierte Kunden
|
||||||
|
**Credit Postpay** - Arbeiten Sie mit einem Kreditrahmen und erhalten Sie monatliche Rechnungen. Ideal fuer Unternehmen mit etablierten Prozessen und hoeherem Nutzungsvolumen.
|
||||||
|
|
||||||
|
- Individuell vereinbarter Kreditrahmen
|
||||||
|
- Monatliche Abrechnung
|
||||||
|
- Rechnungsstellung an Ihre Firmenadresse
|
||||||
|
- Keine Vorauszahlung erforderlich
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## So funktioniert die Preisgestaltung
|
||||||
|
|
||||||
|
### Pay-per-Use - Fair und transparent
|
||||||
|
Sie bezahlen ausschliesslich fuer die tatsaechlich genutzten KI-Leistungen. Jeder Aufruf wird praezise erfasst und Ihrem Konto zugeordnet.
|
||||||
|
|
||||||
|
### Klare Preisstruktur
|
||||||
|
Unsere Preise basieren auf den Kosten der fuehrenden KI-Provider (OpenAI, Anthropic, etc.) mit einem transparenten Aufschlag fuer:
|
||||||
|
- Infrastruktur und Platform Services
|
||||||
|
- Waehrungsabsicherung und Stabilitaet
|
||||||
|
- Support und Betrieb
|
||||||
|
|
||||||
|
**Alle Preise in Schweizer Franken (CHF)** - keine Waehrungsrisiken fuer Sie.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Ihre Vorteile auf einen Blick
|
||||||
|
|
||||||
|
### Volle Transparenz
|
||||||
|
- **Echtzeit-Uebersicht**: Sehen Sie Ihr aktuelles Guthaben jederzeit ein
|
||||||
|
- **Detaillierte Statistiken**: Kosten nach Provider, Modell, Feature und Zeitraum
|
||||||
|
- **Vollstaendige Historie**: Jede Transaktion nachvollziehbar dokumentiert
|
||||||
|
|
||||||
|
### Intelligente Kontrolle
|
||||||
|
- **Warnungen**: Automatische Benachrichtigung bei niedrigem Guthaben
|
||||||
|
- **Flexible Limits**: Optionale Blockierung bei Nullsaldo
|
||||||
|
- **Budget-Management**: Individuelle Schwellenwerte pro Mandat
|
||||||
|
|
||||||
|
### Sicherheit und Compliance
|
||||||
|
- **Mandanten-Isolation**: Strikte Trennung zwischen Organisationen
|
||||||
|
- **Audit-Trail**: Vollstaendige Nachverfolgbarkeit aller Transaktionen
|
||||||
|
- **DSGVO-konform**: Schweizer Datenschutzstandards
|
||||||
|
|
||||||
|
### Einfache Verwaltung
|
||||||
|
- **Self-Service**: Guthaben jederzeit selbst aufladen
|
||||||
|
- **Keine Vertraege**: Keine Mindestlaufzeiten oder Kuendigungsfristen
|
||||||
|
- **Sofortige Aktivierung**: Nach Zahlung direkt einsatzbereit
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Fuer wen ist welches Modell geeignet?
|
||||||
|
|
||||||
|
| Ihr Bedarf | Empfohlenes Modell | Vorteil |
|
||||||
|
|------------|-------------------|---------|
|
||||||
|
| Kleine Teams, erste Schritte mit KI | **Prepaid Benutzer** | Jeder verwaltet sein eigenes Budget |
|
||||||
|
| Zentrale Kostenkontrolle | **Prepaid Mandant** | Ein gemeinsames Budget fuer alle |
|
||||||
|
| Etablierte Prozesse, hoeheres Volumen | **Credit Postpay** | Arbeiten ohne Vorauszahlung, monatliche Rechnung |
|
||||||
|
| Pilotprojekte, flexible Nutzung | **Prepaid Mandant** | Schneller Start, volle Flexibilitaet |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Haeufig gestellte Fragen
|
||||||
|
|
||||||
|
**Gibt es versteckte Kosten?**
|
||||||
|
Nein. Sie bezahlen ausschliesslich fuer die tatsaechlich genutzten KI-Leistungen. Keine Setup-Gebuehren, keine Grundgebuehren, keine versteckten Zuschlaege.
|
||||||
|
|
||||||
|
**Wie schnell wird mein Guthaben gutgeschrieben?**
|
||||||
|
Sofort nach erfolgreicher Zahlung. Sie koennen direkt weiterarbeiten.
|
||||||
|
|
||||||
|
**Kann ich zwischen Modellen wechseln?**
|
||||||
|
Ja, Ihr Administrator kann das Abrechnungsmodell jederzeit anpassen - je nach Entwicklung Ihrer Anforderungen.
|
||||||
|
|
||||||
|
**Welche Zahlungsmethoden werden akzeptiert?**
|
||||||
|
Aktuell: Kreditkarte ueber Stripe Checkout. Fuer Credit Postpay: Rechnung per E-Mail.
|
||||||
|
|
||||||
|
**Wie detailliert ist die Kostenaufschluesselung?**
|
||||||
|
Sehr detailliert. Sie sehen fuer jede Transaktion: Provider, Modell, Feature, Benutzer, Zeitpunkt und Kosten.
|
||||||
|
|
||||||
|
**Was passiert, wenn mein Guthaben aufgebraucht ist?**
|
||||||
|
Je nach Konfiguration erhalten Sie eine Warnung oder KI-Funktionen werden blockiert. Sie koennen jederzeit selbst Guthaben aufladen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Jetzt starten
|
||||||
|
|
||||||
|
Beginnen Sie mit einem Prepaid-Modell und 10 CHF Startguthaben pro Benutzer. Keine Kreditkarte erforderlich fuer den ersten Test.
|
||||||
|
|
||||||
|
**Bereit fuer den naechsten Schritt?**
|
||||||
|
Kontaktieren Sie uns fuer eine persoenliche Demo oder starten Sie direkt mit Ihrem Team.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Technische Details fuer IT-Verantwortliche
|
||||||
|
|
||||||
|
- **API-First**: Vollstaendige REST-API fuer Billing-Operationen
|
||||||
|
- **Webhook-Integration**: Automatische Verarbeitung von Zahlungsereignissen
|
||||||
|
- **RBAC-Integration**: Feingranulare Zugriffskontrolle auf AI-Provider
|
||||||
|
- **Stripe-Integration**: Sichere Zahlungsabwicklung nach PCI-DSS
|
||||||
|
- **Echtzeit-Abrechnung**: Sofortige Kostenzuordnung nach jedem AI-Call
|
||||||
|
- **Statistik-Aggregation**: Nach Tag, Monat, Jahr mit Breakdown nach Provider/Feature
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Kontakt
|
||||||
|
|
||||||
|
**PowerOn AG**
|
||||||
|
Zuerich, Schweiz
|
||||||
|
|
||||||
|
Haben Sie Fragen zu unseren Abrechnungsmodellen?
|
||||||
|
Unser Team beraet Sie gerne persoenlich.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Stand: Maerz 2026*
|
||||||
245
docs/product-teaser-poweron.md
Normal file
|
|
@ -0,0 +1,245 @@
|
||||||
|
# PowerOn Product Teaser
|
||||||
|
*Ihre KI-Plattform. Ein Arbeitsplatz. Alle Moeglichkeiten.*
|
||||||
|
|
||||||
|
## Die KI-Plattform fuer produktivere Teams
|
||||||
|
*Weniger Aufwand, bessere Ergebnisse -- ab dem ersten Tag.*
|
||||||
|
|
||||||
|
PowerOn ist die zentrale Arbeitsplattform fuer Unternehmen, die Prozesse vereinfachen, Wissen skalieren und wiederkehrende Aufgaben intelligent automatisieren wollen.
|
||||||
|
Auch ohne technisches Vorwissen starten Teams schnell: klare Oberflaechen, gefuehrte Workflows und direkt nutzbare KI-Funktionen helfen ab dem ersten Tag.
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `HERO_SCREENSHOT`
|
||||||
|
> **Empfohlener Inhalt:** Startseite oder Dashboard mit PowerOn Branding und klarer Hauptnavigation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Was ist PowerOn?
|
||||||
|
*KI, Zusammenarbeit und Automatisierung -- vereint in einer Plattform.*
|
||||||
|
|
||||||
|
PowerOn verbindet KI-Assistenten, teamweite Zusammenarbeit und Automatisierung in einer Plattform. Unternehmen erhalten damit einen digitalen Arbeitsplatz, in dem Beratung, Meetings, Prozesse und Fachdaten in einem einheitlichen Erlebnis zusammenkommen.
|
||||||
|
|
||||||
|
### Ihr Nutzen auf einen Blick
|
||||||
|
*Fuenf Gruende, warum Unternehmen auf PowerOn setzen.*
|
||||||
|
|
||||||
|
- Schnellere Entscheidungen durch kontextbezogene KI-Unterstuetzung
|
||||||
|
- Weniger manuelle Arbeit durch wiederverwendbare Automationen
|
||||||
|
- Hoehere Qualitaet durch standardisierte Ablaeufe und transparente Ergebnisse
|
||||||
|
- Bessere Zusammenarbeit, weil Teams in vertrauten Umgebungen arbeiten koennen
|
||||||
|
- Skalierbarkeit fuer wachsende Organisationen und unterschiedliche Mandate
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_DASHBOARD`
|
||||||
|
> **Empfohlener Inhalt:** Uebersichtsseite mit zentralen Kacheln, Kennzahlen oder Einstiegen in Features
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_NAVIGATION`
|
||||||
|
> **Empfohlener Inhalt:** Linke Navigation mit den Bereichen Power Desktop, Test Coach, Teams Bot, Automation und Machbarkeitsstudie
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_FEATURE_STORE`
|
||||||
|
> **Empfohlener Inhalt:** Feature-Store mit aktivierbaren Modulen
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Feature 1: Power Desktop (AI Workspace)
|
||||||
|
*Ihr digitaler Schreibtisch -- alles an einem Ort.*
|
||||||
|
|
||||||
|
Power Desktop ist der zentrale Arbeitsbereich fuer produktives, KI-gestuetztes Arbeiten. Teams finden dort die wichtigsten Werkzeuge in einer durchgaengigen Umgebung: Chat, Editor und experimentelle KI-Arbeitsflaechen.
|
||||||
|
|
||||||
|
### Was neue Kunden daran schaetzen
|
||||||
|
*Der Mehrwert, der sofort spuerbar ist.*
|
||||||
|
|
||||||
|
- Ein Ort fuer Ideen, Inhalte und Umsetzung
|
||||||
|
- Weniger Tool-Wechsel, mehr Fokus im Tagesgeschaeft
|
||||||
|
- Schneller Einstieg auch fuer Nicht-Techniker durch klare Bedienlogik
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
*Chat, Editor und Playground in einer Umgebung.*
|
||||||
|
|
||||||
|
- KI-Chat fuer Fragen, Entwuerfe und iterative Verbesserungen
|
||||||
|
- Editor-Arbeitsbereich fuer strukturierte Inhalte und Dokumentation
|
||||||
|
- Playground-Bereich zum Testen und Verfeinern von KI-gestuetzten Loesungen
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_WORKSPACE_OVERVIEW`
|
||||||
|
> **Empfohlener Inhalt:** Gesamtansicht des Workspaces mit mehreren Bereichen
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_WORKSPACE_CHAT`
|
||||||
|
> **Empfohlener Inhalt:** Konkrete Chat-Interaktion mit verwertbarer Antwort
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_WORKSPACE_CODE`
|
||||||
|
> **Empfohlener Inhalt:** Editor-Ansicht mit klaren Arbeitsflaechen
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Feature 2: Test Coach (Kommunikations-Coach)
|
||||||
|
*Besser kommunizieren -- mit KI als persoenlichem Sparringspartner.*
|
||||||
|
|
||||||
|
Der Test Coach unterstuetzt Mitarbeitende und Fuehrungskraefte dabei, Kommunikationssituationen gezielt zu trainieren. Statt abstrakter Theorie liefert die KI konkrete, direkt anwendbare Impulse fuer den Berufsalltag.
|
||||||
|
|
||||||
|
### Was neue Kunden daran schaetzen
|
||||||
|
*Persoenliches Wachstum, messbar und alltagsnah.*
|
||||||
|
|
||||||
|
- Sichereres Auftreten in schwierigen Gespraechen
|
||||||
|
- Kontinuierliche Weiterentwicklung mit messbarem Fortschritt
|
||||||
|
- Individuelle Unterstuetzung passend zum persoenlichen Kommunikationsstil
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
*Von Themenauswahl bis Gamification -- alles in einem Dossier.*
|
||||||
|
|
||||||
|
- Coaching-Kontexte fuer Themen, Ziele und Herausforderungen
|
||||||
|
- Session-basiertes Training mit KI-Dialogen
|
||||||
|
- Aufgaben, Fortschritt und Verlauf in einem Dossier gebuendelt
|
||||||
|
- Sprachunterstuetzung fuer natuerlichere Lernsituationen
|
||||||
|
- Motivierende Elemente wie Streaks, Scores und Badges
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_COACH_DASHBOARD`
|
||||||
|
> **Empfohlener Inhalt:** Dashboard mit KPIs (z. B. Streak, Score, Badges)
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_COACH_SESSION`
|
||||||
|
> **Empfohlener Inhalt:** Laufende Coaching-Session mit Chat-Verlauf
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_COACH_DOSSIER`
|
||||||
|
> **Empfohlener Inhalt:** Dossier mit Tabs fuer Aufgaben, Sessions und Dokumente
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Feature 3: Teams Bot
|
||||||
|
*KI-Unterstuetzung dort, wo Ihr Team bereits arbeitet -- in Microsoft Teams.*
|
||||||
|
|
||||||
|
Der Teams Bot bringt KI-Unterstuetzung direkt in Microsoft Teams Meetings. Er kann Sitzungen begleiten, Inhalte erfassen und kontextbezogene Antworten bereitstellen.
|
||||||
|
|
||||||
|
### Was neue Kunden daran schaetzen
|
||||||
|
*Meetings produktiver machen, ohne Gewohnheiten zu aendern.*
|
||||||
|
|
||||||
|
- Sofortiger Mehrwert in bereits etablierten Meeting-Prozessen
|
||||||
|
- Besseres Informationsmanagement durch strukturierte Protokollierung
|
||||||
|
- Schnellere Nachbereitung durch KI-gestuetzte Unterstuetzung
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
*Ein Link genuegt -- der Bot uebernimmt den Rest.*
|
||||||
|
|
||||||
|
- Start einer Session ueber Meeting-Link
|
||||||
|
- Unterstuetzung verschiedener Join-Modi (z. B. Bot oder Benutzerkonto)
|
||||||
|
- Laufende Verarbeitung von Meeting-Inhalten
|
||||||
|
- KI-Antworten als Chat, Audio oder kombiniert
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_TEAMSBOT_START`
|
||||||
|
> **Empfohlener Inhalt:** Formular/Ansicht zum Start einer Teams-Bot-Session
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_TEAMSBOT_LIVE`
|
||||||
|
> **Empfohlener Inhalt:** Aktive Session mit Status und Live-Interaktion
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Feature 4: Automation
|
||||||
|
*Einmal definieren, immer wieder zuverlaessig ausfuehren.*
|
||||||
|
|
||||||
|
Automation in PowerOn macht wiederkehrende Aufgaben planbar und zuverlaessig. Unternehmen definieren Vorlagen einmal und fuehren Prozesse danach manuell oder zeitgesteuert aus.
|
||||||
|
|
||||||
|
### Was neue Kunden daran schaetzen
|
||||||
|
*Weniger Routine, mehr Raum fuer das Wesentliche.*
|
||||||
|
|
||||||
|
- Spuerbare Entlastung bei repetitiven Aufgaben
|
||||||
|
- Konstante Prozessqualitaet ueber Teams hinweg
|
||||||
|
- Mehr Zeit fuer wertschaffende Arbeit
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
*Templates, Zeitplanung und Echtzeit-Transparenz.*
|
||||||
|
|
||||||
|
- Verwaltung von Automations-Definitionen
|
||||||
|
- Wiederverwendbare Templates fuer typische Geschaeftsprozesse
|
||||||
|
- Geplante oder sofortige Ausfuehrung
|
||||||
|
- Transparente Rueckmeldungen ueber Live-Logs
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_AUTOMATION_LIST`
|
||||||
|
> **Empfohlener Inhalt:** Uebersicht der vorhandenen Automationen
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_AUTOMATION_EDIT`
|
||||||
|
> **Empfohlener Inhalt:** Erstellungs- oder Bearbeitungsmaske mit Template-Auswahl
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_AUTOMATION_LOGS`
|
||||||
|
> **Empfohlener Inhalt:** Laufende oder abgeschlossene Ausfuehrung mit Log-Anzeige
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Feature 5: Machbarkeitsstudie (Real Estate)
|
||||||
|
*Immobilienpotenziale in Minuten statt Tagen bewerten.*
|
||||||
|
|
||||||
|
Die Machbarkeitsstudie unterstuetzt bei der schnellen Erstbewertung von Immobilienpotenzialen. Relevante Informationen aus Regelwerken werden strukturiert extrahiert und als verwertbare Entscheidungsgrundlage aufbereitet.
|
||||||
|
|
||||||
|
### Was neue Kunden daran schaetzen
|
||||||
|
*Fundierte Entscheidungen frueher im Projektverlauf.*
|
||||||
|
|
||||||
|
- Schnellere Vorpruefung von Immobilienprojekten
|
||||||
|
- Bessere Entscheidungsgrundlagen in fruehen Projektphasen
|
||||||
|
- Klar strukturierte Ergebnisse statt unuebersichtlicher Rohdaten
|
||||||
|
|
||||||
|
### Kernfunktionen
|
||||||
|
*Automatische Analyse von Regelwerken und Parzellendaten.*
|
||||||
|
|
||||||
|
- KI-gestuetzte Extraktion von BZO-Inhalten
|
||||||
|
- Aufbereitung zentraler Fakten
|
||||||
|
- Konkrete Vorschlaege zur Einschaetzung von Potenzialen
|
||||||
|
- Zusatzinformationen fuer vertiefte Pruefungen
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_REALESTATE_MAP`
|
||||||
|
> **Empfohlener Inhalt:** Karten-/Parzellenansicht im Real-Estate-Bereich
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_REALESTATE_MACHBARKEIT`
|
||||||
|
> **Empfohlener Inhalt:** Ergebnisbereich mit Fakten und Vorschlaegen
|
||||||
|
|
||||||
|
> **Screenshot-Platzhalter:** `SCREENSHOT_REALESTATE_BZO`
|
||||||
|
> **Empfohlener Inhalt:** Detailansicht der BZO-Extraktion
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Warum PowerOn fuer neue Kunden?
|
||||||
|
*Eine Plattform, die mit Ihren Anforderungen waechst.*
|
||||||
|
|
||||||
|
PowerOn ist darauf ausgelegt, den Einstieg in KI-gestuetztes Arbeiten einfach zu machen und zugleich professionellen Mehrwert zu liefern. Statt isolierter Einzelloesungen erhalten Unternehmen eine skalierbare Plattform, die Menschen, Prozesse und KI wirkungsvoll verbindet.
|
||||||
|
|
||||||
|
### Besonders relevant fuer Nicht-Techies
|
||||||
|
*Kein Vorwissen noetig -- einfach loslegen.*
|
||||||
|
|
||||||
|
- Intuitive Bedienung statt technischer Komplexitaet
|
||||||
|
- Klare, gefuehrte Workflows
|
||||||
|
- Sofort sichtbarer Nutzen in Alltagsszenarien
|
||||||
|
- Schrittweise Erweiterung je nach Bedarf
|
||||||
|
|
||||||
|
### Call to Action
|
||||||
|
*Jetzt den naechsten Schritt machen.*
|
||||||
|
|
||||||
|
Starten Sie mit den wichtigsten Anwendungsfaellen in Ihrem Team und bauen Sie Ihre KI-gestuetzten Prozesse mit PowerOn systematisch aus.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Benoetigte Screenshots (Uebersicht)
|
||||||
|
*Alle visuellen Platzhalter auf einen Blick.*
|
||||||
|
|
||||||
|
| Platzhalter | Benoetigter Screenshot | Empfohlene Perspektive |
|
||||||
|
| --- | --- | --- |
|
||||||
|
| `HERO_SCREENSHOT` | PowerOn Startseite mit Branding | Vollansicht mit Logo, Claim, Einstieg |
|
||||||
|
| `SCREENSHOT_DASHBOARD` | Hauptdashboard nach Login | Uebersicht mit wichtigsten Einstiegen |
|
||||||
|
| `SCREENSHOT_NAVIGATION` | Seitennavigation mit Feature-Liste | Fokus auf Feature-Namen und Struktur |
|
||||||
|
| `SCREENSHOT_FEATURE_STORE` | Feature-Store | Sichtbare Feature-Kacheln/Module |
|
||||||
|
| `SCREENSHOT_WORKSPACE_OVERVIEW` | Power Desktop Gesamtansicht | Mehrere Arbeitsbereiche gleichzeitig |
|
||||||
|
| `SCREENSHOT_WORKSPACE_CHAT` | Chat-Bereich | Konkrete Konversation mit KI-Antwort |
|
||||||
|
| `SCREENSHOT_WORKSPACE_CODE` | Editor-Bereich | Klar lesbare Arbeitsumgebung |
|
||||||
|
| `SCREENSHOT_COACH_DASHBOARD` | Coach Dashboard | KPIs wie Streak, Score, Badges |
|
||||||
|
| `SCREENSHOT_COACH_SESSION` | Aktive Coach Session | Laufender Dialog und Session-Kontext |
|
||||||
|
| `SCREENSHOT_COACH_DOSSIER` | Coach Dossier | Tabs/Abschnitte mit Aufgaben und Verlauf |
|
||||||
|
| `SCREENSHOT_TEAMSBOT_START` | Teams Bot Start | Meeting-Link und Session-Einstellungen |
|
||||||
|
| `SCREENSHOT_TEAMSBOT_LIVE` | Teams Bot Live Session | Session-Status und Interaktion |
|
||||||
|
| `SCREENSHOT_AUTOMATION_LIST` | Automation-Liste | Definitions-Uebersicht |
|
||||||
|
| `SCREENSHOT_AUTOMATION_EDIT` | Automation bearbeiten | Template + Parameter sichtbar |
|
||||||
|
| `SCREENSHOT_AUTOMATION_LOGS` | Automation-Logs | Live- oder Abschlussprotokolle |
|
||||||
|
| `SCREENSHOT_REALESTATE_MAP` | Real-Estate Kartenansicht | Parzellen und Kontext sichtbar |
|
||||||
|
| `SCREENSHOT_REALESTATE_MACHBARKEIT` | Machbarkeitsstudie Ergebnis | Fakten, Vorschlaege, strukturierte Ausgabe |
|
||||||
|
| `SCREENSHOT_REALESTATE_BZO` | BZO-Extraktionsdetails | Ausgelesene Regel-/Detailinformationen |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Hinweis zur Verwendung
|
||||||
|
*So werden aus Platzhaltern fertige Bilder.*
|
||||||
|
|
||||||
|
Alle Platzhalter koennen spaeter im selben Dokument durch finale Bilder ersetzt werden, z. B. als direkte Markdown-Bilder:
|
||||||
|
|
||||||
|
```md
|
||||||
|

|
||||||
|
```
|
||||||
BIN
docs/prompts-ui-tests.xlsx
Normal file
Binary file not shown.
168
docs/screen-recording-script-ai-chat.md
Normal file
|
|
@ -0,0 +1,168 @@
|
||||||
|
# PORTO AI Chat — Screen-Recording-Skript (Werbeclip)
|
||||||
|
|
||||||
|
**Zielgruppe:** Entscheider, C-Level, Investoren
|
||||||
|
**Ton:** sachlich, vertrauensbildend, ohne Tech-Slang
|
||||||
|
**Gesamtlänge:** ca. 90–120 Sekunden
|
||||||
|
**Auflösung:** mindestens 1920×1080; UI bzw. Browser-Zoom bei Bedarf auf 125–150 % für bessere Lesbarkeit
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Vorbereitung (nicht aufnehmen)
|
||||||
|
|
||||||
|
1. **Testdaten:** Eine anonymisierte PDF (z. B. «Muster-Vertrag») und ein kurzes internes Memo — keine echten Kundendaten.
|
||||||
|
2. **Workspace:** Bereits eingeloggt; eine leere oder neue Konversation wählen.
|
||||||
|
3. **Datenquelle (optional):** SharePoint- oder OneDrive-Testsite verbunden *oder* vorbereiteten Screen mit bereits verbundener Quelle (ohne sensible Namen).
|
||||||
|
4. **Provider:** Für eine Szene «Private LLM» sichtbar wählen — nur wenn in Ihrer Umgebung freigeschaltet; sonst Szene 7 weglassen oder durch «Mistral» ersetzen.
|
||||||
|
5. **Browser:** Tabs schliessen; keine persönlichen Lesezeichenleisten im Bild.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Struktur Overview
|
||||||
|
|
||||||
|
| Block | Dauer | Inhalt |
|
||||||
|
|-------------|---------|----------------------------------|
|
||||||
|
| Hook | ~5 s | Eine Zeile, die Aufmerksamkeit holt |
|
||||||
|
| Problem | ~15 s | Risiko + Lücke öffentlicher Chats |
|
||||||
|
| Demo | ~60 s | Walkthrough mit konkreten Prompts |
|
||||||
|
| CTA | ~15 s | Schweiz, Kontrolle, Erstgespräch |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene A — Hook (0:00–0:05)
|
||||||
|
|
||||||
|
**Bild:** Start auf PORTO Workspace (zentrale Chat-Ansicht, ruhig, keine Bewegung).
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«Was wäre, wenn Ihre Teams mit KI chatten könnten — mit echtem Zugriff auf Ihre Dokumente, aber ohne dass sensible Daten das Unternehmen verlassen?»
|
||||||
|
|
||||||
|
**Action:** Keine Klicks; 1–2 Sekunden Pause, dann sanft zur linken Sidebar zoomen oder leicht scrollen (optional).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene B — Problem (0:05–0:20)
|
||||||
|
|
||||||
|
**Bild:** Ein separater Browser-Tab oder eine Grafik ist nicht nötig — bleiben Sie in PORTO oder wechseln Sie optional zu einer neutralen Titelfolie «Das Problem».
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«Öffentliche Chat-Tools sind schnell — aber sie kennen Ihre Verträge, Ihre SharePoint-Ordner und Ihre Compliance-Regeln nicht. Das Ergebnis: Copy-Paste, Medienbrüche und ein Risiko, das Audit und Vorstand nicht tragen wollen. PORTO AI Chat schliesst diese Lücke.»
|
||||||
|
|
||||||
|
**Action:** Zurück zum Workspace wechseln.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene C — Demo Teil 1: Dokument hochladen (0:20–0:30)
|
||||||
|
|
||||||
|
**Bild:** Workspace mit leerem oder neuem Chat; linke Leiste mit Dateien sichtbar.
|
||||||
|
|
||||||
|
**Action:** PDF per **Drag & Drop** in den Chat-Bereich ziehen *oder* über Datei-Upload anhängen. Warten, bis die Datei in der Konversation / Anhänge erscheint.
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«Hier arbeiten Ihre Mitarbeitenden in einer geschützten Oberfläche. Sie ziehen ein Dokument hinein — und der KI-Agent kann es im Kontext nutzen.»
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene D — Demo Teil 2: Analyse-Prompt (0:30–0:45)
|
||||||
|
|
||||||
|
**Bild:** Cursor im Eingabefeld; dann Senden.
|
||||||
|
|
||||||
|
**Eingabetext (exakt oder leicht angepasst):**
|
||||||
|
|
||||||
|
```text
|
||||||
|
Fasse die wichtigsten Klauseln dieses Dokuments in maximal fünf Bulletpoints zusammen.
|
||||||
|
Hebe Haftung, Kündigung und Vertraulichkeit hervor. Antworte auf Deutsch.
|
||||||
|
```
|
||||||
|
|
||||||
|
**Action:** Nach dem Senden **nicht** unterbrechen: kurz die **Streaming-Antwort** laufen lassen; wenn sichtbar, **Tool-Aktivität** oder Fortschritt in der rechten Spalte («Activity» / Tool-Log) mitfilmen.
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«Statt manuell zu lesen und zu kopieren, stellt man eine präzise Frage. Die KI arbeitet mit dem Dokument — und Sie sehen, was im Hintergrund passiert. Transparenz statt Blackbox.»
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene E — Demo Teil 3: Datenquelle (0:45–1:00)
|
||||||
|
|
||||||
|
**Bild:** Linke Sidebar → Tab **Quellen / Datenquellen** (SharePoint, OneDrive o. Ä., je nach UI-Label).
|
||||||
|
|
||||||
|
**Action:** Bereits verbundene Quelle anzeigen *oder* kurz durch Ordner browsen (nur Testinhalte). Dann zurück in den Chat.
|
||||||
|
|
||||||
|
**Eingabetext (Prompt):**
|
||||||
|
|
||||||
|
```text
|
||||||
|
Suche in meiner verbundenen Datenquelle nach dem neuesten Dokument zum Thema "Onboarding"
|
||||||
|
und gib mir eine einzeilige Inhaltszusammenfassung. Wenn nichts passt, sag es klar.
|
||||||
|
```
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«PORTO verbindet sich mit Ihren Systemen — SharePoint, OneDrive, Google Drive und mehr. Die KI beantwortet Fragen über Ihr Unternehmenswissen, nicht nur über das offene Internet.»
|
||||||
|
|
||||||
|
*Hinweis:* Wenn die Suche in der Demo leer zurückkommt, Voice-over anpassen: «Auch dann liefert das System eine klare Antwort — ohne zu halluzinieren.»
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene F — Demo Teil 4: Ergebnis als Datei (1:00–1:15)
|
||||||
|
|
||||||
|
**Bild:** Neuer Follow-up im selben Chat.
|
||||||
|
|
||||||
|
**Eingabetext (Prompt):**
|
||||||
|
|
||||||
|
```text
|
||||||
|
Erstelle auf Basis deiner letzten Antwort eine strukturierte Markdown-Datei
|
||||||
|
"Executive_Summary.md" mit Überschriften: Kontext, Kernpunkte, offene Fragen.
|
||||||
|
Schlage die Datei zur Freigabe vor, falls dein Workflow das vorsieht.
|
||||||
|
```
|
||||||
|
|
||||||
|
**Action:** Wenn **Datei-Änderungsvorschlag** / Vorschau erscheint: kurz **Akzeptieren** oder Vorschau zeigen (je nach Produktverhalten). Optional rechte Spalte **Vorschau** einblenden.
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«Der Agent liefert nicht nur Text im Chat — er kann Arbeitsergebnisse vorbereiten und zur Freigabe einreichen. Kontrolle bleibt beim Menschen.»
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene G — Demo Teil 5: Modellwahl (1:15–1:25)
|
||||||
|
|
||||||
|
**Bild:** Eingabebereich unten; **Provider-Auswahl** / Modell-Multiselect sichtbar machen.
|
||||||
|
|
||||||
|
**Action:** Dropdown öffnen; **Private LLM** (oder «Mistral» / konfigurierte Option) auswählen — ohne erneut zu senden, es sei denn Sie wollen eine kurze «Ping»-Antwort zeigen.
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«Entscheider interessiert: Welches Modell läuft? Hier wählen Sie es — bis hin zu Private LLM auf Schweizer Infrastruktur. Governance wird zur Einstellung, nicht zur Ausrede.»
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Szene H — CTA (1:25–1:40)
|
||||||
|
|
||||||
|
**Bild:** Ruhiger Vollbild-Workspace oder Ihre PORTO-/PowerOn-Schlussfolie mit Kontakt.
|
||||||
|
|
||||||
|
**Voice-over:**
|
||||||
|
«PORTO AI Chat macht produktive KI-Nutzung vereinbar mit Datenschutz und Kontrolle. PowerOn zeigt Ihnen im Erstgespräch, wie das in Ihrem konkreten Prozess funktioniert — von Treuhand bis Legal. Kontakt: poweron.swiss.»
|
||||||
|
|
||||||
|
**On-Screen-Text (optional, 3–4 Sekunden):**
|
||||||
|
`poweron.swiss` · `Erstgespräch vereinbaren`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Prompts — Schnellkopie (alle Demo-Eingaben)
|
||||||
|
|
||||||
|
```
|
||||||
|
1) Analyse:
|
||||||
|
Fasse die wichtigsten Klauseln dieses Dokuments in maximal fünf Bulletpoints zusammen.
|
||||||
|
Hebe Haftung, Kündigung und Vertraulichkeit hervor. Antworte auf Deutsch.
|
||||||
|
|
||||||
|
2) Datenquelle:
|
||||||
|
Suche in meiner verbundenen Datenquelle nach dem neuesten Dokument zum Thema "Onboarding"
|
||||||
|
und gib mir eine einzeilige Inhaltszusammenfassung. Wenn nichts passt, sag es klar.
|
||||||
|
|
||||||
|
3) Datei:
|
||||||
|
Erstelle auf Basis deiner letzten Antwort eine strukturierte Markdown-Datei
|
||||||
|
"Executive_Summary.md" mit Überschriften: Kontext, Kernpunkte, offene Fragen.
|
||||||
|
Schlage die Datei zur Freigabe vor, falls dein Workflow das vorsieht.
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Technische Checkliste nach der Aufnahme
|
||||||
|
|
||||||
|
- [ ] Keine echten Kundennamen, E-Mails oder Vertragsnummern sichtbar
|
||||||
|
- [ ] Ton: optional dezente Hintergrundmusik (royalty-free), Voice-over dominant
|
||||||
|
- [ ] Untertitel (DE) für LinkedIn / stummes Abspielen empfohlen
|
||||||
|
- [ ] Endcard: Logo + URL + «Schweizer Datenhaltung»
|
||||||
BIN
docs/settings-ui-tests.xlsx
Normal file
Binary file not shown.
137
docs/slide-erfolgreicher-einsatz-von-ki.md
Normal file
|
|
@ -0,0 +1,137 @@
|
||||||
|
# Erfolgreicher Einsatz von KI
|
||||||
|
|
||||||
|
*PowerPoint-Vorlage. Ersetzt die Ring-Grafik durch eine klar lesbare 5-Säulen-Darstellung. Jede Säule: Titel, ein Leitsatz (aus Originalgrafik), 2–3 konkrete Belege aus der PowerOn-Doku.*
|
||||||
|
|
||||||
|
**Quellenbasis:** Originalbild „Erfolgreicher Einsatz von KI" sowie `wiki/a-strategy/product-vision.md`, `wiki/b-reference/platform/rbac.md`, `wiki/b-reference/platform/neutralization.md`, `wiki/b-reference/gateway/ai-agent.md`, `wiki/e-compliance/security-overview.md`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 0 – Intro (Titelfolie)
|
||||||
|
|
||||||
|
### Von der KI-Idee zur Roadmap, die trägt.
|
||||||
|
|
||||||
|
**Ihre Daten bleiben in der Schweiz. Ergebnisse in Wochen, nicht Monaten.**
|
||||||
|
|
||||||
|
Was Sie in der Hand halten: Handlungsfelder · Kausalnetz · Umsetzungsroadmap · Steckbriefe.
|
||||||
|
|
||||||
|
**Kicker (oben, klein, grau, uppercase):** PRÄSENTATION · ERFOLGREICHER EINSATZ VON KI
|
||||||
|
**Footer (unten, dezent):** PowerOn · www.poweron.swiss
|
||||||
|
|
||||||
|
**Textbausteine in Reihenfolge:**
|
||||||
|
|
||||||
|
| Ebene | Inhalt | Stil |
|
||||||
|
|---|---|---|
|
||||||
|
| Kicker | `PRÄSENTATION · ERFOLGREICHER EINSATZ VON KI` | klein, uppercase, grau |
|
||||||
|
| Headline | `Von der KI-Idee zur Roadmap, die trägt.` | XXL, schwarz, 2 Zeilen (Umbruch nach „KI-Idee") |
|
||||||
|
| Subline | `Ihre Daten bleiben in der Schweiz. Ergebnisse in Wochen, nicht Monaten.` | mittel, dunkelgrau |
|
||||||
|
| Deck-Teaser | `Was Sie in der Hand halten: Handlungsfelder · Kausalnetz · Umsetzungsroadmap · Steckbriefe.` | klein, grau, eine Zeile |
|
||||||
|
| Footer | `PowerOn · www.poweron.swiss` | klein, grau |
|
||||||
|
|
||||||
|
**Begründung der Wortwahl:**
|
||||||
|
|
||||||
|
- „Von der KI-Idee zur Roadmap, die trägt" — verknüpft explizit mit Folie 4 („Das halten Sie am Ende in der Hand") und schafft eine Deck-Klammer. „die trägt" setzt den selbstbewussten Ton, ohne marktschreierisch zu wirken.
|
||||||
|
- „Ihre Daten bleiben in der Schweiz" — deckt Compliance (CISO), Differenzierung (CEO) und Risiko-Argument (CFO) in einem Satz ab.
|
||||||
|
- „Ergebnisse in Wochen, nicht Monaten" — bewusst ohne harte Zahl (Time-to-Value steht noch nicht verbindlich fest), aber mit klarem Erwartungsmanagement.
|
||||||
|
- „Was Sie in der Hand halten" — identischer Wording-Anker wie Folie 4. Konsistenz schafft Vertrauen.
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** 16:9, weißer Hintergrund, keine Hintergrund-Textur. Links 55 %: Kicker → Headline → Subline → Teaser, alles linksbündig. Rechts 45 %: die 5-Säulen-Grafik auf dezentem, hellgrauem Panel mit abgerundeten Ecken, **mit Labels unter jedem Balken** (Use-Cases · Datenschutz · Berechtigungen · Verbindungen · Regeln/Ethik) in kleiner grauer Schrift. Dünne Trennlinie über dem Footer. Nur zwei Schriftgrößen-Stufen: Headline XXL + alles andere. Keine dekorativen Icons, keine Badges, keine Fülltextur.
|
||||||
|
|
||||||
|
> **C-Level-Logik:** EIN visueller Anker (Headline) statt fünf konkurrierender Hierarchieebenen. Ergebnis-Framing („Roadmap") spricht CEO, Subline fängt CFO („Wochen") und Compliance („Schweiz") gleichzeitig mit. Teaser baut inhaltliche Brücke zu Folie 4 – Leser versteht sofort, worauf das Deck hinausläuft. Generisches Wording hält die Zielgruppe offen; Branchen-Personalisierung bleibt dem Cover-Letter vorbehalten.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 1 – Übersicht: Die 5 Erfolgsfaktoren
|
||||||
|
|
||||||
|
### Erfolgreicher Einsatz von KI
|
||||||
|
|
||||||
|
**Fünf Säulen, die KI im Unternehmen sicher und wirksam machen.**
|
||||||
|
|
||||||
|
| # | Säule | Leitsatz (aus Originalgrafik) |
|
||||||
|
|---|---|---|
|
||||||
|
| 1 | **Use-Cases** | Der Einsatz basiert auf klar definierten Use-Cases. |
|
||||||
|
| 2 | **Datenschutz** | Keine sensitiven Daten gelangen nach aussen. |
|
||||||
|
| 3 | **Berechtigungen** | Die KI hat nur Zugriff auf klar definierte Daten. |
|
||||||
|
| 4 | **Verbindungen** | Einfache Anbindung von Informationsquellen und Agentensystemen. |
|
||||||
|
| 5 | **Regeln / Ethik** | Vertrauensvoller und fairer Einsatz der KI. |
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** 5 gleich grosse Karten nebeneinander (Icons oben, Titel fett, Leitsatz darunter). Kein Kreis, keine schräg gestellten Labels. Reihenfolge links → rechts von „Voraussetzung" (Use-Case) über „Schutz" (Datenschutz, Berechtigungen) und „Integration" (Verbindungen) bis „Leitplanken" (Regeln / Ethik).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 2 – Use-Cases
|
||||||
|
|
||||||
|
### Der Einsatz basiert auf klar definierten Use-Cases
|
||||||
|
|
||||||
|
- **Schrittweise Einführung** statt Big-Bang: „Schrittweise Integration, beginnend mit einfachen Use Cases." (product-vision.md)
|
||||||
|
- **Feature-Store-Architektur:** Mandanten aktivieren modular nur die Features, die sie brauchen (Workspace, Automation, CommCoach, Trustee …). Skaliert von Einzelanwendung bis Full-Suite. (product-vision.md)
|
||||||
|
- **Spezialisierte Agenten pro Aufgabe:** Chat, Workflow, Voice, RAG, Automation – jeder Agent auf seinen Use-Case zugeschnitten, koordiniert durch eine zentrale Engine. (product-vision.md)
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Drei Stufen-Icons (klein → mittel → gross), um die schrittweise Einführung zu zeigen.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 3 – Datenschutz
|
||||||
|
|
||||||
|
### Keine sensitiven Daten gelangen nach aussen
|
||||||
|
|
||||||
|
- **Datenschutz-Neutralisierer:** Sensitive Inhalte werden durch stabile Platzhalter ersetzt, bevor Text an externe KI-Modelle geht oder dauerhaft im RAG landet. Ein zentrales AI-Gate prüft jeden Modell-Call. (neutralization.md)
|
||||||
|
- **Hard-Mode:** Ist Neutralisierung erforderlich und scheitert, wird der Call blockiert – Inhalte gelangen nie im Klartext zum Modell. (neutralization.md)
|
||||||
|
- **Private-LLM-Option:** Für höchste Anforderungen kann ein lokal betriebenes Sprachmodell genutzt werden. In diesem Fall verlassen keine Daten die eigene Infrastruktur. (security-overview.md § 7.5)
|
||||||
|
- **Kein Training mit Kundendaten:** Über Enterprise-APIs der Anbieter vertraglich ausgeschlossen. (security-overview.md § 7.4)
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Symbol „Schild" mit einem ausgehenden Pfeil, der auf einen Filter trifft.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 4 – Berechtigungen
|
||||||
|
|
||||||
|
### Die KI hat nur Zugriff auf klar definierte Daten
|
||||||
|
|
||||||
|
- **Rollenbasierte Zugriffskontrolle (RBAC)** auf vier Stufen: System, Mandant, Feature und Feature-Instanz. (rbac.md)
|
||||||
|
- **Feingliedrige Zugriffsstufen** pro Aktion (Lesen, Erstellen, Bearbeiten, Löschen):
|
||||||
|
|
||||||
|
| Stufe | Zugriff |
|
||||||
|
|---|---|
|
||||||
|
| Kein Zugriff | Funktion nicht verfügbar |
|
||||||
|
| Eigene Daten | Nur selbst erstellte Einträge |
|
||||||
|
| Mandantendaten | Alle Daten des eigenen Mandanten |
|
||||||
|
| Alle Daten | Vollzugriff (Administratoren) |
|
||||||
|
|
||||||
|
*(security-overview.md § 4.1 / rbac.md)*
|
||||||
|
- **Vollständige Mandantentrennung:** Zugehörigkeitsprüfung bei jedem Zugriff serverseitig – keine mandantenübergreifenden Datenflüsse. (security-overview.md § 3)
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Schlüssel-Icon + vier abgestufte Balken (kein / eigene / Mandant / alle).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 5 – Verbindungen
|
||||||
|
|
||||||
|
### Einfache Anbindung von Informationsquellen und Agentensystemen
|
||||||
|
|
||||||
|
- **Toolbox-Registry:** Der Agent verfügt über thematische Tool-Gruppen (`core`, `ai`, `datasources`, `email`, `sharepoint`, `clickup`, `jira`, `workflow`, `trustee`) und kann bei Bedarf weitere zur Laufzeit nachfordern. (ai-agent.md)
|
||||||
|
- **Connection-abhängige Aktivierung:** External-Toolboxes werden nur freigeschaltet, wenn der Nutzer eine passende Connection hat (z. B. Microsoft, ClickUp, Jira). (ai-agent.md)
|
||||||
|
- **Modellunabhängigkeit:** Integration mit Anthropic, OpenAI, Mistral, Perplexity, Tavily und Private LLM – kein Vendor-Lock-in, das jeweils beste Modell pro Aufgabe. (product-vision.md)
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Hub-and-Spoke-Grafik: PowerOn in der Mitte, Connectoren nach aussen (SharePoint, ClickUp, Jira, Mail, Private LLM, externe Modelle).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Folie 6 – Regeln / Ethik
|
||||||
|
|
||||||
|
### Vertrauensvoller und fairer Einsatz der KI
|
||||||
|
|
||||||
|
- **Transparente KI-Datenverarbeitung:** Die Plattform legt offen, welche Daten an welche KI-Dienste übermittelt werden. (security-overview.md § 7)
|
||||||
|
- **Lückenloser Audit-Trail:** Alle sicherheitsrelevanten Aktionen (Zugriffe, Administratoraktionen, Berechtigungsänderungen, KI-Nutzung) werden automatisch protokolliert und sind für Compliance-Nachweise verfügbar. (security-overview.md § 8)
|
||||||
|
- **DSGVO-Betroffenenrechte als Self-Service:** Auskunft, Löschung, Datenübertragbarkeit und Berichtigung sind direkt in der Plattform implementiert. (security-overview.md § 2)
|
||||||
|
- **Kein unkontrolliertes Superuser-Konto:** Auch Administratoren unterliegen dem RBAC-System; jede Aktion ist nachvollziehbar. (security-overview.md § 4.3)
|
||||||
|
|
||||||
|
> **Visual-Hinweis:** Waage-Icon (Balance) plus ein Logbuch-Symbol für den Audit-Trail.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Übergeordneter Visual-Hinweis für PowerPoint
|
||||||
|
|
||||||
|
- **Statt Kreis → Zeile:** 5 gleich grosse Karten nebeneinander auf der Übersichts-Folie.
|
||||||
|
- **Farblogik konsistent halten:** pro Säule eine Farbe (z. B. wie im Original: Use-Cases grün, Datenschutz rosa, Berechtigungen blau, Verbindungen grau, Regeln/Ethik orange) und diese Farbe auch auf der jeweiligen Detail-Folie als Akzent verwenden.
|
||||||
|
- **Lesbarkeit:** Keine schräg gestellten Labels. Titel horizontal, Leitsatz in 1–2 Zeilen, Belege als Bullet-Liste mit max. 3–4 Einträgen pro Folie.
|
||||||
|
- **Quellen-Footer (optional):** klein am Folienrand: „Quelle: PowerOn Wiki – a-strategy / b-reference / e-compliance".
|
||||||
266 docs/social-clip-poweron-ai-desktop.md — Normal file
@ -0,0 +1,266 @@
|
||||||
|
# Social-Media-Werbeclip: PowerOn Desktop (AI Workspace)
|
||||||
|
|
||||||
|
Handbuch fuer einen **Stufen-Clip**: Funktionen und Vorteile **der Reihe nach** — aehnlich wie bei eurem **Treuhand- / Trustee-Beispiel** (Canva-Folie: grosse Schrittnummer, klare Headline, kurzer Erklaertext, zentrale Screenshot-Flaeche, optionaler Footer-Hinweis mit Pfeil).
|
||||||
|
|
||||||
|
**Medien:** Mockups, **selbst aufgezeichnete Screen Recordings**, optional Motion-Transitions zwischen den Stufen.
|
||||||
|
**Laenge:** ca. **30–60 s**, je nach Anzahl Stufen (pro Stufe typisch **3–5 s**).
|
||||||
|
**Plattformen:** Reels, Shorts, TikTok (**9:16**), LinkedIn (**1:1** oder **4:5**).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Dieses Format vs. „Problem–Loesung–Montage“
|
||||||
|
|
||||||
|
| Ansatz | Eignung |
|
||||||
|
|--------|---------|
|
||||||
|
| **Stufen-Clip (dieses Dokument)** | Zuschauer sollen sich **einzelne Staerken** nacheinander **merken** — wie eine Kurz-Praesentation. Ideal, wenn ihr **mehrere Funktionen** der Reihe nach fair abhandeln wollt. |
|
||||||
|
| Reiner Hook–Pain–Solution-Clip | Ein emotionaler Bogen in 20–30 s; weniger Platz fuer **5+ konkrete Features**. |
|
||||||
|
|
||||||
|
Beides laesst sich kombinieren: **Stufe 0** = 2 s Hook, dann **Stufe 1 ff.** = Features.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Slide-/Card-Vorlage (orientiert am Trustee-Beispiel)
|
||||||
|
|
||||||
|
Pro **Stufe** eine visuelle Einheit (Canva-Slide, After-Effects-Comp oder Schnitt-Szene mit festem Layout):
|
||||||
|
|
||||||
|
| Bereich | Inhalt | Hinweis |
|
||||||
|
|---------|--------|---------|
|
||||||
|
| **Badge** (oben links, optional) | z. B. `Neu`, `PowerOn Desktop`, Kampagnen-Tag | Kurz; nicht jede Stufe muss ein Badge haben |
|
||||||
|
| **Schrittnummer** | Grosse Zahl `1`, `2`, `3` … | Sofort klar: „wir sind bei Schritt X“ |
|
||||||
|
| **Headline** | **Ein Nutzenversprechen** (nicht Techniklabel) | z. B. „Einfache Bedienung“, „Alles im Blick“ |
|
||||||
|
| **Fliesstext** (1–2 Saetze) | **Was die Funktion tut** + **warum es dem Nutzer hilft** | Verstaendlich fuer Nicht-Techies |
|
||||||
|
| **Akzentzeile** (optional, unten mit Pfeil) | Micro-CTA oder Feature-Kern | z. B. „Einfacher Drag and Drop“ — analog zu eurem Trustee-Slide |
|
||||||
|
| **Mittelband / Label** ueber dem Screenshot | **Name der gezeigten Funktion** | z. B. „Dokumenten-Upload“ bei Trustee; bei Desktop z. B. „KI-Chat im Workspace“ |
|
||||||
|
| **Hauptbild** | **Screen Recording** oder Mockup | Echte UI bevorzugt; Demo-Mandat, anonymisiert |
|
||||||
|
| **Seitenleiste** (optional) | `www.poweron.swiss` vertikal | Wiedererkennung wie auf eurer Referenzfolie |
|
||||||
|
|
||||||
|
**Social-Best-Practice dazu:** Pro Stufe **nur eine Kernaussage** lesbar halten; bei **Sound off** muessen **Nummer + Headline** allein schon den Nutzen transportieren.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Namensfuehrung
|
||||||
|
|
||||||
|
| Kontext | Bezeichnung |
|
||||||
|
|---------|-------------|
|
||||||
|
| Stufen-Headlines / Voiceover (Kunde) | **PowerOn Desktop**, „**Ihr KI-Arbeitsplatz**“ |
|
||||||
|
| Screenshot (Navigation in der App) | **AI Workspace** (ggf. Voice: „PowerOn Desktop – der AI Workspace“) |
|
||||||
|
| Marke | **PowerOn** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Empfohlene Stufenfolge (PowerOn Desktop)
|
||||||
|
|
||||||
|
Die Reihenfolge ist fuer **Verstaendnis** optimiert: erst **Gesamtbild**, dann **Arbeiten mit KI**, dann **Daten**, dann **Kontextsteuerung**, dann **Quellen**, dann **Kontrolle**, dann **Transparenz**, zuletzt **CTA**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 0 — Einstieg (optional, 2–3 s)
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Badge | `PowerOn` oder `Neu` |
|
||||||
|
| Nummer | — oder kleines `Start` |
|
||||||
|
| Headline | **Ihr KI-Arbeitsplatz in einem Workspace** |
|
||||||
|
| Text | Chat, Dateien und Quellen zusammen — statt staendig zwischen Tools zu wechseln. |
|
||||||
|
| Screenshot | Sehr kurze **Gesamtansicht** AI Workspace (drei Spalten andeuten) oder nur Logo + Farbflaeche |
|
||||||
|
| Footer (optional) | **Mehr Ueberblick. Weniger Medienbruch.** |
|
||||||
|
|
||||||
|
**Voiceover (optional):**
|
||||||
|
> „PowerOn Desktop: alles, was Sie fuer produktives Arbeiten mit KI brauchen — an einem Ort.“
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 1 — Ein Workspace, alles verbunden
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **1** |
|
||||||
|
| Headline | **Alles im Blick** |
|
||||||
|
| Text | Chats, Dateien und Datenquellen in **einer** Oberflaeche. Sie behalten den Faden vom ersten Satz bis zur fertigen Ausarbeitung. |
|
||||||
|
| Mittelband | `AI Workspace` |
|
||||||
|
| Screenshot | **Workspace Gesamtansicht:** links Tabs **Chats / Files / Sources**, Mitte Chat, rechts **Activity** oder **Preview** |
|
||||||
|
| Footer (optional) | **Ein Ort statt fuenf Fenster** |
|
||||||
|
|
||||||
|
**Was im Recording zeigen:** 2–3 s ruhig auf Layout verweilen; Cursor einmal links ueber die drei Tabs fuehren (ohne Hektik).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 2 — Mit KI arbeiten, mit Kontext
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **2** |
|
||||||
|
| Headline | **Fragen. Antworten. Nachvollziehbar.** |
|
||||||
|
| Text | Der **KI-Chat** nutzt Ihre gebundenen Inhalte — Antworten lassen sich an **Quellen** und Schritten nachvollziehen, nicht nur „aus dem Bauch“ der KI. |
|
||||||
|
| Mittelband | `KI-Chat` |
|
||||||
|
| Screenshot | Aktive Konversation mit **sichtbarer** Antwort; wenn moeglich **Quellen** oder Anhaenge andeuten |
|
||||||
|
| Footer (optional) | **Weniger Raetselraten** |
|
||||||
|
|
||||||
|
**Hinweis:** Demo-Frage so waehlen, dass die Antwort in 2–3 s lesbar ist (kurzer Absatz).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 3 — Dateien ablegen und mitnehmen
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **3** |
|
||||||
|
| Headline | **Einfach ablegen** |
|
||||||
|
| Text | **Dateien** im Workspace ablegen, sortieren und direkt als Kontext fuer die KI nutzen — analog zu eurem Trustee-Beispiel mit klarer **Drag-and-Drop**-Botschaft. |
|
||||||
|
| Mittelband | `Dateien` / `Files` |
|
||||||
|
| Screenshot | Tab **Files**: Ordnerliste oder **Drag-and-Drop**-Zone kurz zeigen (Datei markieren oder in Zone ziehen) |
|
||||||
|
| Footer (optional) | **Einfacher Drag and Drop** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 4 — Wer sieht was? Kontextsteuerung pro Datei
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **4** |
|
||||||
|
| Headline | **Wer sieht was?** |
|
||||||
|
| Text | Fuer jede Datei bestimmen Sie mit **einem Klick**, ob sie **persoenlich** bleibt, im **Team** (Instanz) sichtbar wird oder dem ganzen **Mandanten** zur Verfuegung steht. Die KI nutzt genau diesen Kontext. |
|
||||||
|
| Mittelband | `Kontextsteuerung` / `Scope` |
|
||||||
|
| Screenshot | Tab **Files** mit sichtbaren **Scope-Icons** neben den Dateinamen: 👤 Persoenlich, 👥 Instanz, 🏢 Mandant; Cursor klickt ein Icon — es wechselt zur naechsten Stufe |
|
||||||
|
| Footer (optional) | **Ein Klick. Drei Stufen.** |
|
||||||
|
|
||||||
|
**Was im Recording zeigen:** Files-Tab mit mind. 3 Dateien, jede mit sichtbarem Scope-Icon. Klick auf ein Icon — Wechsel von 👤 (Persoenlich) zu 👥 (Instanz). Kurz verweilen, damit die **Legende** unten sichtbar ist (Persoenlich / Instanz / Mandant).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 5 — Eigene Systeme einbinden
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **5** |
|
||||||
|
| Headline | **Ihre Datenquellen, Ihr Kontext** |
|
||||||
|
| Text | Cloud und Fachsysteme als **Quellen** anbinden — die KI arbeitet mit dem, was Sie **freigeben**, nicht mit beliebigem Internetwissen. |
|
||||||
|
| Mittelband | `Datenquellen` / `Sources` |
|
||||||
|
| Screenshot | Tab **Sources**: mind. eine verbundene Quelle sichtbar (farbcodierte Eintraege); keine echten Mandantendaten |
|
||||||
|
| Footer (optional) | **Gebunden statt raten** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 6 — Aenderungen pruefen, dann freigeben
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **6** |
|
||||||
|
| Headline | **Sie entscheiden** |
|
||||||
|
| Text | Im **Editor** sehen Sie Aenderungen im **Vergleich** — **annehmen** oder **ablehnen**. Tempo von KI, Kontrolle bei Ihnen. |
|
||||||
|
| Mittelband | `Aenderungen pruefen` / `File Edit Review` |
|
||||||
|
| Screenshot | **Editor** mit Diff / Vorher–Nachher oder Aktionsleiste **Accept / Reject** |
|
||||||
|
| Footer (optional) | **Freigabe bleibt bei Ihnen** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 7 — Transparenz bei der Arbeit
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **7** |
|
||||||
|
| Headline | **Nachvollziehbar fuer Teams** |
|
||||||
|
| Text | Die **Aktivitaets**-Ansicht zeigt, was im Hintergrund laeuft — gut fuer Vertrauen im Team und fuer Fuehrungskraefte, die Steuerung wollen. |
|
||||||
|
| Mittelband | `Aktivitaet` / `Activity` |
|
||||||
|
| Screenshot | Rechte Spalte **Activity** mit Eintraegen / Status |
|
||||||
|
| Footer (optional) | **Keine Blackbox** |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Stufe 8 — Abschluss + CTA (3–5 s)
|
||||||
|
|
||||||
|
| Feld | Vorschlag |
|
||||||
|
|------|-----------|
|
||||||
|
| Nummer | **8** oder weglassen |
|
||||||
|
| Headline | **PowerOn Desktop** |
|
||||||
|
| Text | Produktiv mit KI arbeiten — mit Struktur, Daten und Kontrolle. |
|
||||||
|
| Screenshot | Wieder **Gesamtansicht** oder nur Markenflaeche |
|
||||||
|
| Footer | **Demo auf poweron.swiss** — URL nach Freigabe |
|
||||||
|
|
||||||
|
| Element | Wert |
|
||||||
|
|---------|------|
|
||||||
|
| Primaer-Link | `https://____________` |
|
||||||
|
| UTM (optional) | `?utm_source=___&utm_medium=paid_social&utm_campaign=ai_desktop` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Kurzvariante (ca. 30 s)
|
||||||
|
|
||||||
|
Nur **Stufen 0 → 1 → 2 → 6 → 8** (Einstieg, Ueberblick, KI-Chat, Editor-Kontrolle, CTA).
|
||||||
|
Pro Stufe **~4 s**.
|
||||||
|
Optional **Stufe 4** (Kontextsteuerung) ergaenzen, wenn die Datei-Sichtbarkeit betont werden soll (+4 s).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Schnitt-Timeline (Referenz, alle 8 inkl. 0)
|
||||||
|
|
||||||
|
| Stufe | ca. Sekunden |
|
||||||
|
|-------|----------------|
|
||||||
|
| 0 Einstieg | 2–3 |
|
||||||
|
| 1 Workspace | 4–5 |
|
||||||
|
| 2 KI-Chat | 4–5 |
|
||||||
|
| 3 Dateien | 3–4 |
|
||||||
|
| 4 Kontextsteuerung | 3–4 |
|
||||||
|
| 5 Quellen | 3–4 |
|
||||||
|
| 6 Editor | 4–5 |
|
||||||
|
| 7 Aktivitaet | 3–4 |
|
||||||
|
| 8 CTA | 3–5 |
|
||||||
|
|
||||||
|
**Summe:** etwa **48–65 s** — kuerzbar durch Weglassen von 3, 4, 5 oder 7.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Mockups vs. Screen Recordings
|
||||||
|
|
||||||
|
| Stufe | Empfehlung |
|
||||||
|
|-------|------------|
|
||||||
|
| 0, 8 | Mockup oder reduzierte UI + starke Typo erlaubt |
|
||||||
|
| 1–7 | **Echtes Screen Recording** aus Demo-Instanz (Zoom 100–125 %, ruhiger Cursor) |
|
||||||
|
|
||||||
|
Uebergaenge: kurzer **Push** oder **Match-Cut** auf die naechste Schrittnummer — konsistent mit eurem Trustee-Stil (Farbe, Schrift, www.poweron.swiss).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Social-Media-Best Practices (kompakt)
|
||||||
|
|
||||||
|
- **Erste Sekunde:** Bewegung oder klare Schritt-`1`-Flaeche — sonst Swipe weg.
|
||||||
|
- **Ein Gedanke pro Stufe:** Headline + ein Satz Text reichen.
|
||||||
|
- **Ohne Ton:** alles Wichtige als **grossen Text** im Bild; Untertitel zusaetzlich.
|
||||||
|
- **9:16:** Text nicht in die untere Drittel-Social-UI legen; **sichere Zone** einplanen.
|
||||||
|
- **Keine Echtdaten** in Screens; Demo-Mandat, anonymisierte Namen/Dateien.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Untertitel-Zeile pro Stufe (Sprechertext)
|
||||||
|
|
||||||
|
1. „PowerOn Desktop — Ihr KI-Arbeitsplatz.“
|
||||||
|
2. „Alles verbunden: Chat, Dateien, Quellen.“
|
||||||
|
3. „Der Chat nutzt Ihren Kontext — nachvollziehbar.“
|
||||||
|
4. „Dateien ablegen — per Drag and Drop.“
|
||||||
|
5. „Persoenlich, Team oder Mandant — Sie bestimmen, wer was sieht.“
|
||||||
|
6. „Ihre Systeme als Quellen — bewusst freigegeben.“
|
||||||
|
7. „Aenderungen pruefen — annehmen oder ablehnen.“
|
||||||
|
8. „Aktivitaet sichtbar — keine Blackbox.“
|
||||||
|
9. „Demo: poweron.swiss.“
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Marken- und Rechts-Checkliste
|
||||||
|
|
||||||
|
- [ ] **PowerOn** Schreibweise
|
||||||
|
- [ ] Feature in UI: **AI Workspace**; Werbetext: **PowerOn Desktop**
|
||||||
|
- [ ] Keine personenbezogenen / Kundenechtdaten in Aufnahmen
|
||||||
|
- [ ] Musik lizenziert
|
||||||
|
- [ ] Starke Datenschutz-Aussagen nur nach Legal-Abstimmung
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Verwandte interne Doku
|
||||||
|
|
||||||
|
- [case-study-power-desktop.md](case-study-power-desktop.md) — Argumente und Tiefe
|
||||||
|
- [product-teaser-poweron.md](product-teaser-poweron.md) — Plattform
|
||||||
|
- [social-clip-poweron-treuhand.md](social-clip-poweron-treuhand.md) — anderes Feature, gleiches **Stufen-Prinzip** mit Screens
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
*Stand: April 2026*
|
||||||
160 docs/social-clip-poweron-treuhand.md — Normal file
@ -0,0 +1,160 @@
|
||||||
|
# Social-Media-Kurzclip: PowerOn Treuhand
|
||||||
|
|
||||||
|
Produktionshandbuch fuer **reine Screen-Aufnahmen**: On-Screen-Texte, Screen-Storyboard und Freigaben. Ziel: **20-40 s** (Reels, Shorts, LinkedIn). Ton: sachlich-treuhaenderisch.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Grundsetup fuer Screen-Only
|
||||||
|
|
||||||
|
- Kein Interview-Footage einplanen, nur UI-Aufnahmen aus PowerOn.
|
||||||
|
- On-Screen-Texte kurz halten (max. 6-8 Woerter pro Karte).
|
||||||
|
- 9:16 schneiden (1080x1920) oder aus 16:9 sauber croppen.
|
||||||
|
- Alle Daten in Demo-Instanz anonymisieren.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Shot 1: Customer Story (3-5 s)
|
||||||
|
|
||||||
|
### On-Screen — Variante A (generisch)
|
||||||
|
|
||||||
|
| Karte | Text | ca. Zeichen |
|
||||||
|
|-------|------|------------|
|
||||||
|
| 1 | Treuhand im Alltag | kurz |
|
||||||
|
| 2 | Ein Team. Viele Mandate. | kurz |
|
||||||
|
|
||||||
|
### On-Screen — Variante B (persona)
|
||||||
|
|
||||||
|
| Karte | Text |
|
||||||
|
|-------|------|
|
||||||
|
| 1 | Fiduciary / Treuhänder:in |
|
||||||
|
| 2 | Belege. Buchhaltung. Verantwortung. |
|
||||||
|
|
||||||
|
### Screen-Aufnahme (statt Interview)
|
||||||
|
|
||||||
|
- Navigiere zu **Treuhand > Uebersicht (Dashboard)**.
|
||||||
|
- Zeige 1-2 Sekunden die gesamte Seite, dann kurzer Fokus auf Kacheln.
|
||||||
|
- Sichtbar: `Positionen`, `Dokumente`, `Buchhaltung`.
|
||||||
|
|
||||||
|
### Optionales Voiceover
|
||||||
|
|
||||||
|
> „Treuhand im Alltag: viele Mandate, viele Belege, hohe Verantwortung.“
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Shot 2: Pain — Before (4-8 s)
|
||||||
|
|
||||||
|
### On-Screen — Standard (4 Karten, schneller Wechsel)
|
||||||
|
|
||||||
|
1. `Before:`
|
||||||
|
2. `Belege überall`
|
||||||
|
3. `manuell abtippen`
|
||||||
|
4. `keine klare Zuordnung`
|
||||||
|
|
||||||
|
### On-Screen — Schnitt-Variante (3 Karten)
|
||||||
|
|
||||||
|
1. `Before: Chaos in Postfach & Ordnern`
|
||||||
|
2. `Excel & Copy-Paste`
|
||||||
|
3. `Prüfung? Lücken in der Akte`
|
||||||
|
|
||||||
|
### Voiceover (optional)
|
||||||
|
|
||||||
|
> „Vorher: verteilte Belege, manuelle Schritte und wenig Transparenz.“
|
||||||
|
|
||||||
|
### Screen-Aufnahme (Pain visuell zeigen)
|
||||||
|
|
||||||
|
- In **Dokumente** kurz eine unstrukturierte Liste zeigen.
|
||||||
|
- Dann in **Positionen** kurz eine manuelle Erfassung andeuten (z. B. Tabelle ohne Zuordnung).
|
||||||
|
- Optional 0.5-1 s auf fehlende Zuordnung wechseln (noch nicht `Zuordnungen` zeigen).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Shot 3: After — PowerOn Treuhand (10-18 s)
|
||||||
|
|
||||||
|
### On-Screen (4 Karten, über den Screen gelegt)
|
||||||
|
|
||||||
|
1. `After:`
|
||||||
|
2. `PowerOn Treuhand`
|
||||||
|
3. `eine Instanz · klare Akte`
|
||||||
|
4. `bis zur Buchhaltung`
|
||||||
|
|
||||||
|
### Voiceover (optional)
|
||||||
|
|
||||||
|
> „Mit PowerOn Treuhand ist alles pro Mandat gebuendelt: Positionen, Dokumente, Zuordnungen und Sync.“
|
||||||
|
|
||||||
|
### Screen-Aufnahmen — Storyboard (Reihenfolge)
|
||||||
|
|
||||||
|
**Technik:** Demo- oder Schulungsmandat; **keine realen Mandantennamen**; Cursor ruhig; ideal **9:16** (1080x1920) oder Ausschnitt.
|
||||||
|
|
||||||
|
| Nr. | Dauer | Navigation | Sichtbar machen |
|
||||||
|
|-----|-------|------------|------------------|
|
||||||
|
| 1 | 3-4 s | Treuhand -> **Uebersicht** (Dashboard) | Kacheln: Positionen, Dokumente, Buchhaltung; Bereich **Instanz-Details** (Instanz, Mandant) |
|
||||||
|
| 2 | 2-3 s | **Positionen** | Tabelle mit mind. einer Zeile; optional **Sync-Status-Spalte**; kurz Zeile anklicken oder markieren |
|
||||||
|
| 3 | 2-3 s | **Positionen** (optional) | **Mehrfachauswahl** einer Zeile -> Aktion **Sync zur Buchhaltung** (nur wenn Demo OK); sonst Nr. 2 verlaengern |
|
||||||
|
| 4 | 2-3 s | **Dokumente** | Liste; **Download** auf eine Zeile oder Upload-Dialog starten (ohne sensible Dateinamen) |
|
||||||
|
| 5 | 2-3 s | **Zuordnungen** | Mind. eine Zeile: Verknuepfung **Position <-> Dokument** lesbar |
|
||||||
|
| 6 | 2-4 s | *Entweder* **Scannen / Hochladen** *oder* **Spesen Import** | **Scannen:** PDF/JPG per Drag-and-Drop -> **Pipeline-Status** (laeuft/fertig). **Spesen:** verbundene Microsoft-/Ordner-Ansicht + Automation sichtbar aktiv |
|
||||||
|
|
||||||
|
**Kürzestes Set (wenn Zeit knapp):** nur **1 → 2 → 5** (Dashboard, Positionen, Zuordnungen).
|
||||||
|
|
||||||
|
**Nicht noetig im Kurzclip:** lange Passagen **Buchhaltungseinstellungen** oder **Rollen & Rechte**; hoechstens der Hinweis „Buchhaltung konfiguriert“ auf dem Dashboard.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Shot 4: Closing (3-5 s)
|
||||||
|
|
||||||
|
### On-Screen — Englisch
|
||||||
|
|
||||||
|
- `Real business.`
|
||||||
|
- `Real wins.`
|
||||||
|
|
||||||
|
### On-Screen — Deutsch (Alternative)
|
||||||
|
|
||||||
|
- `Echtes Geschäft.`
|
||||||
|
- `Messbarer Gewinn.`
|
||||||
|
|
||||||
|
### On-Screen — Ultra-kurz
|
||||||
|
|
||||||
|
- `Weniger Handarbeit. Mehr Nachweis.`
|
||||||
|
|
||||||
|
### CTA (letzte Karte)
|
||||||
|
|
||||||
|
Setzen Sie die **verbindliche URL** nach Freigabe ein:
|
||||||
|
|
||||||
|
| Element | Wert |
|
||||||
|
|---------|------|
|
||||||
|
| Primär-Link | `https://____________` |
|
||||||
|
| Tracking (UTM) | optional `?utm_source=___&utm_medium=social` |
|
||||||
|
|
||||||
|
### Voiceover (optional)
|
||||||
|
|
||||||
|
> „Real business. Real wins.“
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Marken- und Rechts-Checkliste
|
||||||
|
|
||||||
|
- [ ] Schreibweise **PowerOn** (nicht Power On / poweron außerhalb der Domain)
|
||||||
|
- [ ] Feature-Bezeichnung konsistent mit Navigation: **Treuhand** bzw. internes Label „Trustee“
|
||||||
|
- [ ] Bei reinem Screen-Clip: keine Personen/Gesichter sichtbar
|
||||||
|
- [ ] Keine **geschützten Kundendaten** in Screens (Demo-Mandat)
|
||||||
|
- [ ] Musik: Lizenz / Ton über Plattform-Library
|
||||||
|
- [ ] Falls Mitarbeitende sichtbar: **Bildrechte** oder Silhouette/Blur
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Schnitt-Timeline (Referenz)
|
||||||
|
|
||||||
|
| Block | Ziel-Länge |
|
||||||
|
|-------|------------|
|
||||||
|
| Customer Story | 3-5 s |
|
||||||
|
| Before | 4-8 s |
|
||||||
|
| After (Screen-Only) | 10-18 s |
|
||||||
|
| Closing + CTA | 3-5 s |
|
||||||
|
|
||||||
|
**Gesamt:** ca. 20–35 s; bei längerer Musik + Logo-Stinger bis 40 s.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Verwandte interne Doku
|
||||||
|
|
||||||
|
- [product-teaser-billing-poweron.md](product-teaser-billing-poweron.md) — Markenkontext PowerOn
|
||||||
|
|
@ -55,6 +55,8 @@ Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/conn
|
||||||
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
|
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
|
||||||
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
|
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
|
||||||
STRIPE_API_VERSION = 2026-01-28.clover
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
|
||||||
|
|
||||||
# AI configuration
|
# AI configuration
|
||||||
Connector_AiOpenai_API_SECRET = DEV_ENC:Z0FBQUFBQnBaSnM4TWFRRmxVQmNQblVIYmc1Y0Q3aW9zZUtDWlNWdGZjbFpncGp2NHN2QjkxMWxibUJnZDBId252MWk5TXN3Yk14ajFIdi1CTkx2ZWx2QzF5OFR6LUx5azQ3dnNLaXJBOHNxc0tlWmtZcTFVelF4eXBSM2JkbHd2eTM0VHNXdHNtVUprZWtPVzctNlJsZHNmM20tU1N6Q1Q2cHFYSi1tNlhZNDNabTVuaEVGWmIydEhadTcyMlBURmw2aUJxOF9GTzR0dTZiNGZfOFlHaVpPZ1A1LXhhOEFtN1J5TEVNNWtMcGpyNkMzSl8xRnZsaTF1WTZrOUZmb0cxVURjSGFLS2dIYTQyZEJtTm90bEYxVWxNNXVPdTVjaVhYbXhxT3JsVDM5VjZMVFZKSE1tZnM9
|
Connector_AiOpenai_API_SECRET = DEV_ENC:Z0FBQUFBQnBaSnM4TWFRRmxVQmNQblVIYmc1Y0Q3aW9zZUtDWlNWdGZjbFpncGp2NHN2QjkxMWxibUJnZDBId252MWk5TXN3Yk14ajFIdi1CTkx2ZWx2QzF5OFR6LUx5azQ3dnNLaXJBOHNxc0tlWmtZcTFVelF4eXBSM2JkbHd2eTM0VHNXdHNtVUprZWtPVzctNlJsZHNmM20tU1N6Q1Q2cHFYSi1tNlhZNDNabTVuaEVGWmIydEhadTcyMlBURmw2aUJxOF9GTzR0dTZiNGZfOFlHaVpPZ1A1LXhhOEFtN1J5TEVNNWtMcGpyNkMzSl8xRnZsaTF1WTZrOUZmb0cxVURjSGFLS2dIYTQyZEJtTm90bEYxVWxNNXVPdTVjaVhYbXhxT3JsVDM5VjZMVFZKSE1tZnM9
|
||||||
|
|
@ -80,6 +82,8 @@ TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerl
|
||||||
# Debug Configuration
|
# Debug Configuration
|
||||||
APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
|
||||||
APP_DEBUG_CHAT_WORKFLOW_DIR = D:/Athi/Local/Web/poweron/local/debug
|
APP_DEBUG_CHAT_WORKFLOW_DIR = D:/Athi/Local/Web/poweron/local/debug
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = True
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = D:/Athi/Local/Web/poweron/local/debug/sync
|
||||||
|
|
||||||
# Mandate Pre-Processing Servers
|
# Mandate Pre-Processing Servers
|
||||||
PREPROCESS_ALTHAUS_CHAT_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGbEphQ3ZUMlFMQ2EwSGpoSE9NNzRJNTJtaGk1N0RGakdIYnVVeVFHZmF5OXB3QTVWLVNaZk9wNkhfQkZWRnVwRGRxem9iRzJIWXdpX1NIN2FwSExfT3c9PQ==
|
PREPROCESS_ALTHAUS_CHAT_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGbEphQ3ZUMlFMQ2EwSGpoSE9NNzRJNTJtaGk1N0RGakdIYnVVeVFHZmF5OXB3QTVWLVNaZk9wNkhfQkZWRnVwRGRxem9iRzJIWXdpX1NIN2FwSExfT3c9PQ==
|
||||||
|
|
|
||||||
|
|
@ -55,6 +55,8 @@ Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/conn
|
||||||
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
|
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
|
||||||
STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
|
STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
|
||||||
STRIPE_API_VERSION = 2026-01-28.clover
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
|
||||||
|
|
||||||
# AI configuration
|
# AI configuration
|
||||||
Connector_AiOpenai_API_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4MENkQ2xJVmE5WFZKUkh2SHJFby1YVXN3ZmVxRkptS3ZWRmlwdU93ZEJjSjlMV2NGbU5mS3NCdmFfcmFYTEJNZXFIQ3ozTWE4ZC1pemlQNk9wbjU1d3BPS0ZCTTZfOF8yWmVXMWx0TU1DamlJLVFhSTJXclZsY3hMVWlPcXVqQWtMdER4T252NHZUWEhUOTdIN1VGR3ltazEweXFqQ0lvb0hYWmxQQnpxb0JwcFNhRDNGWXdoRTVJWm9FalZpTUF5b1RqZlRaYnVKYkp0NWR5Vko1WWJ0Wmg2VWJzYXZ0Z3Q4UkpsTldDX2dsekhKMmM4YjRoa2RwemMwYVQwM2cyMFlvaU5mOTVTWGlROU8xY2ZVRXlxZzJqWkxURWlGZGI2STZNb0NpdEtWUnM9
|
Connector_AiOpenai_API_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4MENkQ2xJVmE5WFZKUkh2SHJFby1YVXN3ZmVxRkptS3ZWRmlwdU93ZEJjSjlMV2NGbU5mS3NCdmFfcmFYTEJNZXFIQ3ozTWE4ZC1pemlQNk9wbjU1d3BPS0ZCTTZfOF8yWmVXMWx0TU1DamlJLVFhSTJXclZsY3hMVWlPcXVqQWtMdER4T252NHZUWEhUOTdIN1VGR3ltazEweXFqQ0lvb0hYWmxQQnpxb0JwcFNhRDNGWXdoRTVJWm9FalZpTUF5b1RqZlRaYnVKYkp0NWR5Vko1WWJ0Wmg2VWJzYXZ0Z3Q4UkpsTldDX2dsekhKMmM4YjRoa2RwemMwYVQwM2cyMFlvaU5mOTVTWGlROU8xY2ZVRXlxZzJqWkxURWlGZGI2STZNb0NpdEtWUnM9
|
||||||
|
|
@ -78,6 +80,8 @@ TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerl
|
||||||
# Debug Configuration
|
# Debug Configuration
|
||||||
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
||||||
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
|
||||||
|
|
||||||
# Mandate Pre-Processing Servers
|
# Mandate Pre-Processing Servers
|
||||||
PREPROCESS_ALTHAUS_CHAT_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4UkNBelhvckxCQUVjZm94N3BZUDcxaEMyckE2dm1lRVhqODhrWU1SUjNXZ3dQZlVJOWhveXFkZXpobW5xT0NneGZ2SkNUblFmYXd0WTBYNTl3UmRnSWc9PQ==
|
PREPROCESS_ALTHAUS_CHAT_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4UkNBelhvckxCQUVjZm94N3BZUDcxaEMyckE2dm1lRVhqODhrWU1SUjNXZ3dQZlVJOWhveXFkZXpobW5xT0NneGZ2SkNUblFmYXd0WTBYNTl3UmRnSWc9PQ==
|
||||||
|
|
|
||||||
|
|
@ -49,12 +49,15 @@ Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/g
|
||||||
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
# Stripe Billing (both end with _SECRET for encryption script)
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||||
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||||
STRIPE_API_VERSION = 2026-01-28.clover
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQZG8WqlVsabrfFEu49pah
|
||||||
|
|
||||||
|
|
||||||
# AI configuration
|
# AI configuration
|
||||||
Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
|
Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
|
||||||
|
|
@ -78,6 +81,8 @@ TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerl
|
||||||
# Debug Configuration
|
# Debug Configuration
|
||||||
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
||||||
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
|
||||||
|
|
||||||
# Mandate Pre-Processing Servers
|
# Mandate Pre-Processing Servers
|
||||||
PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
|
PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
|
||||||
|
|
|
||||||
96 env_prod_forgejo.env — Normal file
@ -0,0 +1,96 @@
|
||||||
|
# Production Environment Configuration
|
||||||
|
|
||||||
|
# System Configuration
|
||||||
|
APP_ENV_TYPE = prod
|
||||||
|
APP_ENV_LABEL = Production Instance Forgejo
|
||||||
|
APP_KEY_SYSVAR = /srv/gateway/shared/secrets/master_key.txt
|
||||||
|
APP_INIT_PASS_ADMIN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3UnJRV0sySFlDblpXUlREclREaW1WbUt6bGtQYkdrNkZDOXNOLXFua1hqeFF2RHJnRXJ5VlVGV3hOZm41QjZOMlNTb0duYXNxZi05dXVTc2xDVkx0SVBFLUhncVo5T0VUZHE0UTZLWWw3ck09
|
||||||
|
APP_INIT_PASS_EVENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3QVpIY19DQVZSSzJmc2F0VEZvQlU1cHBhTEgxdHdnR3g4eW01aTEzYTUxc1gxTDR1RVVpSHRXYjV6N1BLZUdCUGlfOW1qdy0xSHFVRkNBcGZvaGlSSkZycXRuUllaWnpyVGRoeFg1dGEyNUk9
|
||||||
|
APP_API_URL = https://api.poweron.swiss
|
||||||
|
|
||||||
|
# PostgreSQL DB Host
|
||||||
|
DB_HOST=10.20.0.21
|
||||||
|
DB_USER=poweron_dev
|
||||||
|
DB_PASSWORD_SECRET = mypassword
|
||||||
|
DB_PORT=5432
|
||||||
|
|
||||||
|
# Security Configuration
|
||||||
|
APP_JWT_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3elhfV0Rnd2pQRjlMdkVwX1FnSmRhSzNZUlV5SVpaWXBNX1hpa2xPZGdMSWpnN2ZINHQxeGZnNHJweU5pZjlyYlY5Qm9zOUZEbl9wUEgtZHZXd1NhR19JSG9kbFU4MnFGQnllbFhRQVphRGQyNHlFVWR5VHQyUUpqN0stUmRuY2QyTi1oalczRHpLTEJqWURjZWs4YjZvT2U5YnFqcXEwdEpxV05fX05QMmtrPQ==
|
||||||
|
APP_TOKEN_EXPIRY=300
|
||||||
|
|
||||||
|
# CORS Configuration
|
||||||
|
APP_ALLOWED_ORIGINS=https://porta.poweron.swiss
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
APP_LOGGING_LOG_LEVEL = DEBUG
|
||||||
|
APP_LOGGING_LOG_DIR = srv/gateway/shared/logs
|
||||||
|
APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
|
||||||
|
APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
|
||||||
|
APP_LOGGING_CONSOLE_ENABLED = True
|
||||||
|
APP_LOGGING_FILE_ENABLED = True
|
||||||
|
APP_LOGGING_ROTATION_SIZE = 10485760
|
||||||
|
APP_LOGGING_BACKUP_COUNT = 5
|
||||||
|
|
||||||
|
# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
|
||||||
|
Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
|
||||||
|
Service_MSFT_AUTH_REDIRECT_URI=https://api.poweron.swiss/api/msft/auth/login/callback
|
||||||
|
Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
|
||||||
|
Service_MSFT_DATA_REDIRECT_URI = https://api.poweron.swiss/api/msft/auth/connect/callback
|
||||||
|
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||||
|
Service_GOOGLE_AUTH_REDIRECT_URI =
|
||||||
|
Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||||
|
Service_GOOGLE_DATA_REDIRECT_URI =
|
||||||
|
|
||||||
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
|
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||||
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
|
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||||
|
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||||
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQZG8WqlVsabrfFEu49pah
|
||||||
|
|
||||||
|
|
||||||
|
# AI configuration
|
||||||
|
Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
|
||||||
|
Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3TnhYdlhSLW5RbXJyMHFXX0V0bHhuTDlTaFJsRDl2dTdIUTFtVFAwTE8tY3hLbzNSMnVTLXd3RUZualN3MGNzc1kwOTIxVUN2WW1rYi1TendFRVVBSVNqRFVjckEzNExyTGNaUkJLMmozazUwemI1cnhrcEtZVXJrWkdaVFFramp3MWZ6RmY2aGlRMXVEYjM2M3ZlbmxMdnNCRDM1QWR0Wmd6MWVnS1I1c01nV3hRLXg3d2NTZXVfTi1Wdm16UnRyNGsyRTZ0bG9TQ1g1OFB5Z002bmQ3QT09
|
||||||
|
Connector_AiPerplexity_API_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6NG5CTm9QOFZRV1BIVC0tV2RKTGtCQWFOUXlpRnhEdjN1U2x3VUdDamtIZV9CQzQ5ZmRmcUh3ZUVUa0NxbGhlenVVdWtaYjdpcnhvUlNFLXZfOWh2dWFZai0xUGU5cWpuYmpnRVRWakh0RVNUUTFyX0w5V0NXVWFrQlZuOTd5TkI0eVRoQ0ZBSm9HYUlYamoyY1FCMmlBPT0=
|
||||||
|
Connector_AiTavily_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3NmItcDh6V0JpcE5Jc0NlUWZqcmllRHB5eDlNZmVnUlNVenhNTm5xWExzbjJqdE1GZ0hTSUYtb2dvdWNhTnlQNmVWQ2NGVDgwZ0MwMWZBMlNKWEhzdlF3TlZzTXhCZWM4Z1Uwb18tSTRoU1JBVTVkSkJHOTJwX291b3dPaVphVFg=
|
||||||
|
Connector_AiPrivateLlm_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGanZ6U3pzZWkwXzVPWGtIQ040XzFrTXc5QWRnazdEeEktaUJ0akJmNnEzbWUzNHczLTJfc2dIdzBDY0FTaXZYcDhxNFdNbTNtbEJTb2VRZ0ZYd05hdlNLR1h6SUFzVml2Z1FLY1BjTl90UWozUGxtak1URnhhZmNDRWFTb0dKVUo=
|
||||||
|
Connector_AiMistral_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGc2tQc2lvMk1YZk01Q1dob1U5cnR0dG03WWE3WkpoOWo0SEpvLU9Rc2lCNDExdy1wZExaN3lpT2FEQkxnaHRmWmZUUUZUUUJmblZreGlpaFpOdnFhbzlEd1RsVVJtX216cmhxTm5BcTN2eUZ2T054cDE5bmlEamJ3NGR6MVpFQnA=
|
||||||
|
|
||||||
|
Service_MSFT_TENANT_ID = common
|
||||||
|
|
||||||
|
# Google Cloud Speech Services configuration
|
||||||
|
Connector_GoogleSpeech_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4NFQxaF9uN3h1cVB6dnZid1c1R1VfNDlSQ1NHMEVDZWtKanpMQ29CLXc1MXBqRm1hQ0YtWVhaejBMY1ZTOEFEVlpWQ3hrYkFza1E2RDNsYkdMMndNR0VGNTMwVDRGdURJY3hyaVFxVjEtSEYwNHJzeWM3WmlpZW9jU2E3NTgycEV2allqQ3dJRTNyRFAzaDJ6dklKeXpNRkJhYjFzUkptN2dpbkNpMklrcGxuZl9vTkt3T0JvNm1YTXd5UlkwZWptUXdWVFpnV2J4X3J2WUhIUlFkSElFVnlqMnlJRnNHTnlpMWs2R1dZc2ROWjNYZG85cndmd1E5cUZnVmZRYnVjTG43dXFmSWd2bGFfVWFWSmtpWkpndWNlSUNwcnFNU2NqZXFaV0xsY3l3SElLRkVHcHZGZERKV1ltcGhTS0dhTko1VTJLYzNoZjRkSGVEX3dTMWVVTmdDczV5cE1JQUdSbUJGUm11eFhTVjJHbkt0SzB4UG1Dc2xmbnp1Y041Y2RTeWRuWGdmQy1sTGx0MGtnM2VJQ3EyLXViRlNhTU9ybzZkR1N1bXE5SXhlZENWRFpWSGlYOWx4SUQ3UlR0ZEVxQkxNakRUVFRiUmFnbklOalphLUZkRFVVaXBRUk5NZW5PaUZydTFmQkNPSTdTVTNZd0plWXllNVFJdmN4MVcyTGlwMGFtVjBzOGRxR1FjbzhfYW5zdTB0ZEZBTTJhakltazh1dktNMUZsOUItdFdTb1pIaUxySllXNkdlY20zUS0wTnpFNTB2SU5acG1VcXhyaHBmME8takw3RDh5T043T2VGOV92TzNya2pWSlpYVjZDdXlZcjM3a0hPTlhkaW9oQmxqQlpGRFYyTTY4WmZmT3k4Tk1tdXRuSGdTUVpNT2NKenhXb05PdXBfSEdhMTNxNjdpNXlKUUI2YUgydFFPX1VvXzVJb0UxWTU2YVNiNDQ0QndZanhMMHR1cGdHWGhvcEg1QXEtSXZJdTdZUE12ZEVVWkF4QmtsQS1GYnY3SFIxSHlsOGVfcEpGS1A4QUVEQWNEOFZYYlljQ3ByTU03YU16Y0UzUnJQZEprSWNjT1ZXVEtDWi03Y3ZzRVdYUTlabXJISEo5THRHVXVuM0xqbzA4bGVlZVpOMk1QMmptb21tV0pTMlVoOXdWVU95UW1iQmttc2w1RG9mMWwxXzg1T2IxYUVmTUJEZkpUdTFDTzZ3RlBFeUFiX01iRTZNWkNaSG45TkFOM2pzbUJRZ2N0VFpoejJUTG1RODY3TzZpSzVkYUQzaEpfY2pSTkRzU0VpanlkdXVQQmJ2WU5peno4QWNLTDVxZTlhSHI3NnNiM0k0Y3JkQ0xaOU05bGtsQl8zQklvaktWSDZ4aVp2MHlYelJuUDJyTU9CZC1OZjJxNFc1dDcwSUlxaVh1LTMyWWFwU0IwUU9kOUFpMWpnOERtLTh1VmJiNGVwcXBMbU5fMjVZc0hFbmxQT2puSFd1ZGpyTkphLU5sVlBZWWxrWEZrWGJQWmVkN19tZFZfZ1l1V3pSWlA0V0ZxM2lrWnl2NU9WeTdCbDROSmhfeENKTFhMVXk1d195S2JMUFJoRXZjcVo4V2g0MTNKRnZhUE1wRkNPM3FZOGdVazJPeW5PSGpuZnFGTTdJMkRnam5rUlV6NFlqODlIelRYaEN5VjdJNnVwbllNODNCTFRHMWlXbmM1VlRxbXB3Wm9LRjVrQUpjYzRNMThUMWwwSVhBMUlyamtPZnE4R0o4bEdHay1zMjR5RDJkZ1lYRHZaNHVHU2otR3ZpN25LZlEySEU0UmdTNzJGVHNWQXMyb0dVMV9WUE13ODhZWUFaakxGOWZieGNXZkNYRnV5djEyWTZLcmdrajRBLU1rS1Z0VVRkOWlDMU9fMGVmYXFhZXJGMUhpNkdmb2hkbzZ1OWV6VlNmVzNISjVYTFh6SjJNdWR5MWZidE8yVEo2dnRrZXhMRXBPczUwTG13OGhNUVpIQm0zQmRKRnJ0Nl8wNW1Ob0dHRDVpU0NWREV3TkY2SjktdVBkMFU1ZXBmSFpHQ3FHNTRZdTJvaExpZVEtLTU4YTVyeFBpNDdEajZtWUc4c1dBeUJqQ3NIY1NLS0FIMUxGZzZxNFNkOG9ORGNHWWJCVnZuNnJVTEtoQi1mRTZyUl81ZWJJMi1KOGdERzBhNVRZeHRYUUlqY2JvMFlaNHhWMU9pWFFiZjdaLUhkaG15TTBPZVlkS2R5UVdENTI4QVFiY1RJV0ZNZnlpVWxfZmlnN1BXbGdrbjFGUkhzYl9qeHBxVVJacUE4bjZETENHVFpSamh0NVpOM2hMYTZjYzBuS3J0a3hhZGxSM1V5UHd2OTU3ZHY0Yy1xWDBkWUk0Ymp0MWVrS3YzSktKODhQZnY3QTZ1Wm1VZkZJbS1jamdreks1ZlhpQjFOUDFiOHJ2Nm9NcmdTdU5LQXV2RkZWZEFNZnVKUjVwcVY3dDdhQnpmRVJ6SmlvVXpDM0ZiYXh5bGE2X04tTE9qZ3BiTnN3TF9ZaFRxSUpjNjB1dXZBcy1TZHRHTjFjSUR3WUl4cE9VNzB5Rkk4U3Z1SVZYTl9sYXlZVk83UnFrMlVmcnBpam9lRUlCY19DdVJwOXl2TVVDV1pMRFZTZk9MY3Z1eXA0MnhGazc5YllQaWtOeTc4NjlOa2lGY05RRzY1cG9nbGpYelc4c3FicWxWRkg0YzRSamFlQ19zOU14YWJreU9pNDREZVJ3a0REMUxGTzF1XzI1bEF3VXVZRjlBeWFiLXJsOXgza3VZem1WckhWSnVNbDBNcldadU8xQ3RwOTl5NGgtVlR0QklCLWl5WkE4V1FlQTBCOVU1RE9sQlRrYUNZOGdfUmEwbEZvUTFGUEFWVmQ4V1FhOU9VNjZqemRpZm1sUDhZQTJ0YVBRbWZldkF5THV4QXpfdUtNZ0tlcGdSRFM3c0lDOTNQbnBxdmxYYWNpTmI3MW9BMlZIdTQ5RldudHpNQWQ5NDNPLVVTLXVVNzdHZXh4UXpZa3dVa2J4dTFDV1RkYjRnWXU2M3lJekRYWGNMcWU5OVh6U2xZWDh6MmpqcnpiOHlnMjA5S3RFQm1NZjNSM21adkVnTUpSYVhkTzNkNnJCTmljY0x1cl9kMkx3UHhySjZEdHREanZERzNEUTFlTkR0NWlBczAtdmFGTjdZNVpTMlkxV2czYW5RN2lqemg4eUViZDV6RjdKNXdFcUlvcVhoNkJ6eVJkR1pua1hnNzQwOEs2TXJYSlpGcW9qRDU2QjBOWFFtdXBJRkRKbmdZUF9ZSmRPVEtvUjVhLTV1NjdXQjRhS0duaEtJb2FrQnNjUTRvdFMxdkdTNk1NYlFHUFhhYTJ1eUN3WHN4UlJ4UjdrZjY0SzFGYWVFN1k0cGJnc1RjNmFUenR4NHljbVhablZSWHZmUVN3cXRHNjhsX1BSZWEzdTJUZFA0S2pTaU9YMnZIQ1ZPcGhWMFJqZkVEMWRMR1
h3SnU0Z2FzZ3VGM3puNzdhVjhaQXNIWHFsbjB0TDVYSFdSNV9rdWhUUUhSZHBGYkJIVDB5SDdlMC13QTVnS0g5Qkg5RGNxSGJlelVndUhPcEQ0QkRKMTJTZUM1OXJhVm0zYjU0OVY2dk9MQVBheklIQXpVNW9Yc0ROVjEzaFZTWmVxYlBWMlNlSzladzJ6TmNuMG5FVVZkN1VZN1pfS2ZHa0lQcE80S24wSnQtVlJVV09OVWJ3M09YMkZpV2ktVF9ENHhKU2dfYUQ2aUVyamk0VHJHQmVfVHU4clpUTFoteW5aSWRPV1M0RDRMTms4NGRoYmJfVE82aUl2X3VieVJOdDhBQmRwdzdnRTVBNzZwaW93dUlZb3ZRYUtOeG9ULWxvNVp5a0haSjdkcUhRb3d6UGIxRUpCVkVYX2d6TkRqQVozUWxkNGFoc1FXYVd2YWNkME9Qclo0bjYxMFRWTy1nbnI5NTBJNzRMMDluUXRKYTFqQUN4d0d5aHVlamN3Tkk3NWJXeXR0TW9BeUg5Vnp4Q2RnZUY3b3AtMDlrNmlrSGR0eGRtbUdUd2lFRWg4MklEeWJHN2wwZEpVSXMxNDNOWjRFS0tPdWxhMmFCckhfRENIY184aEFDZXNrRDl2dHQtQW12UnRuQXJjaDJoTUpiYkNWQUtfRG9GMUZoNWM4UnBYZ29RWWs2NHcyUm5kdTF3Vk1GeFpiRUJLaVZ2UGFjbi1jV3lMV0N2ZDl4VERPN295X01NNG56ZjZkRzZoYUtmY1E5NlVXemx2SnVfb19iSXg0R2M3Mjd1a2JRPT0=
|
||||||
|
|
||||||
|
# Feature SyncDelta JIRA configuration
|
||||||
|
Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4d3Z4d2x6N1FhUktMU0RKbkxfY2pTQkRzXzJ6UXVEbDNCaFM3UHMtQVFGYzNmYWs4N0lMM1R2SFJuZTVFVmx6MGVEbXc5U3NOTnY1TWN0ZDNaamlHQWloalM3VldmREJNSHQ1TlVkSVFJMTVhQWVGSVRMTGw4UTBqNGlQZFVuaHp4WUlKemR5UnBXZlh0REJFLXJ4ejR3PT0=
|
||||||
|
|
||||||
|
# Teamsbot Browser Bot Service
|
||||||
|
TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
|
||||||
|
|
||||||
|
# Debug Configuration
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
|
||||||
|
|
||||||
|
# Mandate Pre-Processing Servers
|
||||||
|
PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
|
||||||
|
|
||||||
|
# Preprocessor API Configuration
|
||||||
|
PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
|
||||||
|
PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
|
||||||
|
|
||||||
|
# Azure Communication Services Email Configuration
|
||||||
|
MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
|
||||||
|
MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
|
||||||
|
|
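Zur Einordnung der `*_SECRET`-Werte mit `DEV_ENC:`- / `INT_ENC:`- / `PROD_ENC:`-Präfix (siehe Kommentar «both end with _SECRET for encryption script»): eine minimale Skizze, wie ein Loader solche Werte beim Start entschlüsseln könnte. Annahmen, nicht aus dem Repo belegt: Fernet-basierte Verschlüsselung und Master-Key-Datei aus `APP_KEY_SYSVAR`; alle Funktionsnamen sind rein illustrativ.

```python
# Illustrative sketch only (assumption: Fernet-based encryption, master key
# read from the file referenced by APP_KEY_SYSVAR). Not the repo's actual script.
import base64
import os

from cryptography.fernet import Fernet

ENC_PREFIXES = ("DEV_ENC:", "INT_ENC:", "PROD_ENC:")


def load_master_key() -> bytes:
    key_path = os.environ["APP_KEY_SYSVAR"]
    with open(key_path, "rb") as handle:
        return handle.read().strip()


def resolve_secret(raw_value: str) -> str:
    """Decrypt *_SECRET values; values without a known prefix pass through."""
    for prefix in ENC_PREFIXES:
        if raw_value.startswith(prefix):
            token = base64.b64decode(raw_value[len(prefix):])
            return Fernet(load_master_key()).decrypt(token).decode("utf-8")
    return raw_value
```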
@ -246,6 +246,8 @@ class AiPrivateLlm(BaseConnectorAi):
|
||||||
(OperationTypeEnum.DATA_GENERATE, 8),
|
(OperationTypeEnum.DATA_GENERATE, 8),
|
||||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||||
(OperationTypeEnum.NEUTRALIZATION_TEXT, 9),
|
(OperationTypeEnum.NEUTRALIZATION_TEXT, 9),
|
||||||
|
# Agent loop (workspace etc.) selects models by OperationTypeEnum.AGENT for streaming.
|
||||||
|
(OperationTypeEnum.AGENT, 8),
|
||||||
),
|
),
|
||||||
version="qwen2.5:7b",
|
version="qwen2.5:7b",
|
||||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: PRICE_TEXT_PER_CALL
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: PRICE_TEXT_PER_CALL
|
||||||
|
|
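Zur neu ergänzten Zeile `(OperationTypeEnum.AGENT, 8)`: eine minimale Skizze, wie eine Registry anhand solcher Tupel den Connector für eine Operation auswählen könnte. Annahmen, nicht aus dem Diff belegt: die Tupel bedeuten (Operation, Priorität), die höchste Priorität gewinnt; `ConnectorRegistry` und alle Feldnamen sind frei erfunden.

```python
# Illustrative sketch only - ConnectorRegistry and its attributes are assumptions,
# not part of the diff above. It shows how (OperationTypeEnum, priority) tuples
# could drive model selection, e.g. for the streaming agent loop.
from typing import Iterable, Optional, Tuple


class ConnectorRegistry:
    def __init__(self, connectors: Iterable) -> None:
        # Each connector is expected to expose `.operations`,
        # an iterable of (OperationTypeEnum, priority) tuples.
        self.connectors = list(connectors)

    def resolve(self, operation) -> Optional[object]:
        """Return the connector with the highest priority for `operation`."""
        best: Tuple[int, Optional[object]] = (-1, None)
        for connector in self.connectors:
            for op, priority in connector.operations:
                if op == operation and priority > best[0]:
                    best = (priority, connector)
        return best[1]


# Hypothetical usage: the agent loop streams over the connector returned for
# OperationTypeEnum.AGENT - with the diff above that could be AiPrivateLlm (8).
```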
|
||||||
|
|
@ -537,13 +537,19 @@ class DatabaseConnector:
|
||||||
try:
|
try:
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
"""
|
"""
|
||||||
SELECT column_name FROM information_schema.columns
|
SELECT column_name, data_type
|
||||||
|
FROM information_schema.columns
|
||||||
WHERE LOWER(table_name) = LOWER(%s) AND table_schema = 'public'
|
WHERE LOWER(table_name) = LOWER(%s) AND table_schema = 'public'
|
||||||
""",
|
""",
|
||||||
(table,),
|
(table,),
|
||||||
)
|
)
|
||||||
|
existing_column_rows = cursor.fetchall()
|
||||||
existing_columns = {
|
existing_columns = {
|
||||||
row["column_name"] for row in cursor.fetchall()
|
row["column_name"] for row in existing_column_rows
|
||||||
|
}
|
||||||
|
existing_column_types = {
|
||||||
|
row["column_name"]: (row["data_type"] or "").lower()
|
||||||
|
for row in existing_column_rows
|
||||||
}
|
}
|
||||||
|
|
||||||
# Desired columns based on model
|
# Desired columns based on model
|
||||||
|
|
@ -569,6 +575,31 @@ class DatabaseConnector:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
f"Could not add column '{col}' to '{table}': {add_err}"
|
f"Could not add column '{col}' to '{table}': {add_err}"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Targeted type-downgrade: if a model field has been
|
||||||
|
# changed from a structured type (JSONB) to a plain
|
||||||
|
# TEXT field, alter the column so writes don't fail.
|
||||||
|
# JSONB -> TEXT is a safe, lossless cast (JSONB is
|
||||||
|
# rendered as its JSON-text representation; the
|
||||||
|
# corresponding Pydantic ``@field_validator`` is
|
||||||
|
# responsible for re-decoding legacy data on read).
|
||||||
|
for col in sorted(desired_columns & existing_columns):
|
||||||
|
if col == "id":
|
||||||
|
continue
|
||||||
|
desired_sql = (model_fields.get(col) or "").upper()
|
||||||
|
currentType = existing_column_types.get(col, "")
|
||||||
|
if desired_sql == "TEXT" and currentType == "jsonb":
|
||||||
|
try:
|
||||||
|
cursor.execute(
|
||||||
|
f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE TEXT USING "{col}"::text'
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"Downgraded column '{col}' from JSONB to TEXT on '{table}'"
|
||||||
|
)
|
||||||
|
except Exception as alter_err:
|
||||||
|
logger.warning(
|
||||||
|
f"Could not downgrade column '{col}' on '{table}': {alter_err}"
|
||||||
|
)
|
||||||
except Exception as ensure_err:
|
except Exception as ensure_err:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
f"Could not ensure columns for existing table '{table}': {ensure_err}"
|
f"Could not ensure columns for existing table '{table}': {ensure_err}"
|
||||||
|
|
@ -1365,6 +1396,192 @@ class DatabaseConnector:
|
||||||
self.connection.rollback()
|
self.connection.rollback()
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
def recordCreateBulk(
|
||||||
|
self, model_class: type, records: List[Union[Dict[str, Any], BaseModel]]
|
||||||
|
) -> int:
|
||||||
|
"""Bulk-insert many records in a single transaction.
|
||||||
|
|
||||||
|
Use this instead of calling recordCreate() in a tight loop when importing
|
||||||
|
large datasets (>100 rows). Performance gain is roughly two orders of
|
||||||
|
magnitude because:
|
||||||
|
- one network round-trip via execute_values() instead of N
|
||||||
|
- one COMMIT instead of N
|
||||||
|
- initial ID is registered once for the whole batch instead of every row
|
||||||
|
|
||||||
|
Returns the number of rows successfully inserted. Caller is responsible
|
||||||
|
for catching exceptions; on any error the transaction is rolled back so
|
||||||
|
the table stays consistent (all-or-nothing).
|
||||||
|
"""
|
||||||
|
if not records:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
table = model_class.__name__
|
||||||
|
if not self._ensureTableExists(model_class):
|
||||||
|
raise ValueError(f"Table {table} does not exist")
|
||||||
|
|
||||||
|
fields = _get_model_fields(model_class)
|
||||||
|
columns = ["id"] + [f for f in fields.keys() if f != "id"]
|
||||||
|
modelFields = model_class.model_fields
|
||||||
|
|
||||||
|
effectiveUserId = _current_user_id.get()
|
||||||
|
if effectiveUserId is None:
|
||||||
|
effectiveUserId = self.userId
|
||||||
|
currentTime = getUtcTimestamp()
|
||||||
|
|
||||||
|
normalised: List[Dict[str, Any]] = []
|
||||||
|
for raw in records:
|
||||||
|
if isinstance(raw, BaseModel):
|
||||||
|
rec = raw.model_dump()
|
||||||
|
elif isinstance(raw, dict):
|
||||||
|
rec = raw.copy()
|
||||||
|
else:
|
||||||
|
raise ValueError("Bulk record must be a Pydantic model or dictionary")
|
||||||
|
if "id" not in rec or not rec["id"]:
|
||||||
|
rec["id"] = str(uuid.uuid4())
|
||||||
|
createdTs = rec.get("sysCreatedAt")
|
||||||
|
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
||||||
|
rec["sysCreatedAt"] = currentTime
|
||||||
|
if effectiveUserId:
|
||||||
|
rec["sysCreatedBy"] = effectiveUserId
|
||||||
|
elif not rec.get("sysCreatedBy") and effectiveUserId:
|
||||||
|
rec["sysCreatedBy"] = effectiveUserId
|
||||||
|
rec["sysModifiedAt"] = currentTime
|
||||||
|
if effectiveUserId:
|
||||||
|
rec["sysModifiedBy"] = effectiveUserId
|
||||||
|
normalised.append(rec)
|
||||||
|
|
||||||
|
rows = [self._coerceRowForInsert(rec, columns, fields, modelFields) for rec in normalised]
|
||||||
|
|
||||||
|
col_names = ", ".join([f'"{c}"' for c in columns])
|
||||||
|
updates = ", ".join(
|
||||||
|
[f'"{c}" = EXCLUDED."{c}"' for c in columns[1:]
|
||||||
|
if c not in ("sysCreatedAt", "sysCreatedBy")]
|
||||||
|
)
|
||||||
|
sql = (
|
||||||
|
f'INSERT INTO "{table}" ({col_names}) VALUES %s '
|
||||||
|
f'ON CONFLICT ("id") DO UPDATE SET {updates}'
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._ensure_connection()
|
||||||
|
with self.connection.cursor() as cursor:
|
||||||
|
psycopg2.extras.execute_values(cursor, sql, rows, page_size=500)
|
||||||
|
self.connection.commit()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Bulk insert into {table} failed (n={len(rows)}): {e}")
|
||||||
|
try:
|
||||||
|
self.connection.rollback()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
raise
|
||||||
|
|
||||||
|
if self.getInitialId(model_class) is None and normalised:
|
||||||
|
self._registerInitialId(table, normalised[0]["id"])
|
||||||
|
logger.info(f"Registered initial ID {normalised[0]['id']} for table {table}")
|
||||||
|
|
||||||
|
return len(rows)
|
||||||
|
|
||||||
|
def _coerceRowForInsert(
|
||||||
|
self,
|
||||||
|
record: Dict[str, Any],
|
||||||
|
columns: List[str],
|
||||||
|
fields: Dict[str, str],
|
||||||
|
modelFields: Dict[str, Any],
|
||||||
|
) -> tuple:
|
||||||
|
"""Convert one record dict to a positional tuple matching `columns`.
|
||||||
|
|
||||||
|
Mirrors the per-column coercion logic in `_save_record` so that bulk and
|
||||||
|
single inserts produce identical on-disk values (timestamps as floats,
|
||||||
|
enums as strings, vectors as pgvector text, JSONB as JSON strings).
|
||||||
|
"""
|
||||||
|
import json as _json
|
||||||
|
out = []
|
||||||
|
for col in columns:
|
||||||
|
value = record.get(col)
|
||||||
|
if col in ("sysCreatedAt", "sysModifiedAt") and value is not None:
|
||||||
|
if isinstance(value, str):
|
||||||
|
try:
|
||||||
|
value = float(value)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
elif hasattr(value, "value"):
|
||||||
|
value = value.value
|
||||||
|
elif col in fields and _isVectorType(fields[col]) and value is not None:
|
||||||
|
if isinstance(value, list):
|
||||||
|
value = f"[{','.join(str(v) for v in value)}]"
|
||||||
|
elif col in fields and fields[col] == "JSONB" and value is not None:
|
||||||
|
if isinstance(value, (dict, list)):
|
||||||
|
value = _json.dumps(value)
|
||||||
|
elif isinstance(value, str):
|
||||||
|
try:
|
||||||
|
_json.loads(value)
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
value = _json.dumps(value)
|
||||||
|
elif hasattr(value, "model_dump"):
|
||||||
|
value = _json.dumps(value.model_dump())
|
||||||
|
else:
|
||||||
|
value = _json.dumps(value)
|
||||||
|
out.append(value)
|
||||||
|
return tuple(out)
|
||||||
|
|
||||||
|
def recordDeleteWhere(
|
||||||
|
self, model_class: type, recordFilter: Dict[str, Any]
|
||||||
|
) -> int:
|
||||||
|
"""Delete all records matching a simple equality filter, in one statement.
|
||||||
|
|
||||||
|
Replaces the N+1 pattern `for r in getRecordset(...): recordDelete(r.id)`.
|
||||||
|
Returns the number of rows actually deleted. If the table holds the
|
||||||
|
initial ID and that row gets deleted, the initial ID registration is
|
||||||
|
cleared so the next insert can re-register a fresh one.
|
||||||
|
"""
|
||||||
|
if not recordFilter:
|
||||||
|
raise ValueError("recordDeleteWhere requires a non-empty recordFilter (refusing to truncate)")
|
||||||
|
|
||||||
|
table = model_class.__name__
|
||||||
|
if not self._ensureTableExists(model_class):
|
||||||
|
return 0
|
||||||
|
|
||||||
|
fields = _get_model_fields(model_class)
|
||||||
|
clauses: List[str] = []
|
||||||
|
params: List[Any] = []
|
||||||
|
for key, val in recordFilter.items():
|
||||||
|
if key not in fields and key != "id":
|
||||||
|
raise ValueError(f"recordDeleteWhere: unknown column {table}.{key}")
|
||||||
|
clauses.append(f'"{key}" = %s')
|
||||||
|
params.append(val)
|
||||||
|
whereSql = " AND ".join(clauses)
|
||||||
|
|
||||||
|
initialId = self.getInitialId(model_class)
|
||||||
|
try:
|
||||||
|
self._ensure_connection()
|
||||||
|
with self.connection.cursor() as cursor:
|
||||||
|
if initialId is not None:
|
||||||
|
cursor.execute(
|
||||||
|
f'SELECT 1 FROM "{table}" WHERE "id" = %s AND ' + whereSql,
|
||||||
|
[initialId, *params],
|
||||||
|
)
|
||||||
|
initialIsAffected = cursor.fetchone() is not None
|
||||||
|
else:
|
||||||
|
initialIsAffected = False
|
||||||
|
|
||||||
|
cursor.execute(f'DELETE FROM "{table}" WHERE ' + whereSql, params)
|
||||||
|
deleted = cursor.rowcount or 0
|
||||||
|
self.connection.commit()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Bulk delete from {table} failed (filter={recordFilter}): {e}")
|
||||||
|
try:
|
||||||
|
self.connection.rollback()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
raise
|
||||||
|
|
||||||
|
if deleted and initialIsAffected:
|
||||||
|
self._removeInitialId(table)
|
||||||
|
logger.info(f"Initial ID for table {table} cleared (bulk-delete removed it)")
|
||||||
|
if deleted:
|
||||||
|
logger.info(f"recordDeleteWhere: deleted {deleted} rows from {table} where {recordFilter}")
|
||||||
|
return deleted
|
||||||
|
|
||||||
def getInitialId(self, model_class: type) -> Optional[str]:
|
def getInitialId(self, model_class: type) -> Optional[str]:
|
||||||
"""Returns the initial ID for a table."""
|
"""Returns the initial ID for a table."""
|
||||||
table = model_class.__name__
|
table = model_class.__name__
|
||||||
|
|
|
||||||
|
|
@ -24,8 +24,21 @@ class ServiceAdapter(ABC):
|
||||||
"""Standardized operations for a single service of a provider."""
|
"""Standardized operations for a single service of a provider."""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> list:
|
async def browse(
|
||||||
"""List items (files/folders) at the given path."""
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> list:
|
||||||
|
"""List items (files/folders) at the given path.
|
||||||
|
|
||||||
|
``limit`` is an optional upper bound for the number of returned entries.
|
||||||
|
Adapters that talk to paginated APIs should keep paging until either
|
||||||
|
the API is exhausted OR ``limit`` is reached. ``None`` means "use the
|
||||||
|
adapter's sensible default" (NOT "unlimited") so an over-eager caller
|
||||||
|
cannot accidentally pull millions of records. Adapters that have no
|
||||||
|
pagination (single page result) may ignore this parameter.
|
||||||
|
"""
|
||||||
...
|
...
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
|
|
@ -39,8 +52,16 @@ class ServiceAdapter(ABC):
|
||||||
...
|
...
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> list:
|
async def search(
|
||||||
"""Search for items matching the query."""
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> list:
|
||||||
|
"""Search for items matching the query.
|
||||||
|
|
||||||
|
See :meth:`browse` for the semantics of ``limit``.
|
||||||
|
"""
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
419
modules/connectors/connectorTicketsRedmine.py
Normal file
419
modules/connectors/connectorTicketsRedmine.py
Normal file
|
|
@ -0,0 +1,419 @@
|
||||||
|
# Copyright (c) 2026 Patrick Motsch
|
||||||
|
# All rights reserved.
|
||||||
|
"""Redmine REST connector.
|
||||||
|
|
||||||
|
Async / aiohttp port of the SSS pilot client
|
||||||
|
(``pamocreate/projects/valueon/sss/project_mars/redmine-sync/code/_redmineClient.py``)
|
||||||
|
plus the read-side helpers required by ``serviceRedmine`` and
|
||||||
|
``serviceRedmineStats``.
|
||||||
|
|
||||||
|
Auth: ``X-Redmine-API-Key`` header. The key is *never* logged.
|
||||||
|
|
||||||
|
Idempotency / safety:
|
||||||
|
- ``DELETE /issues/{id}`` is often forbidden in Redmine (HTTP 403).
|
||||||
|
``deleteIssue`` returns ``False`` instead of raising in that case so
|
||||||
|
the higher layer can fall back to status-based archival.
|
||||||
|
- A small ``_throttleSeconds`` delay (default 150 ms) is awaited after
|
||||||
|
every write call to keep the SSS server happy.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
from modules.datamodels.datamodelTickets import TicketBase, TicketFieldAttribute
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineApiError(RuntimeError):
|
||||||
|
"""Raised when the Redmine API returns a non-success status."""
|
||||||
|
|
||||||
|
def __init__(self, status: int, body: str, method: str, path: str):
|
||||||
|
self.status = status
|
||||||
|
self.body = body
|
||||||
|
self.method = method
|
||||||
|
self.path = path
|
||||||
|
super().__init__(f"Redmine {method} {path} failed: HTTP {status} {body[:300]}")
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectorTicketsRedmine(TicketBase):
|
||||||
|
"""Async Redmine connector. One instance per (baseUrl, apiKey, projectId)."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
baseUrl: str,
|
||||||
|
apiKey: str,
|
||||||
|
projectId: str,
|
||||||
|
throttleSeconds: float = 0.15,
|
||||||
|
timeoutSeconds: float = 30.0,
|
||||||
|
) -> None:
|
||||||
|
if not baseUrl:
|
||||||
|
raise ValueError("Redmine baseUrl is required")
|
||||||
|
if not apiKey:
|
||||||
|
raise ValueError("Redmine apiKey is required")
|
||||||
|
self._baseUrl = baseUrl.rstrip("/")
|
||||||
|
self._apiKey = apiKey
|
||||||
|
self._projectId = str(projectId) if projectId is not None else ""
|
||||||
|
self._throttleSeconds = max(0.0, float(throttleSeconds))
|
||||||
|
self._timeoutSeconds = float(timeoutSeconds)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Low-level
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _headers(self) -> Dict[str, str]:
|
||||||
|
return {
|
||||||
|
"X-Redmine-API-Key": self._apiKey,
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"Accept": "application/json",
|
||||||
|
}
|
||||||
|
|
||||||
|
async def _call(
|
||||||
|
self,
|
||||||
|
method: str,
|
||||||
|
path: str,
|
||||||
|
*,
|
||||||
|
payload: Optional[Dict[str, Any]] = None,
|
||||||
|
params: Optional[Dict[str, Any]] = None,
|
||||||
|
) -> Tuple[int, Optional[Dict[str, Any]], str]:
|
||||||
|
"""Single REST call. Returns ``(status, json_or_none, raw_body)``.
|
||||||
|
|
||||||
|
Does *not* raise -- the caller decides whether a non-2xx is fatal
|
||||||
|
(e.g. 403 on DELETE is expected and handled).
|
||||||
|
"""
|
||||||
|
url = f"{self._baseUrl}{path}"
|
||||||
|
if params:
|
||||||
|
url = f"{url}?{urlencode(params)}"
|
||||||
|
timeout = aiohttp.ClientTimeout(total=self._timeoutSeconds)
|
||||||
|
try:
|
||||||
|
async with aiohttp.ClientSession(timeout=timeout) as session:
|
||||||
|
async with session.request(method, url, headers=self._headers(), json=payload) as resp:
|
||||||
|
raw = await resp.text()
|
||||||
|
parsed: Optional[Dict[str, Any]] = None
|
||||||
|
if raw:
|
||||||
|
try:
|
||||||
|
parsed = await resp.json(content_type=None)
|
||||||
|
except Exception:
|
||||||
|
parsed = None
|
||||||
|
return resp.status, parsed, raw
|
||||||
|
except aiohttp.ClientError as e:
|
||||||
|
logger.warning(f"Redmine {method} {path} client error: {e}")
|
||||||
|
return -1, None, f"ClientError: {e}"
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
logger.warning(f"Redmine {method} {path} timeout after {self._timeoutSeconds}s")
|
||||||
|
return -1, None, "Timeout"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _isOk(status: int) -> bool:
|
||||||
|
return 200 <= status < 300
|
||||||
|
|
||||||
|
async def _gentle(self) -> None:
|
||||||
|
if self._throttleSeconds > 0:
|
||||||
|
await asyncio.sleep(self._throttleSeconds)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Identity / health
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def whoAmI(self) -> Dict[str, Any]:
|
||||||
|
status, body, raw = await self._call("GET", "/users/current.json")
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", "/users/current.json")
|
||||||
|
return body.get("user", {})
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Project meta -- trackers, statuses, priorities, custom fields, users
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def getTrackers(self) -> List[Dict[str, Any]]:
|
||||||
|
status, body, raw = await self._call("GET", "/trackers.json")
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", "/trackers.json")
|
||||||
|
return body.get("trackers", []) or []
|
||||||
|
|
||||||
|
async def getStatuses(self) -> List[Dict[str, Any]]:
|
||||||
|
status, body, raw = await self._call("GET", "/issue_statuses.json")
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", "/issue_statuses.json")
|
||||||
|
return body.get("issue_statuses", []) or []
|
||||||
|
|
||||||
|
async def getPriorities(self) -> List[Dict[str, Any]]:
|
||||||
|
status, body, raw = await self._call(
|
||||||
|
"GET", "/enumerations/issue_priorities.json"
|
||||||
|
)
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
return []
|
||||||
|
return body.get("issue_priorities", []) or []
|
||||||
|
|
||||||
|
async def getCustomFields(self) -> List[Dict[str, Any]]:
|
||||||
|
"""Requires admin privileges in Redmine. Returns ``[]`` if forbidden."""
|
||||||
|
status, body, raw = await self._call("GET", "/custom_fields.json")
|
||||||
|
if status == 403 or status == 401:
|
||||||
|
logger.info("Redmine /custom_fields.json forbidden -- using per-issue field discovery")
|
||||||
|
return []
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", "/custom_fields.json")
|
||||||
|
return body.get("custom_fields", []) or []
|
||||||
|
|
||||||
|
async def getProjectUsers(self) -> List[Dict[str, Any]]:
|
||||||
|
status, body, raw = await self._call(
|
||||||
|
"GET", f"/projects/{self._projectId}/memberships.json", params={"limit": 100}
|
||||||
|
)
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
return []
|
||||||
|
members = body.get("memberships", []) or []
|
||||||
|
users: List[Dict[str, Any]] = []
|
||||||
|
seen: set[int] = set()
|
||||||
|
for m in members:
|
||||||
|
user = m.get("user")
|
||||||
|
if not user:
|
||||||
|
continue
|
||||||
|
uid = user.get("id")
|
||||||
|
if uid in seen:
|
||||||
|
continue
|
||||||
|
seen.add(uid)
|
||||||
|
users.append(user)
|
||||||
|
return users
|
||||||
|
|
||||||
|
async def getProjectInfo(self) -> Dict[str, Any]:
|
||||||
|
status, body, raw = await self._call("GET", f"/projects/{self._projectId}.json")
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", f"/projects/{self._projectId}.json")
|
||||||
|
return body.get("project", {})
|
||||||
|
|
||||||
|
async def getIssueCategories(self) -> List[Dict[str, Any]]:
|
||||||
|
"""Per-project issue categories. Returns ``[]`` if the endpoint
|
||||||
|
is forbidden or the project has no categories defined."""
|
||||||
|
path = f"/projects/{self._projectId}/issue_categories.json"
|
||||||
|
status, body, raw = await self._call("GET", path)
|
||||||
|
if status in (401, 403, 404) or not self._isOk(status) or not body:
|
||||||
|
return []
|
||||||
|
return body.get("issue_categories", []) or []
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Issues -- read
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def getIssue(
|
||||||
|
self, issueId: int, *, includeRelations: bool = True, includeChildren: bool = False
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
includes = ["custom_fields", "journals"]
|
||||||
|
if includeRelations:
|
||||||
|
includes.append("relations")
|
||||||
|
if includeChildren:
|
||||||
|
includes.append("children")
|
||||||
|
params = {"include": ",".join(includes)}
|
||||||
|
status, body, raw = await self._call("GET", f"/issues/{issueId}.json", params=params)
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", f"/issues/{issueId}.json")
|
||||||
|
return body.get("issue", {})
|
||||||
|
|
||||||
|
async def listIssues(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
trackerId: Optional[int] = None,
|
||||||
|
statusId: Optional[str] = "*",
|
||||||
|
updatedOnFrom: Optional[str] = None,
|
||||||
|
updatedOnTo: Optional[str] = None,
|
||||||
|
createdOnFrom: Optional[str] = None,
|
||||||
|
createdOnTo: Optional[str] = None,
|
||||||
|
assignedToId: Optional[int] = None,
|
||||||
|
subjectContains: Optional[str] = None,
|
||||||
|
limit: int = 100,
|
||||||
|
offset: int = 0,
|
||||||
|
include: Optional[List[str]] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Single-page list. Returns the raw envelope ``{issues, total_count, offset, limit}``."""
|
||||||
|
params: Dict[str, Any] = {
|
||||||
|
"project_id": self._projectId,
|
||||||
|
"limit": str(limit),
|
||||||
|
"offset": str(offset),
|
||||||
|
}
|
||||||
|
if statusId is not None:
|
||||||
|
params["status_id"] = str(statusId)
|
||||||
|
if trackerId is not None:
|
||||||
|
params["tracker_id"] = str(trackerId)
|
||||||
|
if assignedToId is not None:
|
||||||
|
params["assigned_to_id"] = str(assignedToId)
|
||||||
|
if subjectContains:
|
||||||
|
params["subject"] = f"~{subjectContains}"
|
||||||
|
if updatedOnFrom and updatedOnTo:
|
||||||
|
params["updated_on"] = f"><{updatedOnFrom}|{updatedOnTo}"
|
||||||
|
elif updatedOnFrom:
|
||||||
|
params["updated_on"] = f">={updatedOnFrom}"
|
||||||
|
elif updatedOnTo:
|
||||||
|
params["updated_on"] = f"<={updatedOnTo}"
|
||||||
|
if createdOnFrom and createdOnTo:
|
||||||
|
params["created_on"] = f"><{createdOnFrom}|{createdOnTo}"
|
||||||
|
elif createdOnFrom:
|
||||||
|
params["created_on"] = f">={createdOnFrom}"
|
||||||
|
elif createdOnTo:
|
||||||
|
params["created_on"] = f"<={createdOnTo}"
|
||||||
|
if include:
|
||||||
|
params["include"] = ",".join(include)
|
||||||
|
|
||||||
|
status, body, raw = await self._call("GET", "/issues.json", params=params)
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "GET", "/issues.json")
|
||||||
|
return body
|
||||||
|
|
||||||
|
async def listAllIssues(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
trackerId: Optional[int] = None,
|
||||||
|
statusId: Optional[str] = "*",
|
||||||
|
updatedOnFrom: Optional[str] = None,
|
||||||
|
updatedOnTo: Optional[str] = None,
|
||||||
|
createdOnFrom: Optional[str] = None,
|
||||||
|
createdOnTo: Optional[str] = None,
|
||||||
|
assignedToId: Optional[int] = None,
|
||||||
|
pageSize: int = 100,
|
||||||
|
maxPages: int = 50,
|
||||||
|
include: Optional[List[str]] = None,
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
"""Paginate ``listIssues`` and return all matching raw issues."""
|
||||||
|
all_issues: List[Dict[str, Any]] = []
|
||||||
|
offset = 0
|
||||||
|
for _page in range(maxPages):
|
||||||
|
envelope = await self.listIssues(
|
||||||
|
trackerId=trackerId,
|
||||||
|
statusId=statusId,
|
||||||
|
updatedOnFrom=updatedOnFrom,
|
||||||
|
updatedOnTo=updatedOnTo,
|
||||||
|
createdOnFrom=createdOnFrom,
|
||||||
|
createdOnTo=createdOnTo,
|
||||||
|
assignedToId=assignedToId,
|
||||||
|
limit=pageSize,
|
||||||
|
offset=offset,
|
||||||
|
include=include,
|
||||||
|
)
|
||||||
|
page_issues = envelope.get("issues", []) or []
|
||||||
|
all_issues.extend(page_issues)
|
||||||
|
total = int(envelope.get("total_count") or 0)
|
||||||
|
offset += len(page_issues)
|
||||||
|
if not page_issues or offset >= total:
|
||||||
|
break
|
||||||
|
return all_issues
|
||||||
|
|
||||||
|
async def listRelations(self, issueId: int) -> List[Dict[str, Any]]:
|
||||||
|
issue = await self.getIssue(issueId, includeRelations=True)
|
||||||
|
return issue.get("relations", []) or []
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Issues -- write
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def createIssue(self, fields: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
body_in = {"issue": dict(fields)}
|
||||||
|
body_in["issue"].setdefault("project_id", self._projectId)
|
||||||
|
status, body, raw = await self._call("POST", "/issues.json", payload=body_in)
|
||||||
|
await self._gentle()
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "POST", "/issues.json")
|
||||||
|
return body.get("issue", {})
|
||||||
|
|
||||||
|
async def updateIssue(
|
||||||
|
self, issueId: int, fields: Dict[str, Any], *, notes: Optional[str] = None
|
||||||
|
) -> bool:
|
||||||
|
body_in: Dict[str, Any] = {"issue": dict(fields)}
|
||||||
|
if notes:
|
||||||
|
body_in["issue"]["notes"] = notes
|
||||||
|
status, body, raw = await self._call("PUT", f"/issues/{issueId}.json", payload=body_in)
|
||||||
|
await self._gentle()
|
||||||
|
if status == 204:
|
||||||
|
return True
|
||||||
|
if not self._isOk(status):
|
||||||
|
raise RedmineApiError(status, raw, "PUT", f"/issues/{issueId}.json")
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def deleteIssue(self, issueId: int) -> bool:
|
||||||
|
"""Returns ``False`` if Redmine forbids deletion (HTTP 403/401)."""
|
||||||
|
status, body, raw = await self._call("DELETE", f"/issues/{issueId}.json")
|
||||||
|
await self._gentle()
|
||||||
|
if status in (200, 204):
|
||||||
|
return True
|
||||||
|
if status in (401, 403):
|
||||||
|
logger.info(f"Redmine DELETE issue {issueId} forbidden ({status}) -- caller should fall back")
|
||||||
|
return False
|
||||||
|
raise RedmineApiError(status, raw, "DELETE", f"/issues/{issueId}.json")
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Relations -- write
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def addRelation(
|
||||||
|
self, fromId: int, toId: int, *, relationType: str = "relates", delay: Optional[int] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
rel: Dict[str, Any] = {"issue_to_id": toId, "relation_type": relationType}
|
||||||
|
if delay is not None:
|
||||||
|
rel["delay"] = int(delay)
|
||||||
|
status, body, raw = await self._call(
|
||||||
|
"POST", f"/issues/{fromId}/relations.json", payload={"relation": rel}
|
||||||
|
)
|
||||||
|
await self._gentle()
|
||||||
|
if not self._isOk(status) or not body:
|
||||||
|
raise RedmineApiError(status, raw, "POST", f"/issues/{fromId}/relations.json")
|
||||||
|
return body.get("relation", {})
|
||||||
|
|
||||||
|
async def deleteRelation(self, relationId: int) -> bool:
|
||||||
|
status, body, raw = await self._call("DELETE", f"/relations/{relationId}.json")
|
||||||
|
await self._gentle()
|
||||||
|
if status in (200, 204):
|
||||||
|
return True
|
||||||
|
if status in (401, 403):
|
||||||
|
return False
|
||||||
|
raise RedmineApiError(status, raw, "DELETE", f"/relations/{relationId}.json")
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# TicketBase compliance (used by AI-tool path)
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def readAttributes(self) -> List[TicketFieldAttribute]:
|
||||||
|
"""Static base attributes + project custom fields (best-effort)."""
|
||||||
|
attrs: List[TicketFieldAttribute] = [
|
||||||
|
TicketFieldAttribute(fieldName="Subject", field="subject"),
|
||||||
|
TicketFieldAttribute(fieldName="Description", field="description"),
|
||||||
|
TicketFieldAttribute(fieldName="Tracker", field="tracker_id"),
|
||||||
|
TicketFieldAttribute(fieldName="Status", field="status_id"),
|
||||||
|
TicketFieldAttribute(fieldName="Priority", field="priority_id"),
|
||||||
|
TicketFieldAttribute(fieldName="Assignee", field="assigned_to_id"),
|
||||||
|
TicketFieldAttribute(fieldName="Parent", field="parent_issue_id"),
|
||||||
|
TicketFieldAttribute(fieldName="Target Version", field="fixed_version_id"),
|
||||||
|
]
|
||||||
|
try:
|
||||||
|
cfs = await self.getCustomFields()
|
||||||
|
except Exception:
|
||||||
|
cfs = []
|
||||||
|
for cf in cfs:
|
||||||
|
try:
|
||||||
|
attrs.append(
|
||||||
|
TicketFieldAttribute(
|
||||||
|
fieldName=str(cf.get("name", f"cf_{cf.get('id')}")),
|
||||||
|
field=f"cf_{cf.get('id')}",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
async def readTasks(self, *, limit: int = 0) -> List[Dict[str, Any]]:
|
||||||
|
if limit and limit > 0:
|
||||||
|
envelope = await self.listIssues(limit=limit)
|
||||||
|
return envelope.get("issues", []) or []
|
||||||
|
return await self.listAllIssues()
|
||||||
|
|
||||||
|
async def writeTasks(self, tasklist: List[Dict[str, Any]]) -> None:
|
||||||
|
for task in tasklist:
|
||||||
|
issue_id = task.get("id")
|
||||||
|
fields = {k: v for k, v in task.items() if k != "id"}
|
||||||
|
if issue_id:
|
||||||
|
await self.updateIssue(int(issue_id), fields)
|
||||||
|
else:
|
||||||
|
await self.createIssue(fields)
|
||||||
|
|
@ -54,7 +54,12 @@ class ClickupListsAdapter(ServiceAdapter):
|
||||||
self._svc = ClickupService(context=None, get_service=lambda _: None)
|
self._svc = ClickupService(context=None, get_service=lambda _: None)
|
||||||
self._svc.setAccessToken(access_token)
|
self._svc.setAccessToken(access_token)
|
||||||
|
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
p = _norm(path)
|
p = _norm(path)
|
||||||
out: List[ExternalEntry] = []
|
out: List[ExternalEntry] = []
|
||||||
|
|
||||||
|
|
@ -173,7 +178,11 @@ class ClickupListsAdapter(ServiceAdapter):
|
||||||
)
|
)
|
||||||
if len(tasks) < 100:
|
if len(tasks) < 100:
|
||||||
break
|
break
|
||||||
|
if limit is not None and len(out) >= int(limit):
|
||||||
|
break
|
||||||
page += 1
|
page += 1
|
||||||
|
if limit is not None:
|
||||||
|
out = out[: max(1, int(limit))]
|
||||||
return out
|
return out
|
||||||
|
|
||||||
m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
|
m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
|
||||||
|
|
@ -213,7 +222,12 @@ class ClickupListsAdapter(ServiceAdapter):
|
||||||
task_id = m.group(3)
|
task_id = m.group(3)
|
||||||
return await self._svc.uploadTaskAttachment(task_id, data, fileName)
|
return await self._svc.uploadTaskAttachment(task_id, data, fileName)
|
||||||
|
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
base = _norm(path or "/")
|
base = _norm(path or "/")
|
||||||
team_id: Optional[str] = None
|
team_id: Optional[str] = None
|
||||||
mt = re.match(r"^/team/([^/]+)", base)
|
mt = re.match(r"^/team/([^/]+)", base)
|
||||||
|
|
@ -252,7 +266,11 @@ class ClickupListsAdapter(ServiceAdapter):
|
||||||
)
|
)
|
||||||
if len(tasks) < 25:
|
if len(tasks) < 25:
|
||||||
break
|
break
|
||||||
|
if limit is not None and len(out) >= int(limit):
|
||||||
|
break
|
||||||
page += 1
|
page += 1
|
||||||
|
if limit is not None:
|
||||||
|
out = out[: max(1, int(limit))]
|
||||||
return out
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,12 @@ class FtpFilesAdapter(ServiceAdapter):
|
||||||
def __init__(self, accessToken: str):
|
def __init__(self, accessToken: str):
|
||||||
self._accessToken = accessToken
|
self._accessToken = accessToken
|
||||||
|
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
logger.info(f"FTP browse stub: {path}")
|
logger.info(f"FTP browse stub: {path}")
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
|
@ -32,7 +37,12 @@ class FtpFilesAdapter(ServiceAdapter):
|
||||||
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
||||||
return {"error": "FTP upload not yet implemented"}
|
return {"error": "FTP upload not yet implemented"}
|
||||||
|
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -37,11 +37,17 @@ class DriveAdapter(ServiceAdapter):
|
||||||
def __init__(self, accessToken: str):
|
def __init__(self, accessToken: str):
|
||||||
self._token = accessToken
|
self._token = accessToken
|
||||||
|
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
folderId = (path or "").strip("/") or "root"
|
folderId = (path or "").strip("/") or "root"
|
||||||
query = f"'{folderId}' in parents and trashed=false"
|
query = f"'{folderId}' in parents and trashed=false"
|
||||||
fields = "files(id,name,mimeType,size,modifiedTime,parents)"
|
fields = "files(id,name,mimeType,size,modifiedTime,parents)"
|
||||||
url = f"{_DRIVE_BASE}/files?q={query}&fields={fields}&pageSize=100&orderBy=folder,name"
|
pageSize = max(1, min(int(limit or 100), 1000))
|
||||||
|
url = f"{_DRIVE_BASE}/files?q={query}&fields={fields}&pageSize={pageSize}&orderBy=folder,name"
|
||||||
|
|
||||||
result = await _googleGet(self._token, url)
|
result = await _googleGet(self._token, url)
|
||||||
if "error" in result:
|
if "error" in result:
|
||||||
|
|
@ -111,14 +117,20 @@ class DriveAdapter(ServiceAdapter):
|
||||||
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
||||||
return {"error": "Google Drive upload not yet implemented"}
|
return {"error": "Google Drive upload not yet implemented"}
|
||||||
|
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
safeQuery = query.replace("'", "\\'")
|
safeQuery = query.replace("'", "\\'")
|
||||||
folderId = (path or "").strip("/")
|
folderId = (path or "").strip("/")
|
||||||
qParts = [f"name contains '{safeQuery}'", "trashed=false"]
|
qParts = [f"name contains '{safeQuery}'", "trashed=false"]
|
||||||
if folderId:
|
if folderId:
|
||||||
qParts.append(f"'{folderId}' in parents")
|
qParts.append(f"'{folderId}' in parents")
|
||||||
qStr = " and ".join(qParts)
|
qStr = " and ".join(qParts)
|
||||||
url = f"{_DRIVE_BASE}/files?q={qStr}&fields=files(id,name,mimeType,size)&pageSize=25"
|
pageSize = max(1, min(int(limit or 100), 1000))
|
||||||
|
url = f"{_DRIVE_BASE}/files?q={qStr}&fields=files(id,name,mimeType,size)&pageSize={pageSize}"
|
||||||
logger.debug(f"Google Drive search: q={qStr}")
|
logger.debug(f"Google Drive search: q={qStr}")
|
||||||
result = await _googleGet(self._token, url)
|
result = await _googleGet(self._token, url)
|
||||||
if "error" in result:
|
if "error" in result:
|
||||||
|
|
@ -140,7 +152,15 @@ class GmailAdapter(ServiceAdapter):
|
||||||
def __init__(self, accessToken: str):
|
def __init__(self, accessToken: str):
|
||||||
self._token = accessToken
|
self._token = accessToken
|
||||||
|
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> list:
|
_DEFAULT_MESSAGE_LIMIT = 100
|
||||||
|
_MAX_MESSAGE_LIMIT = 500
|
||||||
|
|
||||||
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> list:
|
||||||
cleanPath = (path or "").strip("/")
|
cleanPath = (path or "").strip("/")
|
||||||
|
|
||||||
if not cleanPath:
|
if not cleanPath:
|
||||||
|
|
@ -165,13 +185,14 @@ class GmailAdapter(ServiceAdapter):
|
||||||
labels.sort(key=lambda e: (0 if e.metadata.get("type") == "system" else 1, e.name))
|
labels.sort(key=lambda e: (0 if e.metadata.get("type") == "system" else 1, e.name))
|
||||||
return labels
|
return labels
|
||||||
|
|
||||||
url = f"{_GMAIL_BASE}/users/me/messages?labelIds={cleanPath}&maxResults=25"
|
effectiveLimit = self._DEFAULT_MESSAGE_LIMIT if limit is None else max(1, min(int(limit), self._MAX_MESSAGE_LIMIT))
|
||||||
|
url = f"{_GMAIL_BASE}/users/me/messages?labelIds={cleanPath}&maxResults={effectiveLimit}"
|
||||||
result = await _googleGet(self._token, url)
|
result = await _googleGet(self._token, url)
|
||||||
if "error" in result:
|
if "error" in result:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
entries = []
|
entries = []
|
||||||
for msg in result.get("messages", [])[:25]:
|
for msg in result.get("messages", [])[:effectiveLimit]:
|
||||||
msgId = msg.get("id", "")
|
msgId = msg.get("id", "")
|
||||||
detailUrl = f"{_GMAIL_BASE}/users/me/messages/{msgId}?format=metadata&metadataHeaders=Subject&metadataHeaders=From&metadataHeaders=Date"
|
detailUrl = f"{_GMAIL_BASE}/users/me/messages/{msgId}?format=metadata&metadataHeaders=Subject&metadataHeaders=From&metadataHeaders=Date"
|
||||||
detail = await _googleGet(self._token, detailUrl)
|
detail = await _googleGet(self._token, detailUrl)
|
||||||
|
|
@ -231,8 +252,14 @@ class GmailAdapter(ServiceAdapter):
|
||||||
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
||||||
return {"error": "Gmail upload not applicable"}
|
return {"error": "Gmail upload not applicable"}
|
||||||
|
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> list:
|
async def search(
|
||||||
url = f"{_GMAIL_BASE}/users/me/messages?q={query}&maxResults=10"
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> list:
|
||||||
|
effectiveLimit = self._DEFAULT_MESSAGE_LIMIT if limit is None else max(1, min(int(limit), self._MAX_MESSAGE_LIMIT))
|
||||||
|
url = f"{_GMAIL_BASE}/users/me/messages?q={query}&maxResults={effectiveLimit}"
|
||||||
result = await _googleGet(self._token, url)
|
result = await _googleGet(self._token, url)
|
||||||
if "error" in result:
|
if "error" in result:
|
||||||
return []
|
return []
|
||||||
|
|
|
||||||
|
|
@ -34,6 +34,9 @@ class _GraphApiMixin:
|
||||||
async def _graphPut(self, endpoint: str, data: bytes = None) -> Dict[str, Any]:
|
async def _graphPut(self, endpoint: str, data: bytes = None) -> Dict[str, Any]:
|
||||||
return await _makeGraphCall(self._accessToken, endpoint, "PUT", data)
|
return await _makeGraphCall(self._accessToken, endpoint, "PUT", data)
|
||||||
|
|
||||||
|
async def _graphPatch(self, endpoint: str, data: Any = None) -> Dict[str, Any]:
|
||||||
|
return await _makeGraphCall(self._accessToken, endpoint, "PATCH", data)
|
||||||
|
|
||||||
async def _graphDelete(self, endpoint: str) -> Dict[str, Any]:
|
async def _graphDelete(self, endpoint: str) -> Dict[str, Any]:
|
||||||
return await _makeGraphCall(self._accessToken, endpoint, "DELETE")
|
return await _makeGraphCall(self._accessToken, endpoint, "DELETE")
|
||||||
|
|
||||||
|
|
@ -82,6 +85,9 @@ async def _makeGraphCall(
|
||||||
elif method == "PUT":
|
elif method == "PUT":
|
||||||
async with session.put(url, **kwargs) as resp:
|
async with session.put(url, **kwargs) as resp:
|
||||||
return await _handleResponse(resp)
|
return await _handleResponse(resp)
|
||||||
|
elif method == "PATCH":
|
||||||
|
async with session.patch(url, **kwargs) as resp:
|
||||||
|
return await _handleResponse(resp)
|
||||||
elif method == "DELETE":
|
elif method == "DELETE":
|
||||||
async with session.delete(url, **kwargs) as resp:
|
async with session.delete(url, **kwargs) as resp:
|
||||||
if resp.status in (200, 204):
|
if resp.status in (200, 204):
|
||||||
|
|
@ -99,11 +105,25 @@ async def _makeGraphCall(
|
||||||
async def _handleResponse(resp: aiohttp.ClientResponse) -> Dict[str, Any]:
|
async def _handleResponse(resp: aiohttp.ClientResponse) -> Dict[str, Any]:
|
||||||
if resp.status in (200, 201):
|
if resp.status in (200, 201):
|
||||||
return await resp.json()
|
return await resp.json()
|
||||||
|
if resp.status == 202:
|
||||||
|
return {"accepted": True}
|
||||||
|
if resp.status == 204:
|
||||||
|
return {}
|
||||||
errorText = await resp.text()
|
errorText = await resp.text()
|
||||||
logger.error(f"Graph API {resp.status}: {errorText}")
|
logger.error(f"Graph API {resp.status}: {errorText}")
|
||||||
return {"error": f"{resp.status}: {errorText}"}
|
return {"error": f"{resp.status}: {errorText}"}
|
||||||
|
|
||||||
|
|
||||||
|
def _stripGraphBase(url: str) -> str:
|
||||||
|
"""Convert an absolute Graph URL (used by @odata.nextLink) into the
|
||||||
|
relative endpoint that ``_makeGraphCall`` expects."""
|
||||||
|
if not url:
|
||||||
|
return ""
|
||||||
|
if url.startswith(_GRAPH_BASE):
|
||||||
|
return url[len(_GRAPH_BASE):].lstrip("/")
|
||||||
|
return url
|
||||||
|
|
||||||
|
|
||||||
def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> ExternalEntry:
|
def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> ExternalEntry:
|
||||||
isFolder = "folder" in item
|
isFolder = "folder" in item
|
||||||
return ExternalEntry(
|
return ExternalEntry(
|
||||||
|
|
@ -128,7 +148,12 @@ def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> Exter
|
||||||
class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
"""ServiceAdapter for SharePoint (files, sites) via Microsoft Graph."""
|
"""ServiceAdapter for SharePoint (files, sites) via Microsoft Graph."""
|
||||||
|
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
"""List items in a SharePoint folder.
|
"""List items in a SharePoint folder.
|
||||||
|
|
||||||
Path format: /sites/<SiteName>/<FolderPath>
|
Path format: /sites/<SiteName>/<FolderPath>
|
||||||
|
|
@ -155,6 +180,8 @@ class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
entries = [_graphItemToExternalEntry(item, path) for item in result.get("value", [])]
|
entries = [_graphItemToExternalEntry(item, path) for item in result.get("value", [])]
|
||||||
if filter:
|
if filter:
|
||||||
entries = [e for e in entries if _matchFilter(e, filter)]
|
entries = [e for e in entries if _matchFilter(e, filter)]
|
||||||
|
if limit is not None:
|
||||||
|
entries = entries[: max(1, int(limit))]
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
async def _discoverSites(self) -> List[ExternalEntry]:
|
async def _discoverSites(self) -> List[ExternalEntry]:
|
||||||
|
|
@ -197,7 +224,12 @@ class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
result = await self._graphPut(endpoint, data)
|
result = await self._graphPut(endpoint, data)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
siteId, _ = _parseSharepointPath(path or "")
|
siteId, _ = _parseSharepointPath(path or "")
|
||||||
if not siteId:
|
if not siteId:
|
||||||
return []
|
return []
|
||||||
|
|
@ -206,7 +238,10 @@ class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
result = await self._graphGet(endpoint)
|
result = await self._graphGet(endpoint)
|
||||||
if "error" in result:
|
if "error" in result:
|
||||||
return []
|
return []
|
||||||
return [_graphItemToExternalEntry(item) for item in result.get("value", [])]
|
entries = [_graphItemToExternalEntry(item) for item in result.get("value", [])]
|
||||||
|
if limit is not None:
|
||||||
|
entries = entries[: max(1, int(limit))]
|
||||||
|
return entries
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -216,31 +251,89 @@ class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
|
class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
"""ServiceAdapter for Outlook (mail, calendar) via Microsoft Graph."""
|
"""ServiceAdapter for Outlook (mail, calendar) via Microsoft Graph."""
|
||||||
|
|
||||||
async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
|
# Default upper bound for messages returned from a single browse() call.
|
||||||
|
# Graph allows $top up to 1000 per page; we keep the default modest so
|
||||||
|
# accidental "browse all" calls don't blow up the LLM context. Callers
|
||||||
|
# (e.g. the agent's browseDataSource tool) can override via ``limit``.
|
||||||
|
_DEFAULT_MESSAGE_LIMIT = 100
|
||||||
|
_MAX_MESSAGE_LIMIT = 1000
|
||||||
|
_PAGE_SIZE = 100
|
||||||
|
|
||||||
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
"""List mail folders or messages.
|
"""List mail folders or messages.
|
||||||
|
|
||||||
path = "" or "/" → list mail folders
|
path = "" or "/" → list ALL top-level mail folders (paginated)
|
||||||
path = "/Inbox" → list messages in Inbox
|
path = "/<folderId>" → list messages in that folder (paginated, up to ``limit``)
|
||||||
"""
|
"""
|
||||||
if not path or path == "/":
|
if not path or path == "/":
|
||||||
result = await self._graphGet("me/mailFolders")
|
# Graph default page size for /me/mailFolders is 10. Mailboxes with
|
||||||
if "error" in result:
|
# localized + many system folders (Posteingang, Gesendet, Archiv, …)
|
||||||
return []
|
# often exceed that, so the well-known Inbox can fall off the first
|
||||||
|
# page. We page through all results AND hard-fall-back to the
|
||||||
|
# well-known shortcut /me/mailFolders/inbox so the default folder
|
||||||
|
# is always visible regardless of locale/order.
|
||||||
|
folders: List[Dict[str, Any]] = []
|
||||||
|
seenIds: set = set()
|
||||||
|
endpoint: Optional[str] = "me/mailFolders?$top=100"
|
||||||
|
while endpoint:
|
||||||
|
result = await self._graphGet(endpoint)
|
||||||
|
if "error" in result:
|
||||||
|
break
|
||||||
|
for f in result.get("value", []):
|
||||||
|
fid = f.get("id")
|
||||||
|
if fid and fid not in seenIds:
|
||||||
|
seenIds.add(fid)
|
||||||
|
folders.append(f)
|
||||||
|
nextLink = result.get("@odata.nextLink")
|
||||||
|
if not nextLink:
|
||||||
|
endpoint = None
|
||||||
|
else:
|
||||||
|
endpoint = _stripGraphBase(nextLink)
|
||||||
|
|
||||||
|
# Guarantee Inbox is present (well-known name, locale-independent)
|
||||||
|
if not any((f.get("displayName") or "").lower() in ("inbox", "posteingang") for f in folders):
|
||||||
|
inbox = await self._graphGet("me/mailFolders/inbox")
|
||||||
|
if "error" not in inbox and inbox.get("id") and inbox.get("id") not in seenIds:
|
||||||
|
folders.insert(0, inbox)
|
||||||
|
|
||||||
return [
|
return [
|
||||||
ExternalEntry(
|
ExternalEntry(
|
||||||
name=f.get("displayName", ""),
|
name=f.get("displayName", ""),
|
||||||
path=f"/{f.get('id', '')}",
|
path=f"/{f.get('id', '')}",
|
||||||
isFolder=True,
|
isFolder=True,
|
||||||
metadata={"id": f.get("id"), "totalItemCount": f.get("totalItemCount")},
|
metadata={
|
||||||
|
"id": f.get("id"),
|
||||||
|
"totalItemCount": f.get("totalItemCount"),
|
||||||
|
"unreadItemCount": f.get("unreadItemCount"),
|
||||||
|
"childFolderCount": f.get("childFolderCount"),
|
||||||
|
},
|
||||||
)
|
)
|
||||||
for f in result.get("value", [])
|
for f in folders
|
||||||
]
|
]
|
||||||
|
|
||||||
folderId = path.strip("/")
|
folderId = path.strip("/")
|
||||||
endpoint = f"me/mailFolders/{folderId}/messages?$top=25&$orderby=receivedDateTime desc"
|
effectiveLimit = self._DEFAULT_MESSAGE_LIMIT if limit is None else max(1, min(int(limit), self._MAX_MESSAGE_LIMIT))
|
||||||
result = await self._graphGet(endpoint)
|
pageSize = min(self._PAGE_SIZE, effectiveLimit)
|
||||||
if "error" in result:
|
endpoint: Optional[str] = (
|
||||||
return []
|
f"me/mailFolders/{folderId}/messages"
|
||||||
|
f"?$top={pageSize}&$orderby=receivedDateTime desc"
|
||||||
|
)
|
||||||
|
messages: List[Dict[str, Any]] = []
|
||||||
|
while endpoint and len(messages) < effectiveLimit:
|
||||||
|
result = await self._graphGet(endpoint)
|
||||||
|
if "error" in result:
|
||||||
|
break
|
||||||
|
for m in result.get("value", []):
|
||||||
|
messages.append(m)
|
||||||
|
if len(messages) >= effectiveLimit:
|
||||||
|
break
|
||||||
|
nextLink = result.get("@odata.nextLink")
|
||||||
|
endpoint = _stripGraphBase(nextLink) if nextLink else None
|
||||||
return [
|
return [
|
||||||
ExternalEntry(
|
ExternalEntry(
|
||||||
name=m.get("subject", "(no subject)"),
|
name=m.get("subject", "(no subject)"),
|
||||||
|
|
@ -253,7 +346,7 @@ class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
"hasAttachments": m.get("hasAttachments", False),
|
"hasAttachments": m.get("hasAttachments", False),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
for m in result.get("value", [])
|
for m in messages
|
||||||
]
|
]
|
||||||
|
|
||||||
async def download(self, path: str) -> DownloadResult:
|
async def download(self, path: str) -> DownloadResult:
|
||||||
|
|
@ -279,9 +372,17 @@ class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
"""Not applicable for Outlook in the file sense."""
|
"""Not applicable for Outlook in the file sense."""
|
||||||
return {"error": "Upload not supported for Outlook"}
|
return {"error": "Upload not supported for Outlook"}
|
||||||
|
|
||||||
async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
safeQuery = query.replace("'", "''")
|
safeQuery = query.replace("'", "''")
|
||||||
endpoint = f"me/messages?$search=\"{safeQuery}\"&$top=25"
|
effectiveLimit = self._DEFAULT_MESSAGE_LIMIT if limit is None else max(1, min(int(limit), self._MAX_MESSAGE_LIMIT))
|
||||||
|
# NOTE: Graph $search does not support $orderby and may return a single
|
||||||
|
# page (no @odata.nextLink). We still pass $top to lift the implicit 25.
|
||||||
|
endpoint = f"me/messages?$search=\"{safeQuery}\"&$top={effectiveLimit}"
|
||||||
result = await self._graphGet(endpoint)
|
result = await self._graphGet(endpoint)
|
||||||
if "error" in result:
|
if "error" in result:
|
||||||
return []
|
return []
|
||||||
|
|
@@ -358,6 +459,265 @@ class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
            return result
        return {"success": True, "draft": True, "messageId": result.get("id", "")}

    # ------------------------------------------------------------------
    # Reply / Reply-All / Forward
    # ------------------------------------------------------------------
    # Microsoft Graph distinguishes between "send-immediately" endpoints
    # (``/reply``, ``/replyAll``, ``/forward``) and their "create-draft"
    # counterparts (``/createReply``, ``/createReplyAll``, ``/createForward``).
    # The send-immediately variant accepts a free-text ``comment`` string
    # that Graph prepends to the original conversation; the createReply*
    # variants return a fully-populated draft message that the caller can
    # further edit (e.g. via PATCH /me/messages/{id} with a richer body)
    # before posting via /send. We expose both flavours so the agent can
    # choose between "draft for review" and "send right now".

    async def replyToMail(
        self, messageId: str, comment: str,
        replyAll: bool = False,
    ) -> Dict[str, Any]:
        """Reply (or reply-all) to an existing message immediately.

        Preserves the conversation thread and the ``AW:`` prefix in Outlook --
        unlike sendMail() which creates a brand-new conversation.
        """
        import json
        endpointAction = "replyAll" if replyAll else "reply"
        payload = json.dumps({"comment": comment}).encode("utf-8")
        result = await self._graphPost(f"me/messages/{messageId}/{endpointAction}", payload)
        if "error" in result:
            return result
        return {"success": True, "messageId": messageId, "action": endpointAction}

    async def forwardMail(
        self, messageId: str, to: List[str], comment: str = "",
    ) -> Dict[str, Any]:
        """Forward an existing message to new recipients."""
        import json
        payload = json.dumps({
            "comment": comment,
            "toRecipients": [{"emailAddress": {"address": addr}} for addr in to],
        }).encode("utf-8")
        result = await self._graphPost(f"me/messages/{messageId}/forward", payload)
        if "error" in result:
            return result
        return {"success": True, "messageId": messageId, "action": "forward"}

    async def createReplyDraft(
        self, messageId: str, comment: str = "",
        replyAll: bool = False,
    ) -> Dict[str, Any]:
        """Create a reply-draft (in the Drafts folder) that the user can edit before sending."""
        import json
        endpointAction = "createReplyAll" if replyAll else "createReply"
        payload = json.dumps({"comment": comment}).encode("utf-8") if comment else b"{}"
        result = await self._graphPost(f"me/messages/{messageId}/{endpointAction}", payload)
        if "error" in result:
            return result
        return {"success": True, "draft": True, "messageId": result.get("id", ""), "originalMessageId": messageId}

    async def createForwardDraft(
        self, messageId: str, to: Optional[List[str]] = None, comment: str = "",
    ) -> Dict[str, Any]:
        """Create a forward-draft (in the Drafts folder) that the user can edit before sending."""
        import json
        body: Dict[str, Any] = {}
        if comment:
            body["comment"] = comment
        if to:
            body["toRecipients"] = [{"emailAddress": {"address": addr}} for addr in to]
        payload = json.dumps(body).encode("utf-8") if body else b"{}"
        result = await self._graphPost(f"me/messages/{messageId}/createForward", payload)
        if "error" in result:
            return result
        return {"success": True, "draft": True, "messageId": result.get("id", ""), "originalMessageId": messageId}
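A minimal usage sketch of the two flavours added above; the `adapter` instance and the surrounding calling context are assumptions, while the method names and return shapes come from this diff:

    # Send a reply-all right away, keeping the existing conversation thread:
    result = await adapter.replyToMail(messageId, "Danke, erledigt.", replyAll=True)

    # Or stage an editable draft in the Drafts folder for review first:
    draft = await adapter.createReplyDraft(messageId, comment="Entwurf zur Durchsicht")
    draftId = draft.get("messageId")  # edit via PATCH /me/messages/{draftId}, then POST /send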
    # ------------------------------------------------------------------
    # Folder-Management & Mail-Management
    # ------------------------------------------------------------------

    # Mapping of Microsoft Graph "well-known folder names" plus a few common
    # localized display names (DE) so the LLM can write natural names like
    # "Posteingang", "Archiv", "deletedItems" without having to look up the
    # opaque mailbox folder ID first.
    _WELL_KNOWN_FOLDERS = {
        "inbox": "inbox",
        "posteingang": "inbox",
        "drafts": "drafts",
        "entwürfe": "drafts",
        "entwurf": "drafts",
        "sentitems": "sentitems",
        "gesendet": "sentitems",
        "gesendete elemente": "sentitems",
        "deleteditems": "deleteditems",
        "gelöscht": "deleteditems",
        "gelöschte elemente": "deleteditems",
        "papierkorb": "deleteditems",
        "trash": "deleteditems",
        "junkemail": "junkemail",
        "spam": "junkemail",
        "junk": "junkemail",
        "outbox": "outbox",
        "postausgang": "outbox",
        "archive": "archive",
        "archiv": "archive",
        "msgfolderroot": "msgfolderroot",
        "root": "msgfolderroot",
    }

    async def listMailFolders(self) -> List[Dict[str, Any]]:
        """List all top-level mail folders with id, name and counts.

        Returns a flat list of dicts so the caller (e.g. an LLM tool) does not
        need to know the Graph nesting model. Use ``_resolveFolderId()`` to
        translate a user-provided name into a Graph folder ID.
        """
        folders: List[Dict[str, Any]] = []
        seenIds: set = set()
        endpoint: Optional[str] = "me/mailFolders?$top=100"
        while endpoint:
            result = await self._graphGet(endpoint)
            if "error" in result:
                break
            for f in result.get("value", []):
                fid = f.get("id")
                if fid and fid not in seenIds:
                    seenIds.add(fid)
                    folders.append({
                        "id": fid,
                        "displayName": f.get("displayName", ""),
                        "totalItemCount": f.get("totalItemCount", 0),
                        "unreadItemCount": f.get("unreadItemCount", 0),
                        "childFolderCount": f.get("childFolderCount", 0),
                    })
            nextLink = result.get("@odata.nextLink")
            endpoint = _stripGraphBase(nextLink) if nextLink else None
        return folders

    async def _resolveFolderId(self, folderRef: str) -> Optional[str]:
        """Resolve any user-supplied folder reference to a Graph folder ID.

        Resolution order:
        1. If it matches a well-known shortcut (locale-aware), return that
           shortcut directly -- Graph accepts ``inbox``, ``drafts`` etc. in
           the URL path.
        2. If it looks like a Graph folder ID (long base64-ish string),
           return as-is.
        3. Otherwise fall back to a case-insensitive ``displayName`` match
           against the user's mail folders.

        Returns ``None`` if nothing matches so the caller can surface a clear
        error instead of silently moving mail into the wrong place.
        """
        if not folderRef:
            return None
        ref = folderRef.strip()
        wellKnown = self._WELL_KNOWN_FOLDERS.get(ref.lower())
        if wellKnown:
            return wellKnown
        # Heuristic: Graph folder IDs are long URL-safe base64 strings; never
        # contain spaces; and almost always include "==" or AAAAA padding.
        if len(ref) > 60 and " " not in ref:
            return ref
        for f in await self.listMailFolders():
            if (f.get("displayName") or "").strip().lower() == ref.lower():
                return f.get("id")
        return None

    async def moveMail(
        self, messageId: str, destinationFolder: str,
    ) -> Dict[str, Any]:
        """Move a message to another folder (well-known name, displayName, or folder id)."""
        import json
        destId = await self._resolveFolderId(destinationFolder)
        if not destId:
            return {"error": f"Folder not found: '{destinationFolder}'. Use listMailFolders to inspect available folders."}
        payload = json.dumps({"destinationId": destId}).encode("utf-8")
        result = await self._graphPost(f"me/messages/{messageId}/move", payload)
        if "error" in result:
            return result
        return {"success": True, "messageId": result.get("id", messageId), "destinationFolder": destinationFolder}

    async def copyMail(
        self, messageId: str, destinationFolder: str,
    ) -> Dict[str, Any]:
        """Copy a message into another folder (original stays in place)."""
        import json
        destId = await self._resolveFolderId(destinationFolder)
        if not destId:
            return {"error": f"Folder not found: '{destinationFolder}'. Use listMailFolders to inspect available folders."}
        payload = json.dumps({"destinationId": destId}).encode("utf-8")
        result = await self._graphPost(f"me/messages/{messageId}/copy", payload)
        if "error" in result:
            return result
        return {"success": True, "newMessageId": result.get("id", ""), "destinationFolder": destinationFolder}

    async def archiveMail(self, messageId: str) -> Dict[str, Any]:
        """Move a message to the user's Archive folder.

        Outlook's Archive is a regular mail folder, not a flag, so this is a
        thin convenience wrapper around :py:meth:`moveMail`.
        """
        return await self.moveMail(messageId, "archive")

    async def deleteMail(
        self, messageId: str,
        *,
        hardDelete: bool = False,
    ) -> Dict[str, Any]:
        """Delete a message.

        Default behaviour (``hardDelete=False``) moves the message to the
        ``Deleted Items`` folder, which mirrors what users see in the Outlook
        UI when they press Delete. Set ``hardDelete=True`` to perform an
        unrecoverable removal -- agent tools must require an extra
        confirmation before invoking this path.
        """
        if hardDelete:
            result = await self._graphDelete(f"me/messages/{messageId}")
            if "error" in result:
                return result
            return {"success": True, "messageId": messageId, "hardDelete": True}
        return await self.moveMail(messageId, "deleteditems")

    async def markMailAsRead(self, messageId: str) -> Dict[str, Any]:
        """Mark a message as read (sets ``isRead=true``)."""
        import json
        payload = json.dumps({"isRead": True}).encode("utf-8")
        result = await self._graphPatch(f"me/messages/{messageId}", payload)
        if "error" in result:
            return result
        return {"success": True, "messageId": messageId, "isRead": True}

    async def markMailAsUnread(self, messageId: str) -> Dict[str, Any]:
        """Mark a message as unread (sets ``isRead=false``)."""
        import json
        payload = json.dumps({"isRead": False}).encode("utf-8")
        result = await self._graphPatch(f"me/messages/{messageId}", payload)
        if "error" in result:
            return result
        return {"success": True, "messageId": messageId, "isRead": False}

    async def flagMail(
        self, messageId: str,
        *,
        flagStatus: str = "flagged",
    ) -> Dict[str, Any]:
        """Set or clear the follow-up flag on a message.

        ``flagStatus`` accepts ``"flagged"`` (default), ``"complete"`` or
        ``"notFlagged"`` -- the three values Microsoft Graph recognises for
        ``followupFlag.flagStatus``.
        """
        import json
        if flagStatus not in ("flagged", "complete", "notFlagged"):
            return {"error": f"Invalid flagStatus '{flagStatus}'. Use one of: flagged, complete, notFlagged."}
        payload = json.dumps({"flag": {"flagStatus": flagStatus}}).encode("utf-8")
        result = await self._graphPatch(f"me/messages/{messageId}", payload)
        if "error" in result:
            return result
        return {"success": True, "messageId": messageId, "flagStatus": flagStatus}
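A short sketch of how a localized folder name flows through the resolution chain above; the `adapter` instance is an assumption, while the method names and the well-known map come from this diff:

    # "Papierkorb" hits _WELL_KNOWN_FOLDERS and resolves to "deleteditems";
    # an unknown display name falls back to a listMailFolders() match.
    result = await adapter.moveMail(messageId, "Papierkorb")
    if "error" in result:
        folders = await adapter.listMailFolders()  # inspect available displayNames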

# ---------------------------------------------------------------------------
# Teams Adapter (Stub)

@@ -366,7 +726,12 @@ class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
class TeamsAdapter(_GraphApiMixin, ServiceAdapter):
    """ServiceAdapter for Microsoft Teams -- browse joined teams and channels."""

-    async def browse(self, path: str, filter: Optional[str] = None) -> list:
+    async def browse(
+        self,
+        path: str,
+        filter: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> list:
        cleanPath = (path or "").strip("/")

        if not cleanPath:

@@ -408,7 +773,12 @@ class TeamsAdapter(_GraphApiMixin, ServiceAdapter):
    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Teams upload not implemented"}

-    async def search(self, query: str, path: Optional[str] = None) -> list:
+    async def search(
+        self,
+        query: str,
+        path: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> list:
        return []

@@ -419,7 +789,12 @@ class TeamsAdapter(_GraphApiMixin, ServiceAdapter):
class OneDriveAdapter(_GraphApiMixin, ServiceAdapter):
    """ServiceAdapter stub for OneDrive (personal drive)."""

-    async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
+    async def browse(
+        self,
+        path: str,
+        filter: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> List[ExternalEntry]:
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            endpoint = "me/drive/root/children"

@@ -432,6 +807,8 @@ class OneDriveAdapter(_GraphApiMixin, ServiceAdapter):
        entries = [_graphItemToExternalEntry(item, path) for item in result.get("value", [])]
        if filter:
            entries = [e for e in entries if _matchFilter(e, filter)]
+        if limit is not None:
+            entries = entries[: max(1, int(limit))]
        return entries

    async def download(self, path: str) -> bytes:

@@ -447,13 +824,21 @@ class OneDriveAdapter(_GraphApiMixin, ServiceAdapter):
        endpoint = f"me/drive/root:/{uploadPath}:/content"
        return await self._graphPut(endpoint, data)

-    async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
+    async def search(
+        self,
+        query: str,
+        path: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> List[ExternalEntry]:
        safeQuery = query.replace("'", "''")
        endpoint = f"me/drive/root/search(q='{safeQuery}')"
        result = await self._graphGet(endpoint)
        if "error" in result:
            return []
-        return [_graphItemToExternalEntry(item) for item in result.get("value", [])]
+        entries = [_graphItemToExternalEntry(item) for item in result.get("value", [])]
+        if limit is not None:
+            entries = entries[: max(1, int(limit))]
+        return entries

# ---------------------------------------------------------------------------

130  modules/datamodels/datamodelBackgroundJob.py  Normal file

@@ -0,0 +1,130 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Background job models: generic, reusable infrastructure for long-running tasks.

A `BackgroundJob` record tracks the lifecycle of one async task that must not block
the calling HTTP request. Any caller (HTTP route, AI tool, scheduled task) can:

1. Register a handler once via `registerJobHandler(jobType, handler)`.
2. Submit work via `startJob(jobType, payload, ...)` which returns a `jobId`
   immediately and runs the handler in the background.
3. Poll `getJobStatus(jobId)` (HTTP `GET /api/jobs/{jobId}`) until `status` is
   one of {SUCCESS, ERROR, CANCELLED}.

See `modules.serviceCenter.services.serviceBackgroundJobs.mainBackgroundJobService`.
"""

from typing import Any, Dict, Optional
from enum import Enum
from datetime import datetime, timezone
import uuid

from pydantic import Field

from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.i18nRegistry import i18nModel


class BackgroundJobStatusEnum(str, Enum):
    """Lifecycle status of a background job."""
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCESS = "SUCCESS"
    ERROR = "ERROR"
    CANCELLED = "CANCELLED"


TERMINAL_JOB_STATUSES = {
    BackgroundJobStatusEnum.SUCCESS,
    BackgroundJobStatusEnum.ERROR,
    BackgroundJobStatusEnum.CANCELLED,
}


@i18nModel("Hintergrund-Job")
class BackgroundJob(PowerOnModel):
    """Generic record describing a long-running asynchronous task.

    Scope: the combination of `mandateId` and optionally `featureInstanceId`
    is used for access control on `GET /api/jobs/{jobId}`.
    """

    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()),
        description="Primary key",
        json_schema_extra={"label": "ID"},
    )
    jobType: str = Field(
        ...,
        description="Handler key registered via registerJobHandler() (e.g. 'trusteeAccountingSync')",
        json_schema_extra={"label": "Typ"},
    )
    mandateId: Optional[str] = Field(
        None,
        description="Mandate scope (used for access checks). None for system-wide jobs.",
        json_schema_extra={
            "label": "Mandanten-ID",
            "fk_target": {"db": "poweron_app", "table": "Mandate"},
        },
    )
    featureInstanceId: Optional[str] = Field(
        None,
        description="Feature instance scope (optional)",
        json_schema_extra={
            "label": "Feature-Instanz",
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
        },
    )
    triggeredBy: Optional[str] = Field(
        None,
        description="UserId or 'ai-tool:<toolName>' / 'scheduler:<jobName>'",
        json_schema_extra={"label": "Ausgeloest von"},
    )

    status: str = Field(
        default=BackgroundJobStatusEnum.PENDING.value,
        description="Current lifecycle status",
        json_schema_extra={"label": "Status"},
    )
    progress: int = Field(
        default=0,
        description="Progress 0..100 (best-effort; may stay 0 for handlers that cannot estimate)",
        json_schema_extra={"label": "Fortschritt"},
    )
    progressMessage: Optional[str] = Field(
        None,
        description="Human-readable current step (e.g. 'Importing journal entries...')",
        json_schema_extra={"label": "Fortschritts-Nachricht"},
    )

    payload: Dict[str, Any] = Field(
        default_factory=dict,
        description="Job input parameters (JSON)",
        json_schema_extra={"label": "Eingabe"},
    )
    result: Optional[Dict[str, Any]] = Field(
        None,
        description="Handler return value on success (JSON)",
        json_schema_extra={"label": "Ergebnis"},
    )
    errorMessage: Optional[str] = Field(
        None,
        description="Truncated error message on failure (full stack trace in logs)",
        json_schema_extra={"label": "Fehler"},
    )

    createdAt: datetime = Field(
        default_factory=lambda: datetime.now(timezone.utc),
        description="When the job was submitted",
        json_schema_extra={"label": "Eingereicht"},
    )
    startedAt: Optional[datetime] = Field(
        None,
        description="When the handler began running",
        json_schema_extra={"label": "Gestartet"},
    )
    finishedAt: Optional[datetime] = Field(
        None,
        description="When the handler reached a terminal status",
        json_schema_extra={"label": "Beendet"},
    )
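A hedged usage sketch of the lifecycle the module docstring above describes. The three function names and the polling endpoint come from the docstring; their exact signatures, whether they are coroutines, and how they are imported from the background-job service are assumptions, and `runTrusteeAccountingSync` / `mandateId` are hypothetical placeholders:

    # 1) register once at startup (assumed signature):
    registerJobHandler("trusteeAccountingSync", runTrusteeAccountingSync)

    # 2) submit work; returns immediately with a jobId:
    jobId = startJob("trusteeAccountingSync", payload={"mandateId": mandateId})

    # 3) poll until the job reaches a terminal state (GET /api/jobs/{jobId}):
    job = getJobStatus(jobId)
    finished = job.status in {s.value for s in TERMINAL_JOB_STATUSES}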

@@ -176,7 +176,13 @@ class ChatWorkflow(PowerOnModel):
    ]})
    maxSteps: int = Field(default=10, description="Maximum number of iterations in dynamic mode", json_schema_extra={"label": "Max. Schritte", "frontend_type": "integer", "frontend_readonly": False, "frontend_required": False})
    expectedFormats: Optional[List[str]] = Field(None, description="List of expected file format extensions from user request (e.g., ['xlsx', 'pdf']). Extracted during intent analysis.", json_schema_extra={"label": "Erwartete Formate", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+    # Attached data sources (per-chat persistence so the chip-bar of the
+    # WorkspaceInput can be restored when the user re-opens the chat).
+    # Stored as JSONB list of UUIDs. Sources that no longer resolve (DS
+    # deleted in the meantime) are silently dropped on the frontend on load.
+    attachedDataSourceIds: Optional[List[str]] = Field(default_factory=list, description="IDs of DataSource records pinned to this chat (UDB attachments).", json_schema_extra={"label": "Angehängte Datenquellen", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+    attachedFeatureDataSourceIds: Optional[List[str]] = Field(default_factory=list, description="IDs of FeatureDataSource records pinned to this chat (UDB feature attachments).", json_schema_extra={"label": "Angehängte Feature-Datenquellen", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})

    # Helper methods for execution state management
    def getRoundIndex(self) -> int:
        """Get current round index"""

@@ -60,7 +60,14 @@ class FileItem(PowerOnModel):
    )
    fileSize: int = Field(
        description="Size of the file in bytes",
-        json_schema_extra={"label": "Dateigroesse", "frontend_type": "integer", "frontend_readonly": True, "frontend_required": False},
+        json_schema_extra={
+            "label": "Dateigroesse",
+            "frontend_type": "integer",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            # Auto-scale byte units (B / KB / MB / GB / TB), right-aligned in tables.
+            "frontend_format": "R:b",
+        },
    )
    tags: Optional[List[str]] = Field(
        default=None,

@@ -70,6 +70,57 @@ class UserPermissions(BaseModel):
    )


class InvoiceAddress(BaseModel):
    """
    Historische strukturierte Rechnungsadresse. NICHT MEHR aktiv verwendet
    -- die Felder sind seit 2026-04-20 als ``invoiceCompanyName`` /
    ``invoiceLine1`` / ``invoicePostalCode`` / ... direkt auf ``Mandate``
    deklariert (siehe dort). Diese Klasse bleibt nur noch erhalten, falls
    Bestandscode irgendwo das Schema dokumentiert oder alte JSONB-Dicts
    serialisiert; sie wird vom Mandate-Modell nicht mehr referenziert.
    """
    companyName: Optional[str] = Field(
        default=None,
        description="Firmenname / Empfaenger der Rechnung (falls abweichend vom Mandate.label)",
    )
    contactName: Optional[str] = Field(
        default=None,
        description="Ansprechperson (z. B. Buchhaltung)",
    )
    email: Optional[EmailStr] = Field(
        default=None,
        description="E-Mail-Adresse fuer den Versand der Stripe-Rechnung",
    )
    line1: Optional[str] = Field(
        default=None,
        description="Strasse + Nr. (Adresszeile 1)",
    )
    line2: Optional[str] = Field(
        default=None,
        description="Adresszeile 2 (z. B. c/o, Postfach)",
    )
    postalCode: Optional[str] = Field(
        default=None,
        description="PLZ",
    )
    city: Optional[str] = Field(
        default=None,
        description="Ort",
    )
    state: Optional[str] = Field(
        default=None,
        description="Kanton / Bundesland",
    )
    country: Optional[str] = Field(
        default="CH",
        description="ISO-3166 Alpha-2 Laendercode (Default: CH)",
    )
    vatNumber: Optional[str] = Field(
        default=None,
        description="UID / MWST-Nummer des Empfaengers (z. B. CHE-123.456.789 MWST)",
    )


@i18nModel("Mandant")
class Mandate(PowerOnModel):
    """

@@ -111,18 +162,194 @@ class Mandate(PowerOnModel):
    enabled: bool = Field(
        default=True,
        description="Indicates whether the mandate is enabled",
-        json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False, "label": "Aktiviert"},
+        json_schema_extra={
+            "frontend_type": "checkbox",
+            "frontend_readonly": False,
+            "frontend_required": False,
+            "label": "Aktiviert",
+            # Render boolean as i18n-translatable label tuple [true, neutral, false].
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
    )
    isSystem: bool = Field(
        default=False,
        description="Whether this is a system mandate (e.g. root mandate). Cannot be deleted.",
-        json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False, "label": "System-Mandant"},
+        json_schema_extra={
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "label": "System-Mandant",
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
    )
    deletedAt: Optional[float] = Field(
        default=None,
        description="Timestamp when the mandate was soft-deleted. After 30 days, hard-delete is triggered.",
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gelöscht am"},
    )

    # ------------------------------------------------------------------
    # Rechnungsadresse (CH-Treuhand-konform, strukturiert)
    # ------------------------------------------------------------------
    # Einzelne Felder statt eines nested Objekts/Freitexts, damit
    # (a) der FormGenerator sie automatisch als Eingabezeilen rendert,
    # (b) der Stripe-Checkout sie 1:1 in `customer.address`,
    #     `customer.email`, `customer.tax_id_data` mappen kann
    #     (Stripe verlangt die Adresse strukturiert, nicht als Freitext).
    # ``order`` 200-209 gruppiert die Felder visuell am Ende des Formulars.
    invoiceCompanyName: Optional[str] = Field(
        default=None,
        description="Firmenname / Empfaenger der Rechnung (falls abweichend vom Voller Name).",
        max_length=200,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - Firma",
            "order": 200,
            "placeholder": "Muster Treuhand AG",
        },
    )
    invoiceContactName: Optional[str] = Field(
        default=None,
        description="Ansprechperson z. H. (z. B. Buchhaltung).",
        max_length=200,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - z. H.",
            "order": 201,
            "placeholder": "Buchhaltung",
        },
    )
    invoiceEmail: Optional[str] = Field(
        default=None,
        description="E-Mail-Adresse fuer den Versand der Stripe-Rechnung.",
        max_length=254,
        json_schema_extra={
            "frontend_type": "email",
            "frontend_required": False,
            "label": "Rechnungsadresse - E-Mail",
            "order": 202,
            "placeholder": "rechnungen@firma.ch",
        },
    )
    invoiceLine1: Optional[str] = Field(
        default=None,
        description="Adresszeile 1 (Strasse + Nr.). Pflichtfeld fuer Stripe-Customer-Adresse.",
        max_length=200,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - Strasse + Nr.",
            "order": 203,
            "placeholder": "Bahnhofstrasse 1",
        },
    )
    invoiceLine2: Optional[str] = Field(
        default=None,
        description="Adresszeile 2 (z. B. c/o, Postfach).",
        max_length=200,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - Adresszusatz",
            "order": 204,
            "placeholder": "c/o Buchhaltung",
        },
    )
    invoicePostalCode: Optional[str] = Field(
        default=None,
        description="PLZ.",
        max_length=20,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - PLZ",
            "order": 205,
            "placeholder": "8000",
        },
    )
    invoiceCity: Optional[str] = Field(
        default=None,
        description="Ort.",
        max_length=100,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - Ort",
            "order": 206,
            "placeholder": "Zuerich",
        },
    )
    invoiceState: Optional[str] = Field(
        default=None,
        description="Kanton / Bundesland (optional).",
        max_length=100,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - Kanton",
            "order": 207,
            "placeholder": "ZH",
        },
    )
    invoiceCountry: Optional[str] = Field(
        default="CH",
        description="ISO-3166 Alpha-2 Laendercode (Default: CH).",
        max_length=2,
        pattern=r"^[A-Z]{2}$",
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - Land (ISO)",
            "order": 208,
            "placeholder": "CH",
        },
    )
    invoiceVatNumber: Optional[str] = Field(
        default=None,
        description="UID / MWST-Nummer des Empfaengers (z. B. CHE-123.456.789 MWST). Wird Stripe als `tax_id_data` mitgegeben.",
        max_length=50,
        json_schema_extra={
            "frontend_type": "text",
            "frontend_required": False,
            "label": "Rechnungsadresse - UID-Nr.",
            "order": 209,
            "placeholder": "CHE-123.456.789 MWST",
        },
    )

    @field_validator(
        "invoiceCompanyName",
        "invoiceContactName",
        "invoiceEmail",
        "invoiceLine1",
        "invoiceLine2",
        "invoicePostalCode",
        "invoiceCity",
        "invoiceState",
        "invoiceVatNumber",
        mode="before",
    )
    @classmethod
    def _coerceInvoiceTextField(cls, v):
        """Trim incoming address strings; treat empty as ``None``."""
        if v is None:
            return None
        if isinstance(v, str):
            trimmed = v.strip()
            return trimmed or None
        return v

    @field_validator("invoiceCountry", mode="before")
    @classmethod
    def _coerceInvoiceCountry(cls, v):
        """Normalize country code: trim, upper-case, empty -> default ``CH``."""
        if v is None:
            return "CH"
        if isinstance(v, str):
            trimmed = v.strip().upper()
            return trimmed or "CH"
        return v

    @field_validator('isSystem', mode='before')
    @classmethod
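A hedged sketch of the Stripe mapping the comment block above points at. The invoice* field names come from this diff; the use of the stock stripe-python Customer API (and the `ch_vat` tax-id type) is an assumption about how the checkout code consumes these fields, not a quote of it:

    import stripe

    params = {
        "name": mandate.invoiceCompanyName or mandate.label,
        "email": mandate.invoiceEmail,
        "address": {
            "line1": mandate.invoiceLine1,
            "line2": mandate.invoiceLine2,
            "postal_code": mandate.invoicePostalCode,
            "city": mandate.invoiceCity,
            "state": mandate.invoiceState,
            "country": mandate.invoiceCountry,
        },
    }
    if mandate.invoiceVatNumber:
        params["tax_id_data"] = [{"type": "ch_vat", "value": mandate.invoiceVatNumber}]
    customer = stripe.Customer.create(**params)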

@@ -332,7 +559,13 @@ class User(PowerOnModel):
    enabled: bool = Field(
        default=True,
        description="Indicates whether the user is enabled",
-        json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False, "label": "Aktiviert"},
+        json_schema_extra={
+            "frontend_type": "checkbox",
+            "frontend_readonly": False,
+            "frontend_required": False,
+            "label": "Aktiviert",
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
    )

    isSysAdmin: bool = Field(

@@ -5,7 +5,7 @@ Creates a complete demo environment with two mandates, one user,
and all feature instances needed for the investor live demo.

Mandates:
-- HappyLife AG (happylife) — Dokumentenablage, Buchhaltung, Automationen, Chatbot, Datenschutz
+- HappyLife AG (happylife) — Dokumentenablage, Buchhaltung, Automationen, Datenschutz
- Alpina Treuhand AG (alpina) — Dokumentenablage, 3x Treuhand-Kunden, Automationen, Datenschutz

User:

@@ -45,7 +45,6 @@ _FEATURES_HAPPYLIFE = [
    {"code": "workspace", "label": "Dokumentenablage"},
    {"code": "trustee", "label": "Buchhaltung"},
    {"code": "graphicalEditor", "label": "Automationen"},
-    {"code": "chatbot", "label": "Chatbot"},
    {"code": "neutralization", "label": "Datenschutz"},
]
_FEATURES_ALPINA = [

@@ -63,7 +62,7 @@ class InvestorDemo2026(_BaseDemoConfig):
    label = "Investor Demo April 2026"
    description = (
        "Two mandates (HappyLife AG + Alpina Treuhand AG), one SysAdmin user, "
-        "trustee with RMA, workspace, graph editor, chatbot, and neutralization."
+        "trustee with RMA, workspace, graph editor, and neutralization."
    )

    # ------------------------------------------------------------------
768
modules/demoConfigs/pwgDemo2026.py
Normal file
768
modules/demoConfigs/pwgDemo2026.py
Normal file
|
|
@ -0,0 +1,768 @@
|
||||||
|
"""PWG Pilot Demo (April 2026)
|
||||||
|
|
||||||
|
Bootstraps a complete PWG-Pilot demo environment in an empty dev/demo install:
|
||||||
|
|
||||||
|
- 1 mandate "Stiftung PWG"
|
||||||
|
- 1 SysAdmin demo user "pwg.demo"
|
||||||
|
- 4 features: workspace, trustee (BUHA PWG), graphicalEditor (PWG Automationen),
|
||||||
|
neutralization (Datenschutz)
|
||||||
|
- Trustee seed-data (5 fictitious tenants with monthly rent journal lines for
|
||||||
|
the current year, loaded from ``demoData/pwg/_seedTrusteeData.json``)
|
||||||
|
- Pilot workflow imported from
|
||||||
|
``demoData/workflows/pwg-mietzinsbestaetigung-pilot.workflow.json``
|
||||||
|
(active=false — user activates manually after triggering once).
|
||||||
|
|
||||||
|
Idempotent: ``load()`` skips anything that already exists; ``remove()`` deletes
|
||||||
|
mandate, user, seed data and imported workflow cleanly.
|
||||||
|
|
||||||
|
Pattern: subclass of :class:`_BaseDemoConfig`, auto-discovered by
|
||||||
|
``demoConfigs/__init__.py``. See ``investorDemo2026.py`` for the reference
|
||||||
|
implementation we mirror here.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from modules.demoConfigs._baseDemoConfig import _BaseDemoConfig
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
_DEMO_PREFIX = "demo-pwg2026"
|
||||||
|
|
||||||
|
_MANDATE_PWG = {
|
||||||
|
"name": "stiftung-pwg",
|
||||||
|
"label": "Stiftung PWG",
|
||||||
|
}
|
||||||
|
|
||||||
|
_USER = {
|
||||||
|
"username": "pwg.demo",
|
||||||
|
"email": "pwg.demo@poweron.swiss",
|
||||||
|
"fullName": "PWG Demo Sachbearbeiter",
|
||||||
|
"password": "pwg.demo.2026",
|
||||||
|
"language": "de",
|
||||||
|
}
|
||||||
|
|
||||||
|
_FEATURES_PWG = [
|
||||||
|
{"code": "workspace", "label": "Dokumentenablage PWG"},
|
||||||
|
{"code": "trustee", "label": "Buchhaltung PWG"},
|
||||||
|
{"code": "graphicalEditor", "label": "PWG Automationen"},
|
||||||
|
{"code": "neutralization", "label": "Datenschutz"},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Filename markers used to identify the imported pilot workflow on remove().
|
||||||
|
_PILOT_WORKFLOW_LABEL = "PWG Pilot: Jahresmietzinsbestätigung"
|
||||||
|
_PILOT_WORKFLOW_FILE = "pwg-mietzinsbestaetigung-pilot.workflow.json"
|
||||||
|
_SEED_TRUSTEE_FILE = "_seedTrusteeData.json"
|
||||||
|
|
||||||
|
|
||||||
|
class PwgDemo2026(_BaseDemoConfig):
|
||||||
|
code = "pwg-demo-2026"
|
||||||
|
label = "PWG Pilot Demo (Mietzinsbestätigungen)"
|
||||||
|
description = (
|
||||||
|
"Stiftung PWG, ein Demo-Sachbearbeiter, Trustee mit fiktiven Mietern, "
|
||||||
|
"Graph-Editor mit dem Pilot-Workflow für Jahresmietzinsbestätigungen "
|
||||||
|
"(als File importiert, active=false). Idempotent."
|
||||||
|
)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# load
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def load(self, db) -> Dict[str, Any]:
|
||||||
|
summary: Dict[str, Any] = {"created": [], "skipped": [], "errors": []}
|
||||||
|
|
||||||
|
try:
|
||||||
|
mandateId = self._ensureMandate(db, _MANDATE_PWG, summary)
|
||||||
|
userId = self._ensureUser(db, summary)
|
||||||
|
self._ensurePlatformAdminFlag(db, userId, summary)
|
||||||
|
|
||||||
|
if mandateId and userId:
|
||||||
|
self._ensureMembership(db, userId, mandateId, _MANDATE_PWG["label"], summary)
|
||||||
|
self._ensureFeatures(db, mandateId, _MANDATE_PWG["label"], _FEATURES_PWG, summary)
|
||||||
|
self._ensureFeatureAccess(db, userId, mandateId, _MANDATE_PWG["label"], summary)
|
||||||
|
self._ensureNeutralizationConfig(db, mandateId, userId, summary)
|
||||||
|
self._ensureBilling(db, mandateId, _MANDATE_PWG["label"], summary)
|
||||||
|
|
||||||
|
trusteeInstanceId = self._getFeatureInstanceId(db, mandateId, "trustee", "Buchhaltung PWG")
|
||||||
|
if trusteeInstanceId:
|
||||||
|
self._ensureTrusteeSeed(mandateId, trusteeInstanceId, summary)
|
||||||
|
|
||||||
|
graphInstanceId = self._getFeatureInstanceId(db, mandateId, "graphicalEditor", "PWG Automationen")
|
||||||
|
if graphInstanceId:
|
||||||
|
self._ensurePilotWorkflow(mandateId, graphInstanceId, summary)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"PWG demo load failed: {e}", exc_info=True)
|
||||||
|
summary["errors"].append(str(e))
|
||||||
|
|
||||||
|
return summary
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# remove
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
def remove(self, db) -> Dict[str, Any]:
|
||||||
|
summary: Dict[str, Any] = {"removed": [], "errors": []}
|
||||||
|
|
||||||
|
from modules.datamodels.datamodelMembership import UserMandate
|
||||||
|
from modules.datamodels.datamodelUam import Mandate, UserInDB
|
||||||
|
|
||||||
|
try:
|
||||||
|
existing = db.getRecordset(Mandate, recordFilter={"name": _MANDATE_PWG["name"]})
|
||||||
|
for m in existing:
|
||||||
|
mid = m.get("id")
|
||||||
|
self._removeMandateData(db, mid, _MANDATE_PWG["label"], summary)
|
||||||
|
db.recordDelete(Mandate, mid)
|
||||||
|
summary["removed"].append(f"Mandate {_MANDATE_PWG['label']} ({mid})")
|
||||||
|
logger.info(f"Removed mandate {_MANDATE_PWG['label']} ({mid})")
|
||||||
|
except Exception as e:
|
||||||
|
summary["errors"].append(f"Remove mandate {_MANDATE_PWG['label']}: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
existing = db.getRecordset(UserInDB, recordFilter={"username": _USER["username"]})
|
||||||
|
for u in existing:
|
||||||
|
uid = u.get("id")
|
||||||
|
memberships = db.getRecordset(UserMandate, recordFilter={"userId": uid}) or []
|
||||||
|
for mem in memberships:
|
||||||
|
try:
|
||||||
|
db.recordDelete(UserMandate, mem.get("id"))
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
db.recordDelete(UserInDB, uid)
|
||||||
|
summary["removed"].append(f"User {_USER['username']} ({uid})")
|
||||||
|
logger.info(f"Removed user {_USER['username']} ({uid})")
|
||||||
|
except Exception as e:
|
||||||
|
summary["errors"].append(f"Remove user: {e}")
|
||||||
|
|
||||||
|
return summary
|
||||||
|
|
||||||
|
# ==================================================================
|
||||||
|
# — load helpers (mostly mirrors of investorDemo2026.py)
|
||||||
|
# ==================================================================
|
||||||
|
|
||||||
|
def _ensureMandate(self, db, mandateDef: Dict, summary: Dict) -> Optional[str]:
|
||||||
|
from modules.datamodels.datamodelUam import Mandate
|
||||||
|
from modules.interfaces.interfaceBootstrap import copySystemRolesToMandate
|
||||||
|
|
||||||
|
existing = db.getRecordset(Mandate, recordFilter={"name": mandateDef["name"]})
|
||||||
|
if existing:
|
||||||
|
mid = existing[0].get("id")
|
||||||
|
summary["skipped"].append(f"Mandate {mandateDef['label']} exists ({mid})")
|
||||||
|
return mid
|
||||||
|
|
||||||
|
mandate = Mandate(name=mandateDef["name"], label=mandateDef["label"], enabled=True)
|
||||||
|
created = db.recordCreate(Mandate, mandate)
|
||||||
|
mid = created.get("id")
|
||||||
|
logger.info(f"Created mandate {mandateDef['label']} ({mid})")
|
||||||
|
summary["created"].append(f"Mandate {mandateDef['label']}")
|
||||||
|
copySystemRolesToMandate(db, mid)
|
||||||
|
return mid
|
||||||
|
|
||||||
|
def _ensureUser(self, db, summary: Dict) -> Optional[str]:
|
||||||
|
from modules.datamodels.datamodelUam import AuthAuthority, UserInDB
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
existing = db.getRecordset(UserInDB, recordFilter={"username": _USER["username"]})
|
||||||
|
if existing:
|
||||||
|
uid = existing[0].get("id")
|
||||||
|
summary["skipped"].append(f"User {_USER['username']} exists ({uid})")
|
||||||
|
return uid
|
||||||
|
|
||||||
|
pwdContext = CryptContext(schemes=["argon2"], deprecated="auto")
|
||||||
|
user = UserInDB(
|
||||||
|
username=_USER["username"],
|
||||||
|
email=_USER["email"],
|
||||||
|
fullName=_USER["fullName"],
|
||||||
|
enabled=True,
|
||||||
|
language=_USER["language"],
|
||||||
|
isSysAdmin=True,
|
||||||
|
authenticationAuthority=AuthAuthority.LOCAL,
|
||||||
|
hashedPassword=pwdContext.hash(_USER["password"]),
|
||||||
|
)
|
||||||
|
created = db.recordCreate(UserInDB, user)
|
||||||
|
uid = created.get("id")
|
||||||
|
logger.info(f"Created user {_USER['username']} ({uid})")
|
||||||
|
summary["created"].append(f"User {_USER['fullName']}")
|
||||||
|
return uid
|
||||||
|
|
||||||
|
def _ensurePlatformAdminFlag(self, db, userId: Optional[str], summary: Dict):
|
||||||
|
from modules.datamodels.datamodelUam import UserInDB
|
||||||
|
if not userId:
|
||||||
|
return
|
||||||
|
existing = db.getRecord(UserInDB, userId)
|
||||||
|
if not existing:
|
||||||
|
summary["errors"].append(f"User {userId} not found — cannot set isPlatformAdmin")
|
||||||
|
return
|
||||||
|
currentFlag = bool(existing.get("isPlatformAdmin", False)) if isinstance(existing, dict) else bool(getattr(existing, "isPlatformAdmin", False))
|
||||||
|
if currentFlag:
|
||||||
|
summary["skipped"].append("isPlatformAdmin already set")
|
||||||
|
return
|
||||||
|
db.recordModify(UserInDB, userId, {"isPlatformAdmin": True})
|
||||||
|
summary["created"].append("isPlatformAdmin flag")
|
||||||
|
|
||||||
|
def _ensureMembership(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
||||||
|
from modules.datamodels.datamodelMembership import UserMandate, UserMandateRole
|
||||||
|
from modules.datamodels.datamodelRbac import Role
|
||||||
|
|
||||||
|
existing = db.getRecordset(UserMandate, recordFilter={"userId": userId, "mandateId": mandateId})
|
||||||
|
if existing:
|
||||||
|
userMandateId = existing[0].get("id")
|
||||||
|
summary["skipped"].append(f"Membership {_USER['username']} -> {mandateLabel} exists")
|
||||||
|
else:
|
||||||
|
um = UserMandate(userId=userId, mandateId=mandateId, enabled=True)
|
||||||
|
created = db.recordCreate(UserMandate, um)
|
||||||
|
userMandateId = created.get("id")
|
||||||
|
summary["created"].append(f"Membership {_USER['username']} -> {mandateLabel}")
|
||||||
|
|
||||||
|
adminRoles = db.getRecordset(Role, recordFilter={"mandateId": mandateId, "roleLabel": "admin"})
|
||||||
|
if adminRoles:
|
||||||
|
adminRoleId = adminRoles[0].get("id")
|
||||||
|
existingRole = db.getRecordset(UserMandateRole, recordFilter={"userMandateId": userMandateId, "roleId": adminRoleId})
|
||||||
|
if not existingRole:
|
||||||
|
umr = UserMandateRole(userMandateId=userMandateId, roleId=adminRoleId)
|
||||||
|
db.recordCreate(UserMandateRole, umr)
|
||||||
|
|
||||||
|
def _ensureFeatures(self, db, mandateId: str, mandateLabel: str, featureDefs: List[Dict], summary: Dict):
|
||||||
|
from modules.interfaces.interfaceFeatures import getFeatureInterface
|
||||||
|
|
||||||
|
fi = getFeatureInterface(db)
|
||||||
|
existingInstances = fi.getFeatureInstancesForMandate(mandateId)
|
||||||
|
existingLabels = {
|
||||||
|
(inst.label if hasattr(inst, "label") else inst.get("label", ""))
|
||||||
|
for inst in existingInstances
|
||||||
|
}
|
||||||
|
|
||||||
|
for featureDef in featureDefs:
|
||||||
|
code = featureDef["code"]
|
||||||
|
instanceLabel = featureDef["label"]
|
||||||
|
if instanceLabel in existingLabels:
|
||||||
|
summary["skipped"].append(f"Feature '{instanceLabel}' in {mandateLabel} exists")
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
fi.createFeatureInstance(
|
||||||
|
featureCode=code,
|
||||||
|
mandateId=mandateId,
|
||||||
|
label=instanceLabel,
|
||||||
|
enabled=True,
|
||||||
|
copyTemplateRoles=True,
|
||||||
|
)
|
||||||
|
summary["created"].append(f"Feature '{instanceLabel}' in {mandateLabel}")
|
||||||
|
except Exception as e:
|
||||||
|
summary["errors"].append(f"Feature '{instanceLabel}' in {mandateLabel}: {e}")
|
||||||
|
|
||||||
|
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
||||||
|
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||||
|
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
||||||
|
from modules.datamodels.datamodelRbac import Role
|
||||||
|
|
||||||
|
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
||||||
|
|
||||||
|
for inst in instances:
|
||||||
|
instId = inst.get("id")
|
||||||
|
featureCode = inst.get("featureCode", "")
|
||||||
|
if not instId:
|
||||||
|
continue
|
||||||
|
|
||||||
|
existing = db.getRecordset(FeatureAccess, recordFilter={"userId": userId, "featureInstanceId": instId})
|
||||||
|
if existing:
|
||||||
|
featureAccessId = existing[0].get("id")
|
||||||
|
summary["skipped"].append(f"FeatureAccess {featureCode} in {mandateLabel} exists")
|
||||||
|
else:
|
||||||
|
fa = FeatureAccess(userId=userId, featureInstanceId=instId, enabled=True)
|
||||||
|
created = db.recordCreate(FeatureAccess, fa)
|
||||||
|
featureAccessId = created.get("id")
|
||||||
|
summary["created"].append(f"FeatureAccess {featureCode} in {mandateLabel}")
|
||||||
|
|
||||||
|
adminRoleLabel = f"{featureCode}-admin"
|
||||||
|
adminRoles = db.getRecordset(Role, recordFilter={
|
||||||
|
"featureInstanceId": instId,
|
||||||
|
"roleLabel": adminRoleLabel,
|
||||||
|
})
|
||||||
|
if adminRoles:
|
||||||
|
adminRoleId = adminRoles[0].get("id")
|
||||||
|
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||||
|
"featureAccessId": featureAccessId,
|
||||||
|
"roleId": adminRoleId,
|
||||||
|
})
|
||||||
|
if not existingRole:
|
||||||
|
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||||
|
db.recordCreate(FeatureAccessRole, far)
|
||||||
|
|
||||||
|
def _ensureNeutralizationConfig(self, db, mandateId: Optional[str], userId: Optional[str], summary: Dict):
|
||||||
|
if not mandateId or not userId:
|
||||||
|
return
|
||||||
|
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||||
|
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId, "featureCode": "neutralization"})
|
||||||
|
if not instances:
|
||||||
|
return
|
||||||
|
instanceId = instances[0].get("id")
|
||||||
|
try:
|
||||||
|
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutraliserConfig
|
||||||
|
existing = db.getRecordset(DataNeutraliserConfig, recordFilter={"featureInstanceId": instanceId})
|
||||||
|
if existing:
|
||||||
|
summary["skipped"].append(f"Neutralization config for mandate {mandateId} exists")
|
||||||
|
return
|
||||||
|
config = DataNeutraliserConfig(
|
||||||
|
featureInstanceId=instanceId,
|
||||||
|
mandateId=mandateId,
|
||||||
|
userId=userId,
|
||||||
|
enabled=True,
|
||||||
|
scope="featureInstance",
|
||||||
|
)
|
||||||
|
db.recordCreate(DataNeutraliserConfig, config)
|
||||||
|
summary["created"].append(f"Neutralization config for mandate {mandateId}")
|
||||||
|
except Exception as e:
|
||||||
|
summary["errors"].append(f"Neutralization config: {e}")
|
||||||
|
|
||||||
|
def _ensureBilling(self, db, mandateId: Optional[str], mandateLabel: str, summary: Dict):
|
||||||
|
if not mandateId:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
from modules.datamodels.datamodelBilling import BillingSettings
|
||||||
|
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||||
|
|
||||||
|
billingInterface = _getRootInterface()
|
||||||
|
existingSettings = billingInterface.getSettings(mandateId)
|
||||||
|
if existingSettings:
|
||||||
|
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
||||||
|
return
|
||||||
|
settings = BillingSettings(
|
||||||
|
mandateId=mandateId,
|
||||||
|
warningThresholdPercent=10.0,
|
||||||
|
notifyOnWarning=True,
|
||||||
|
)
|
||||||
|
billingInterface.db.recordCreate(BillingSettings, settings)
|
||||||
|
summary["created"].append(f"Billing settings for {mandateLabel}")
|
||||||
|
except Exception as e:
|
||||||
|
summary["errors"].append(f"Billing for {mandateLabel}: {e}")
|
||||||
|
|
||||||
|
def _getFeatureInstanceId(self, db, mandateId: str, featureCode: str, label: str) -> Optional[str]:
|
||||||
|
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||||
|
instances = db.getRecordset(FeatureInstance, recordFilter={
|
||||||
|
"mandateId": mandateId,
|
||||||
|
"featureCode": featureCode,
|
||||||
|
"label": label,
|
||||||
|
}) or []
|
||||||
|
if instances:
|
||||||
|
return instances[0].get("id")
|
||||||
|
# fallback: any instance of that feature in the mandate
|
||||||
|
instances = db.getRecordset(FeatureInstance, recordFilter={
|
||||||
|
"mandateId": mandateId,
|
||||||
|
"featureCode": featureCode,
|
||||||
|
}) or []
|
||||||
|
return instances[0].get("id") if instances else None
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# PWG-specific helpers — Trustee seed-data + pilot-workflow import
|
||||||
|
    # ------------------------------------------------------------------

    def _ensureTrusteeSeed(self, mandateId: str, featureInstanceId: str, summary: Dict):
        """Idempotently load 5 fictitious tenants and their 12-month rent
        journal lines into the trustee database for this feature instance.

        Skips any tenant whose contact (matched by name+address) already
        exists, so re-running ``load()`` is safe.
        """
        seedPath = _demoDataDir() / "pwg" / _SEED_TRUSTEE_FILE
        if not seedPath.is_file():
            summary["errors"].append(f"PWG seed file missing: {seedPath}")
            return
        try:
            seed = json.loads(seedPath.read_text(encoding="utf-8"))
        except Exception as exc:
            summary["errors"].append(f"PWG seed file unreadable: {exc}")
            return

        try:
            trusteeDb = _openTrusteeDb()
        except Exception as exc:
            summary["errors"].append(f"Trustee DB connection failed: {exc}")
            return

        from modules.features.trustee.datamodelFeatureTrustee import (
            TrusteeDataAccount,
            TrusteeDataContact,
            TrusteeDataJournalEntry,
            TrusteeDataJournalLine,
        )

        rentAccountNumber = str(seed.get("rentAccount", "6000"))
        year = int(seed.get("year", datetime.now().year))

        # 1) Ensure rent account exists once
        existingAccounts = trusteeDb.getRecordset(TrusteeDataAccount, recordFilter={
            "featureInstanceId": featureInstanceId,
            "accountNumber": rentAccountNumber,
        }) or []
        if not existingAccounts:
            trusteeDb.recordCreate(TrusteeDataAccount, TrusteeDataAccount(
                accountNumber=rentAccountNumber,
                label=str(seed.get("rentAccountLabel", "Mietzinsertrag")),
                accountType="revenue",
                accountGroup="rental_income",
                currency="CHF",
                isActive=True,
                mandateId=mandateId,
                featureInstanceId=featureInstanceId,
            ))
            summary["created"].append(f"Trustee account {rentAccountNumber}")

        # 2) Ensure contacts + monthly journal entries
        createdTenants = 0
        skippedTenants = 0
        for tenant in seed.get("tenants", []):
            name = tenant.get("name", "")
            address = tenant.get("address", "")
            if not name:
                continue
            existing = trusteeDb.getRecordset(TrusteeDataContact, recordFilter={
                "featureInstanceId": featureInstanceId,
                "name": name,
                "address": address,
            }) or []
            if existing:
                skippedTenants += 1
                continue

            contact = TrusteeDataContact(
                externalId=tenant.get("contactNumber"),
                contactType="customer",
                contactNumber=tenant.get("contactNumber"),
                name=name,
                address=address,
                zip=tenant.get("zip"),
                city=tenant.get("city"),
                country=tenant.get("country"),
                email=tenant.get("email"),
                mandateId=mandateId,
                featureInstanceId=featureInstanceId,
            )
            trusteeDb.recordCreate(TrusteeDataContact, contact)
            createdTenants += 1

            # 12 monthly rent bookings (credit on rent account)
            monthlyRent = float(tenant.get("monthlyRentChf") or 0.0)
            if monthlyRent <= 0:
                continue
            for month in range(1, 13):
                bookingDate = f"{year}-{month:02d}-01"
                entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
                entry = TrusteeDataJournalEntry(
                    externalId=entryRef,
                    bookingDate=bookingDate,
                    reference=entryRef,
                    description=f"Mietzins {month:02d}/{year} {name}",
                    currency="CHF",
                    totalAmount=monthlyRent,
                    mandateId=mandateId,
                    featureInstanceId=featureInstanceId,
                )
                createdEntry = trusteeDb.recordCreate(TrusteeDataJournalEntry, entry)
                line = TrusteeDataJournalLine(
                    journalEntryId=createdEntry.get("id"),
                    accountNumber=rentAccountNumber,
                    debitAmount=0.0,
                    creditAmount=monthlyRent,
                    currency="CHF",
                    description=f"Mietzins {month:02d}/{year} {name} ({tenant.get('contactNumber')})",
                    mandateId=mandateId,
                    featureInstanceId=featureInstanceId,
                )
                trusteeDb.recordCreate(TrusteeDataJournalLine, line)

        if createdTenants:
            summary["created"].append(f"PWG seed: {createdTenants} tenants × 12 monthly journal lines")
        if skippedTenants:
            summary["skipped"].append(f"PWG seed: {skippedTenants} tenants already present")
    def _ensurePilotWorkflow(self, mandateId: str, featureInstanceId: str, summary: Dict):
        """Import the pilot workflow JSON into the graphical-editor DB.

        Uses the schema-aware import pipeline introduced in Phase 1
        (``_workflowFileSchema.envelopeToWorkflowData`` +
        ``GraphicalEditorObjects.importWorkflowFromDict``). The workflow is
        always created with ``active=False`` so a manual trigger is required
        — this matches the demo-bootstrap safety default.
        """
        envelopePath = _demoDataDir() / "workflows" / _PILOT_WORKFLOW_FILE
        if not envelopePath.is_file():
            summary["errors"].append(f"Pilot workflow file missing: {envelopePath}")
            return
        try:
            envelope = json.loads(envelopePath.read_text(encoding="utf-8"))
        except Exception as exc:
            summary["errors"].append(f"Pilot workflow file unreadable: {exc}")
            return

        try:
            geDb = _openGraphicalEditorDb()
        except Exception as exc:
            summary["errors"].append(f"GraphicalEditor DB connection failed: {exc}")
            return

        from modules.features.graphicalEditor._workflowFileSchema import (
            envelopeToWorkflowData,
            validateFileEnvelope,
        )
        from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
        from modules.features.graphicalEditor.nodeRegistry import STATIC_NODE_TYPES

        existing = geDb.getRecordset(AutoWorkflow, recordFilter={
            "mandateId": mandateId,
            "featureInstanceId": featureInstanceId,
            "label": _PILOT_WORKFLOW_LABEL,
        }) or []
        if existing:
            summary["skipped"].append(f"Pilot workflow already imported ({existing[0].get('id')})")
            return

        knownTypes = [n.get("id") for n in STATIC_NODE_TYPES if isinstance(n, dict) and n.get("id")]
        try:
            normalized, warnings = validateFileEnvelope(envelope, knownNodeTypes=knownTypes)
        except Exception as exc:
            summary["errors"].append(f"Pilot workflow envelope invalid: {exc}")
            return
        if warnings:
            summary["created"].append(f"Pilot workflow warnings: {warnings}")

        data = envelopeToWorkflowData(
            normalized,
            mandateId=mandateId,
            featureInstanceId=featureInstanceId,
        )
        # Inject the trustee feature-instance id into the parameters so the
        # node runtime resolves it without manual editor cleanup.
        trusteeInstanceId = self._guessTrusteeInstanceId(mandateId)
        if trusteeInstanceId:
            for node in data.get("graph", {}).get("nodes", []) or []:
                params = node.get("parameters") or {}
                if "featureInstanceId" in params and not params["featureInstanceId"]:
                    params["featureInstanceId"] = trusteeInstanceId
                node["parameters"] = params

        # Force-import: AutoWorkflow.create accepts our envelope-derived data
        # (graph, label, invocations, …) verbatim; we add ids/timestamps that
        # AutoWorkflow expects.
        record = AutoWorkflow(
            id=str(uuid.uuid4()),
            mandateId=mandateId,
            featureInstanceId=featureInstanceId,
            label=data.get("label") or _PILOT_WORKFLOW_LABEL,
            description=data.get("description") or "",
            tags=data.get("tags") or [],
            graph=data.get("graph") or {"nodes": [], "connections": []},
            invocations=data.get("invocations") or [],
            templateScope=data.get("templateScope") or "instance",
            sharedReadOnly=bool(data.get("sharedReadOnly")),
            notifyOnFailure=bool(data.get("notifyOnFailure", True)),
            active=False,
        )
        created = geDb.recordCreate(AutoWorkflow, record)
        summary["created"].append(f"Pilot workflow imported (active=false, id={created.get('id')})")
        logger.info(f"Imported pilot workflow into graphicalEditor instance {featureInstanceId}")
    def _guessTrusteeInstanceId(self, mandateId: str) -> Optional[str]:
        """Return the first trustee feature-instance id of the given mandate.

        The demo only ever creates one trustee feature in this mandate, so a
        first-hit lookup is sufficient and avoids depending on the label.
        """
        try:
            from modules.connectors.connectorDbPostgre import DatabaseConnector
            from modules.datamodels.datamodelFeatures import FeatureInstance
            from modules.shared.configuration import APP_CONFIG
            appDb = DatabaseConnector(
                dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
                dbDatabase="poweron_app",
                dbUser=APP_CONFIG.get("DB_USER"),
                dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
                dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
                userId=None,
            )
            instances = appDb.getRecordset(FeatureInstance, recordFilter={
                "mandateId": mandateId,
                "featureCode": "trustee",
            }) or []
            return instances[0].get("id") if instances else None
        except Exception as exc:
            logger.warning(f"Could not resolve trustee instance for mandate {mandateId}: {exc}")
            return None

    # ------------------------------------------------------------------
    # remove helpers
    # ------------------------------------------------------------------
    def _removeMandateData(self, db, mandateId: str, mandateLabel: str, summary: Dict):
        """Cascade-delete everything created by load() for this mandate."""
        from modules.datamodels.datamodelBilling import BillingSettings
        from modules.datamodels.datamodelChat import ChatLog, ChatMessage, ChatWorkflow
        from modules.datamodels.datamodelFeatures import FeatureInstance
        from modules.datamodels.datamodelMembership import (
            FeatureAccess,
            FeatureAccessRole,
            UserMandate,
            UserMandateRole,
        )
        from modules.datamodels.datamodelRbac import AccessRule, Role

        instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
        for inst in instances:
            instId = inst.get("id")
            featureCode = inst.get("featureCode", "")
            if not instId:
                continue

            if featureCode == "graphicalEditor":
                self._removeGraphicalEditorData(instId, mandateId, mandateLabel, summary)
            if featureCode == "trustee":
                self._removeTrusteeSeed(instId, mandateLabel, summary)
            if featureCode == "neutralization":
                self._removeNeutralizationData(db, instId, mandateLabel, summary)

            chatWorkflows = db.getRecordset(ChatWorkflow, recordFilter={"featureInstanceId": instId}) or []
            for wf in chatWorkflows:
                wfId = wf.get("id")
                for msg in db.getRecordset(ChatMessage, recordFilter={"workflowId": wfId}) or []:
                    db.recordDelete(ChatMessage, msg.get("id"))
                for log in db.getRecordset(ChatLog, recordFilter={"workflowId": wfId}) or []:
                    db.recordDelete(ChatLog, log.get("id"))
                db.recordDelete(ChatWorkflow, wfId)

            accesses = db.getRecordset(FeatureAccess, recordFilter={"featureInstanceId": instId}) or []
            for access in accesses:
                for role in db.getRecordset(FeatureAccessRole, recordFilter={"featureAccessId": access.get("id")}) or []:
                    db.recordDelete(FeatureAccessRole, role.get("id"))
                db.recordDelete(FeatureAccess, access.get("id"))

            db.recordDelete(FeatureInstance, instId)
            summary["removed"].append(f"FeatureInstance {featureCode} in {mandateLabel}")

        memberships = db.getRecordset(UserMandate, recordFilter={"mandateId": mandateId}) or []
        for um in memberships:
            for umr in db.getRecordset(UserMandateRole, recordFilter={"userMandateId": um.get("id")}) or []:
                db.recordDelete(UserMandateRole, umr.get("id"))
            db.recordDelete(UserMandate, um.get("id"))

        roles = db.getRecordset(Role, recordFilter={"mandateId": mandateId}) or []
        for role in roles:
            for rule in db.getRecordset(AccessRule, recordFilter={"roleId": role.get("id")}) or []:
                db.recordDelete(AccessRule, rule.get("id"))
            db.recordDelete(Role, role.get("id"))

        try:
            from modules.interfaces.interfaceDbBilling import _getRootInterface
            billingDb = _getRootInterface().db
            billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
            for bs in billingSettings:
                billingDb.recordDelete(BillingSettings, bs.get("id"))
        except Exception as e:
            summary["errors"].append(f"Billing cleanup for {mandateLabel}: {e}")
    def _removeGraphicalEditorData(self, featureInstanceId: str, mandateId: str, mandateLabel: str, summary: Dict):
        try:
            from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
                AutoRun,
                AutoStepLog,
                AutoTask,
                AutoVersion,
                AutoWorkflow,
            )
            geDb = _openGraphicalEditorDb()
            workflows = geDb.getRecordset(AutoWorkflow, recordFilter={
                "mandateId": mandateId,
                "featureInstanceId": featureInstanceId,
            }) or []
            for wf in workflows:
                wfId = wf.get("id")
                for version in geDb.getRecordset(AutoVersion, recordFilter={"workflowId": wfId}) or []:
                    geDb.recordDelete(AutoVersion, version.get("id"))
                for run in geDb.getRecordset(AutoRun, recordFilter={"workflowId": wfId}) or []:
                    runId = run.get("id")
                    for step in geDb.getRecordset(AutoStepLog, recordFilter={"runId": runId}) or []:
                        geDb.recordDelete(AutoStepLog, step.get("id"))
                    geDb.recordDelete(AutoRun, runId)
                for task in geDb.getRecordset(AutoTask, recordFilter={"workflowId": wfId}) or []:
                    geDb.recordDelete(AutoTask, task.get("id"))
                geDb.recordDelete(AutoWorkflow, wfId)
            if workflows:
                summary["removed"].append(f"{len(workflows)} AutoWorkflows in {mandateLabel}")
        except Exception as e:
            summary["errors"].append(f"GraphicalEditor cleanup for {mandateLabel}: {e}")
    def _removeTrusteeSeed(self, featureInstanceId: str, mandateLabel: str, summary: Dict):
        try:
            from modules.features.trustee.datamodelFeatureTrustee import (
                TrusteeAccountingConfig,
                TrusteeDataAccount,
                TrusteeDataContact,
                TrusteeDataJournalEntry,
                TrusteeDataJournalLine,
            )
            trusteeDb = _openTrusteeDb()
            for model in (
                TrusteeDataJournalLine,
                TrusteeDataJournalEntry,
                TrusteeDataContact,
                TrusteeDataAccount,
                TrusteeAccountingConfig,
            ):
                rows = trusteeDb.getRecordset(model, recordFilter={"featureInstanceId": featureInstanceId}) or []
                for row in rows:
                    trusteeDb.recordDelete(model, row.get("id"))
                if rows:
                    summary["removed"].append(f"{len(rows)} {model.__name__} in {mandateLabel}")
        except Exception as e:
            summary["errors"].append(f"Trustee cleanup for {mandateLabel}: {e}")
    def _removeNeutralizationData(self, db, featureInstanceId: str, mandateLabel: str, summary: Dict):
        try:
            from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutraliserConfig
            configs = db.getRecordset(DataNeutraliserConfig, recordFilter={"featureInstanceId": featureInstanceId}) or []
            for cfg in configs:
                db.recordDelete(DataNeutraliserConfig, cfg.get("id"))
            if configs:
                summary["removed"].append(f"DataNeutraliserConfig in {mandateLabel}")
        except Exception as e:
            summary["errors"].append(f"Neutralization cleanup for {mandateLabel}: {e}")


# ----------------------------------------------------------------------
# Module-level helpers (private)
# ----------------------------------------------------------------------
def _demoDataDir() -> Path:
    """Return absolute path to ``gateway/demoData`` regardless of CWD."""
    # __file__ = .../gateway/modules/demoConfigs/pwgDemo2026.py
    return Path(__file__).resolve().parents[2] / "demoData"


def _openTrusteeDb():
    """Open a privileged DB connection to ``poweron_trustee`` (used by both
    seed and remove paths so they work consistently)."""
    from modules.connectors.connectorDbPostgre import DatabaseConnector
    from modules.shared.configuration import APP_CONFIG
    return DatabaseConnector(
        dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
        dbDatabase="poweron_trustee",
        dbUser=APP_CONFIG.get("DB_USER"),
        dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
        dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
        userId=None,
    )


def _openGraphicalEditorDb():
    """Open a privileged DB connection to ``poweron_graphicaleditor``."""
    from modules.connectors.connectorDbPostgre import DatabaseConnector
    from modules.shared.configuration import APP_CONFIG
    return DatabaseConnector(
        dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
        dbDatabase="poweron_graphicaleditor",
        dbUser=APP_CONFIG.get("DB_USER"),
        dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
        dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
        userId=None,
    )
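Note on the seed data read by `_ensureTrusteeSeed`: the demo seed file itself (``demoData/pwg/<_SEED_TRUSTEE_FILE>``) is not part of this change set, so the snippet below is only an illustrative sketch of the shape the loader reads (``rentAccount``, ``rentAccountLabel``, ``year`` and a ``tenants`` list); all concrete values are hypothetical.

# Illustrative only: a seed file shaped the way _ensureTrusteeSeed reads it.
# The real file name and contents are not shown in this diff.
import json
from pathlib import Path

seed = {
    "rentAccount": "6000",
    "rentAccountLabel": "Mietzinsertrag",
    "year": 2026,
    "tenants": [
        {
            "contactNumber": "T-001",           # hypothetical value
            "name": "Muster Immobilien AG",     # hypothetical value
            "address": "Bahnhofstrasse 1",
            "zip": "8001",
            "city": "Zürich",
            "country": "CH",
            "email": "tenant@example.com",
            "monthlyRentChf": 1850.0,
        },
    ],
}
Path("trusteeSeed.json").write_text(json.dumps(seed, indent=2), encoding="utf-8")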
@@ -8,18 +8,20 @@ Handles feature initialization and RBAC catalog registration.
 import logging
 from typing import Dict, List, Any, Optional
 
+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)
 
 # Feature metadata
 FEATURE_CODE = "chatbot"
-FEATURE_LABEL = "Chatbot"
+FEATURE_LABEL = t("Chatbot", context="UI")
 FEATURE_ICON = "mdi-robot"
 
 # UI Objects for RBAC catalog
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.chatbot.conversations",
-        "label": "Konversationen",
+        "label": t("Konversationen", context="UI"),
         "meta": {"area": "conversations"}
     }
 ]

@@ -28,22 +30,22 @@ UI_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.chatbot.startStream",
-        "label": "Chat starten (Stream)",
+        "label": t("Chat starten (Stream)", context="UI"),
         "meta": {"endpoint": "/api/chatbot/{instanceId}/start/stream", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.chatbot.stop",
-        "label": "Chat stoppen",
+        "label": t("Chat stoppen", context="UI"),
         "meta": {"endpoint": "/api/chatbot/{instanceId}/stop/{workflowId}", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.chatbot.threads",
-        "label": "Threads abrufen",
+        "label": t("Threads abrufen", context="UI"),
         "meta": {"endpoint": "/api/chatbot/{instanceId}/threads", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.chatbot.delete",
-        "label": "Chat löschen",
+        "label": t("Chat löschen", context="UI"),
         "meta": {"endpoint": "/api/chatbot/{instanceId}/{workflowId}", "method": "DELETE"}
     },
 ]
@@ -8,89 +8,122 @@ Handles feature initialization and RBAC catalog registration.
 import logging
 from typing import Dict, List, Any
 
+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)
 
 FEATURE_CODE = "commcoach"
-FEATURE_LABEL = "Kommunikations-Coach"
+FEATURE_LABEL = t("Kommunikations-Coach", context="UI")
 FEATURE_ICON = "mdi-account-voice"
 
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.commcoach.dashboard",
-        "label": "Dashboard",
+        "label": t("Dashboard", context="UI"),
         "meta": {"area": "dashboard"}
     },
     {
         "objectKey": "ui.feature.commcoach.coaching",
-        "label": "Arbeitsthemen",
+        "label": t("Arbeitsthemen", context="UI"),
         "meta": {"area": "coaching"}
     },
     {
         "objectKey": "ui.feature.commcoach.settings",
-        "label": "Einstellungen",
+        "label": t("Einstellungen", context="UI"),
         "meta": {"area": "settings"}
     },
 ]
 
 DATA_OBJECTS = [
+    # ── Record-Hierarchie: Context → Session → Message/Score, Context → Task ──
     {
         "objectKey": "data.feature.commcoach.CoachingContext",
-        "label": "Coaching-Kontext",
+        "label": t("Coaching-Kontext", context="UI"),
         "meta": {
             "table": "CoachingContext",
-            "fields": ["id", "title", "category", "status"],
+            "fields": ["id", "title", "category", "status", "lastSessionAt"],
             "isParent": True,
             "displayFields": ["title", "category", "status"],
         }
     },
     {
         "objectKey": "data.feature.commcoach.CoachingSession",
-        "label": "Coaching-Session",
+        "label": t("Coaching-Session", context="UI"),
         "meta": {
             "table": "CoachingSession",
-            "fields": ["id", "contextId", "status", "summary"],
+            "fields": ["id", "contextId", "status", "summary", "startedAt", "endedAt", "competenceScore"],
+            "isParent": True,
             "parentTable": "CoachingContext",
             "parentKey": "contextId",
+            "displayFields": ["startedAt", "status"],
         }
     },
     {
         "objectKey": "data.feature.commcoach.CoachingMessage",
-        "label": "Coaching-Nachricht",
-        "meta": {"table": "CoachingMessage", "fields": ["id", "sessionId", "role", "content"]}
-    },
-    {
-        "objectKey": "data.feature.commcoach.CoachingTask",
-        "label": "Coaching-Aufgabe",
-        "meta": {
-            "table": "CoachingTask",
-            "fields": ["id", "contextId", "title", "status"],
-            "parentTable": "CoachingContext",
-            "parentKey": "contextId",
+        "label": t("Coaching-Nachricht", context="UI"),
+        "meta": {
+            "table": "CoachingMessage",
+            "fields": ["id", "sessionId", "contextId", "role", "content", "contentType"],
+            "parentTable": "CoachingSession",
+            "parentKey": "sessionId",
         }
     },
     {
         "objectKey": "data.feature.commcoach.CoachingScore",
-        "label": "Coaching-Score",
-        "meta": {"table": "CoachingScore", "fields": ["id", "dimension", "score", "trend"]}
+        "label": t("Coaching-Score", context="UI"),
+        "meta": {
+            "table": "CoachingScore",
+            "fields": ["id", "sessionId", "contextId", "dimension", "score", "trend"],
+            "parentTable": "CoachingSession",
+            "parentKey": "sessionId",
+        }
+    },
+    {
+        "objectKey": "data.feature.commcoach.CoachingTask",
+        "label": t("Coaching-Aufgabe", context="UI"),
+        "meta": {
+            "table": "CoachingTask",
+            "fields": ["id", "contextId", "title", "status", "priority", "dueDate"],
+            "parentTable": "CoachingContext",
+            "parentKey": "contextId",
+        }
+    },
+    # ── Stammdaten (sessionübergreifend, scoped per userId) ──────────────────
+    {
+        "objectKey": "data.feature.commcoach.userData",
+        "label": t("Stammdaten", context="UI"),
+        "meta": {"isGroup": True}
     },
     {
         "objectKey": "data.feature.commcoach.CoachingUserProfile",
-        "label": "Benutzerprofil",
-        "meta": {"table": "CoachingUserProfile", "fields": ["id", "userId", "dailyReminderEnabled"]}
+        "label": t("Benutzerprofil", context="UI"),
+        "meta": {
+            "table": "CoachingUserProfile",
+            "group": "data.feature.commcoach.userData",
+            "fields": ["id", "userId", "dailyReminderEnabled", "streakDays", "totalSessions"],
+        }
     },
     {
         "objectKey": "data.feature.commcoach.CoachingPersona",
-        "label": "Coaching-Persona",
-        "meta": {"table": "CoachingPersona", "fields": ["id", "key", "label", "gender"]}
+        "label": t("Coaching-Persona", context="UI"),
+        "meta": {
+            "table": "CoachingPersona",
+            "group": "data.feature.commcoach.userData",
+            "fields": ["id", "key", "label", "gender", "category"],
+        }
     },
     {
         "objectKey": "data.feature.commcoach.CoachingBadge",
-        "label": "Coaching-Auszeichnung",
-        "meta": {"table": "CoachingBadge", "fields": ["id", "badgeKey", "awardedAt"]}
+        "label": t("Coaching-Auszeichnung", context="UI"),
+        "meta": {
+            "table": "CoachingBadge",
+            "group": "data.feature.commcoach.userData",
+            "fields": ["id", "badgeKey", "awardedAt"],
+        }
     },
     {
         "objectKey": "data.feature.commcoach.*",
-        "label": "Alle CommCoach-Daten",
+        "label": t("Alle CommCoach-Daten", context="UI"),
         "meta": {"wildcard": True}
     },
 ]

@@ -98,27 +131,27 @@ DATA_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.commcoach.context.create",
-        "label": "Kontext erstellen",
+        "label": t("Kontext erstellen", context="UI"),
         "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.commcoach.context.archive",
-        "label": "Kontext archivieren",
+        "label": t("Kontext archivieren", context="UI"),
         "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/archive", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.commcoach.session.start",
-        "label": "Session starten",
+        "label": t("Session starten", context="UI"),
         "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/sessions/start", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.commcoach.session.complete",
-        "label": "Session abschliessen",
+        "label": t("Session abschliessen", context="UI"),
         "meta": {"endpoint": "/api/commcoach/{instanceId}/sessions/{sessionId}/complete", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.commcoach.task.manage",
-        "label": "Aufgaben verwalten",
+        "label": t("Aufgaben verwalten", context="UI"),
         "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/tasks", "method": "POST"}
     },
 ]
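The reworked DATA_OBJECTS now express the record hierarchy purely through ``parentTable`` / ``parentKey`` metadata. A small sketch of how a consumer could rebuild that tree from the catalog follows; the helper is illustrative only, not an existing gateway function.

# Illustrative only: derive the parent→child table tree from DATA_OBJECTS meta.
from collections import defaultdict

def buildHierarchy(dataObjects):
    # Collect (childTable, foreignKey) pairs per parent table.
    children = defaultdict(list)
    for obj in dataObjects:
        meta = obj.get("meta") or {}
        table = meta.get("table")
        parent = meta.get("parentTable")
        if table and parent:
            children[parent].append((table, meta.get("parentKey")))
    return dict(children)

# With the catalog above this yields roughly:
# {"CoachingContext": [("CoachingSession", "contextId"), ("CoachingTask", "contextId")],
#  "CoachingSession": [("CoachingMessage", "sessionId"), ("CoachingScore", "sessionId")]}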
284    modules/features/graphicalEditor/_workflowFileSchema.py    Normal file
@@ -0,0 +1,284 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""
Workflow File Schema (Versioned Envelope) for the GraphicalEditor.

A *workflow file* is a portable JSON representation of an ``AutoWorkflow`` that
can be exchanged between mandates / instances / installations. It contains the
graph, the entry-points (invocations), and a small set of metadata under the
``$``-prefixed envelope keys.

Persistence-bound fields (``id``, ``mandateId``, ``featureInstanceId``,
``currentVersionId``, ``eventId``, ``active``, ``sysCreated*``,
``sysModified*``) are NEVER part of the file — they are stripped on export and
re-derived on import.

Reference: ``wiki/c-work/1-plan/2026-04-pwg-pilot-mietzinsbestaetigung-workflow.md``
"""

from typing import Any, Dict, List, Optional, Tuple
import logging
import time

logger = logging.getLogger(__name__)

WORKFLOW_FILE_SCHEMA_VERSION = "1.0"
WORKFLOW_FILE_KIND = "poweron.workflow"
WORKFLOW_FILE_EXTENSION = ".workflow.json"

_PERSISTENCE_FIELDS = (
    "id",
    "mandateId",
    "featureInstanceId",
    "currentVersionId",
    "eventId",
    "active",
    "templateSourceId",
    "sysCreatedBy",
    "sysCreatedAt",
    "sysModifiedBy",
    "sysModifiedAt",
)

_ENVELOPE_KEYS = (
    "$schemaVersion",
    "$kind",
    "$exportedAt",
    "$gatewayVersion",
)

_PORTABLE_WORKFLOW_FIELDS = (
    "label",
    "description",
    "tags",
    "templateScope",
    "sharedReadOnly",
    "notifyOnFailure",
    "isTemplate",
    "graph",
    "invocations",
)


class WorkflowFileSchemaError(ValueError):
    """Raised when a workflow file does not conform to the expected schema."""


def isWorkflowFileEnvelope(payload: Any) -> bool:
    """Quick content-sniff used by the UDB to decide whether a file is a
    workflow envelope (without raising on malformed input)."""
    if not isinstance(payload, dict):
        return False
    if payload.get("$kind") == WORKFLOW_FILE_KIND:
        return True
    if "$schemaVersion" in payload and isinstance(payload.get("graph"), dict):
        return True
    return False


def _normalizeNodePosition(node: Dict[str, Any]) -> Dict[str, Any]:
    """Canonicalize node coordinates to top-level ``x`` / ``y``.

    The canvas uses top-level ``x`` / ``y``; the agent ``addNode`` tool also
    accepts ``position={x, y}``. Files may use either (or both) shape — pick
    whatever is present and persist the canonical form.
    """
    if not isinstance(node, dict):
        return node
    out = dict(node)
    pos = out.pop("position", None)
    x = out.get("x")
    y = out.get("y")
    if x is None and isinstance(pos, dict):
        x = pos.get("x")
    if y is None and isinstance(pos, dict):
        y = pos.get("y")
    if x is None:
        x = 0
    if y is None:
        y = 0
    out["x"] = x
    out["y"] = y
    return out


def normalizeGraph(graph: Any) -> Dict[str, Any]:
    """Return a graph dict with ``nodes`` and ``connections`` lists, node
    coordinates normalized to top-level ``x`` / ``y``."""
    if not isinstance(graph, dict):
        return {"nodes": [], "connections": []}
    nodes = graph.get("nodes") or []
    connections = graph.get("connections") or []
    if not isinstance(nodes, list):
        nodes = []
    if not isinstance(connections, list):
        connections = []
    return {
        "nodes": [_normalizeNodePosition(n) for n in nodes if isinstance(n, dict)],
        "connections": [c for c in connections if isinstance(c, dict)],
    }


def _stripPersistenceFields(workflowDict: Dict[str, Any]) -> Dict[str, Any]:
    """Return a copy of *workflowDict* with all persistence-bound fields removed."""
    out = {}
    for k, v in workflowDict.items():
        if k in _PERSISTENCE_FIELDS:
            continue
        out[k] = v
    return out


def buildFileFromWorkflow(
    workflowDict: Dict[str, Any],
    gatewayVersion: Optional[str] = None,
) -> Dict[str, Any]:
    """Build a portable workflow-file envelope from an ``AutoWorkflow`` row.

    Strips persistence-bound fields, normalizes the graph, and prepends the
    ``$``-envelope keys.
    """
    if not isinstance(workflowDict, dict):
        raise WorkflowFileSchemaError("workflowDict must be a dict")

    body: Dict[str, Any] = {}
    body["$schemaVersion"] = WORKFLOW_FILE_SCHEMA_VERSION
    body["$kind"] = WORKFLOW_FILE_KIND
    body["$exportedAt"] = _isoTimestamp()
    if gatewayVersion:
        body["$gatewayVersion"] = str(gatewayVersion)

    stripped = _stripPersistenceFields(workflowDict)
    for field in _PORTABLE_WORKFLOW_FIELDS:
        if field in stripped:
            value = stripped[field]
            if field == "graph":
                value = normalizeGraph(value)
            body[field] = value

    return body


def validateFileEnvelope(
    payload: Any,
    knownNodeTypes: Optional[List[str]] = None,
) -> Tuple[Dict[str, Any], List[str]]:
    """Validate a workflow-file envelope.

    Returns ``(normalizedEnvelope, warnings)``. Raises
    ``WorkflowFileSchemaError`` on hard errors (unknown schema version,
    missing graph, unknown node types).
    """
    if not isinstance(payload, dict):
        raise WorkflowFileSchemaError("Workflow file must be a JSON object")

    schemaVersion = payload.get("$schemaVersion")
    if not schemaVersion:
        raise WorkflowFileSchemaError(
            "Missing $schemaVersion — file is not a recognized workflow file"
        )
    if schemaVersion != WORKFLOW_FILE_SCHEMA_VERSION:
        raise WorkflowFileSchemaError(
            f"Unsupported $schemaVersion '{schemaVersion}' "
            f"(this gateway supports '{WORKFLOW_FILE_SCHEMA_VERSION}')"
        )

    kind = payload.get("$kind")
    if kind and kind != WORKFLOW_FILE_KIND:
        raise WorkflowFileSchemaError(
            f"Unexpected $kind '{kind}' (expected '{WORKFLOW_FILE_KIND}')"
        )

    label = payload.get("label")
    if not isinstance(label, str) or not label.strip():
        raise WorkflowFileSchemaError("Field 'label' is required and must be a non-empty string")

    graph = payload.get("graph")
    if not isinstance(graph, dict):
        raise WorkflowFileSchemaError("Field 'graph' is required and must be an object")

    normalizedGraph = normalizeGraph(graph)
    warnings: List[str] = []

    if not normalizedGraph["nodes"]:
        warnings.append("Workflow has no nodes")

    if knownNodeTypes is not None:
        knownSet = set(knownNodeTypes)
        unknownTypes = []
        for node in normalizedGraph["nodes"]:
            nodeType = node.get("type")
            if nodeType and nodeType not in knownSet:
                unknownTypes.append(nodeType)
        if unknownTypes:
            uniqueUnknown = sorted(set(unknownTypes))
            raise WorkflowFileSchemaError(
                "Workflow file references unknown node type(s) not registered in this gateway: "
                + ", ".join(uniqueUnknown)
            )

    nodeIds = {n.get("id") for n in normalizedGraph["nodes"] if n.get("id")}
    for c in normalizedGraph["connections"]:
        src = c.get("source")
        tgt = c.get("target")
        if src and src not in nodeIds:
            warnings.append(f"Connection source '{src}' is not a known node id")
        if tgt and tgt not in nodeIds:
            warnings.append(f"Connection target '{tgt}' is not a known node id")

    out: Dict[str, Any] = {}
    for k in _ENVELOPE_KEYS:
        if k in payload:
            out[k] = payload[k]
    for field in _PORTABLE_WORKFLOW_FIELDS:
        if field in payload:
            out[field] = payload[field]
    out["graph"] = normalizedGraph

    return out, warnings


def envelopeToWorkflowData(
    envelope: Dict[str, Any],
    mandateId: str,
    featureInstanceId: str,
) -> Dict[str, Any]:
    """Convert a validated workflow-file envelope into a dict suitable for
    ``GraphicalEditorObjects.createWorkflow`` / ``updateWorkflow``.

    Imports are always inactive — operators must explicitly activate them.
    Persistence-bound fields are NEVER copied from the envelope.
    """
    data: Dict[str, Any] = {
        "mandateId": mandateId,
        "featureInstanceId": featureInstanceId,
        "active": False,
    }
    for field in _PORTABLE_WORKFLOW_FIELDS:
        if field in envelope:
            data[field] = envelope[field]
    if "label" not in data or not data["label"]:
        data["label"] = "Imported Workflow"
    if "graph" in data:
        data["graph"] = normalizeGraph(data["graph"])
    return data


def _isoTimestamp() -> str:
    """UTC timestamp in ISO 8601 format (used for ``$exportedAt``)."""
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())


def buildFileName(label: str) -> str:
    """Build a safe filename ``<slug>.workflow.json`` from a workflow label."""
    base = (label or "workflow").strip().lower()
    safe_chars = []
    for ch in base:
        if ch.isalnum() or ch in ("-", "_"):
            safe_chars.append(ch)
        elif ch in (" ", ":", "/", "\\", "."):
            safe_chars.append("-")
    slug = "".join(safe_chars).strip("-") or "workflow"
    while "--" in slug:
        slug = slug.replace("--", "-")
    return f"{slug[:80]}{WORKFLOW_FILE_EXTENSION}"
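To make the envelope contract concrete, here is a minimal sketch of a payload that passes ``validateFileEnvelope``. The node type ``file.create`` is only assumed for illustration (any id registered in the target gateway's STATIC_NODE_TYPES would do); everything else follows directly from the schema module above.

# Minimal sketch of a workflow-file envelope accepted by validateFileEnvelope.
# The node type "file.create" is an assumption for illustration.
from modules.features.graphicalEditor._workflowFileSchema import (
    buildFileName,
    validateFileEnvelope,
)

envelope = {
    "$schemaVersion": "1.0",
    "$kind": "poweron.workflow",
    "label": "Mietzinsbestätigung Pilot",
    "graph": {
        "nodes": [
            {"id": "n1", "type": "file.create", "position": {"x": 120, "y": 80}, "parameters": {}},
        ],
        "connections": [],
    },
    "invocations": [],
}

normalized, warnings = validateFileEnvelope(envelope, knownNodeTypes=["file.create"])
print(warnings)                              # [] here: one node, no orphan connections
print(buildFileName(normalized["label"]))    # mietzinsbestätigung-pilot.workflow.json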
@@ -653,6 +653,62 @@ class GraphicalEditorObjects:
         })
         return dict(updated)
 
+    # -------------------------------------------------------------------------
+    # Workflow File IO (versioned envelope export/import)
+    # -------------------------------------------------------------------------
+
+    def exportWorkflowToDict(self, workflowId: str) -> Optional[Dict[str, Any]]:
+        """Export an existing workflow as a portable file envelope (dict).
+
+        The returned dict is the canonical workflow-file payload (versioned
+        envelope) and can be JSON-serialized as-is. Returns ``None`` if the
+        workflow does not exist for this mandate.
+        """
+        from modules.features.graphicalEditor._workflowFileSchema import buildFileFromWorkflow
+
+        wf = self.getWorkflow(workflowId)
+        if not wf:
+            return None
+        return buildFileFromWorkflow(wf)
+
+    def importWorkflowFromDict(
+        self,
+        envelope: Dict[str, Any],
+        existingWorkflowId: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        """Import a workflow-file envelope.
+
+        Validates the envelope, then either creates a new workflow (default)
+        or replaces the graph + invocations of an existing workflow when
+        ``existingWorkflowId`` is given. Imports are always saved with
+        ``active=False`` so operators can review before scheduling.
+        """
+        from modules.features.graphicalEditor._workflowFileSchema import (
+            envelopeToWorkflowData,
+            validateFileEnvelope,
+        )
+        from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
+
+        knownTypes = [n.get("id") for n in STATIC_NODE_TYPES if isinstance(n, dict) and n.get("id")]
+        normalizedEnvelope, warnings = validateFileEnvelope(envelope, knownNodeTypes=knownTypes)
+        data = envelopeToWorkflowData(
+            normalizedEnvelope,
+            mandateId=self.mandateId,
+            featureInstanceId=self.featureInstanceId,
+        )
+
+        if existingWorkflowId:
+            existing = self.getWorkflow(existingWorkflowId)
+            if not existing:
+                raise ValueError(
+                    f"Cannot replace workflow {existingWorkflowId}: not found in this mandate"
+                )
+            updated = self.updateWorkflow(existingWorkflowId, data) or {}
+            return {"workflow": updated, "warnings": warnings, "created": False}
+
+        created = self.createWorkflow(data)
+        return {"workflow": created, "warnings": warnings, "created": True}
+
 
 # Backward-compatible alias
 Automation2Objects = GraphicalEditorObjects
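A hedged sketch of how the two new methods combine for a mandate-to-mandate copy. How ``GraphicalEditorObjects`` is constructed is not part of this diff, so both objects are passed in pre-built; the helper name ``copyWorkflow`` is hypothetical.

# Sketch only: round-trip a workflow between two feature instances.
import json

def copyWorkflow(sourceObjects, targetObjects, workflowId: str) -> dict:
    """Export from one GraphicalEditorObjects instance and import into another."""
    envelope = sourceObjects.exportWorkflowToDict(workflowId)
    if envelope is None:
        raise ValueError(f"Workflow {workflowId} not found in source mandate")
    # The envelope is plain JSON data and can be persisted or transferred as-is.
    payload = json.dumps(envelope, ensure_ascii=False)
    # Imports are always created inactive; operators activate them manually.
    return targetObjects.importWorkflowFromDict(json.loads(payload))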
@@ -8,6 +8,8 @@ Minimal bootstrap for feature instance creation. Build from here.
 import logging
 from typing import Dict, List, Any, Optional
 
+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)
 
 FEATURE_CODE = "graphicalEditor"

@@ -21,28 +23,28 @@ REQUIRED_SERVICES = [
     {"serviceKey": "clickup", "meta": {"usage": "ClickUp actions"}},
     {"serviceKey": "generation", "meta": {"usage": "file.create document rendering"}},
 ]
-FEATURE_LABEL = "Grafischer Editor"
+FEATURE_LABEL = t("Grafischer Editor", context="UI")
 FEATURE_ICON = "mdi-sitemap"
 
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.graphicalEditor.editor",
-        "label": "Editor",
+        "label": t("Editor", context="UI"),
         "meta": {"area": "editor"}
     },
     {
         "objectKey": "ui.feature.graphicalEditor.workflows",
-        "label": "Workflows",
+        "label": t("Workflows", context="UI"),
         "meta": {"area": "workflows"}
     },
     {
         "objectKey": "ui.feature.graphicalEditor.templates",
-        "label": "Vorlagen",
+        "label": t("Vorlagen", context="UI"),
         "meta": {"area": "templates"}
     },
     {
         "objectKey": "ui.feature.graphicalEditor.workflows-tasks",
-        "label": "Tasks",
+        "label": t("Tasks", context="UI"),
         "meta": {"area": "tasks"}
     },
 ]

@@ -50,17 +52,17 @@ UI_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.graphicalEditor.dashboard",
-        "label": "Dashboard aufrufen",
+        "label": t("Dashboard aufrufen", context="UI"),
         "meta": {"endpoint": "/api/workflows/{instanceId}/info", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.graphicalEditor.node-types",
-        "label": "Node-Typen abrufen",
+        "label": t("Node-Typen abrufen", context="UI"),
         "meta": {"endpoint": "/api/workflows/{instanceId}/node-types", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.graphicalEditor.execute",
-        "label": "Workflow ausführen",
+        "label": t("Workflow ausführen", context="UI"),
         "meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"}
     },
 ]
@@ -10,6 +10,7 @@ from .sharepoint import SHAREPOINT_NODES
 from .clickup import CLICKUP_NODES
 from .file import FILE_NODES
 from .trustee import TRUSTEE_NODES
+from .redmine import REDMINE_NODES
 from .data import DATA_NODES
 from .context import CONTEXT_NODES
 

@@ -23,6 +24,7 @@ STATIC_NODE_TYPES = (
     + CLICKUP_NODES
     + FILE_NODES
     + TRUSTEE_NODES
+    + REDMINE_NODES
     + DATA_NODES
     + CONTEXT_NODES
 )
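Both import paths in this change derive the list of known node ids the same way from the aggregated registry; a minimal sketch of that lookup, assuming the package import behaves as in the hunk above:

# Sketch: the known-node-type list that validateFileEnvelope callers build.
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES

knownTypes = [n.get("id") for n in STATIC_NODE_TYPES if isinstance(n, dict) and n.get("id")]
assert "redmine.listTickets" in knownTypes  # newly registered by this change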
@@ -12,6 +12,7 @@ CLICKUP_NODES = [
         "description": t("Aufgaben in einem Workspace suchen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "clickup"},
              "description": t("ClickUp-Verbindung")},
             {"name": "teamId", "type": "string", "required": True, "frontendType": "text",
              "description": t("Team-/Workspace-ID")},

@@ -44,6 +45,7 @@ CLICKUP_NODES = [
         "description": t("Aufgaben einer Liste auflisten"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "clickup"},
              "description": t("ClickUp-Verbindung")},
             {"name": "pathQuery", "type": "string", "required": True, "frontendType": "clickupList",
              "frontendOptions": {"dependsOn": "connectionReference"},

@@ -68,6 +70,7 @@ CLICKUP_NODES = [
         "description": t("Eine Aufgabe abrufen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "clickup"},
              "description": t("ClickUp-Verbindung")},
             {"name": "taskId", "type": "string", "required": False, "frontendType": "text",
              "description": t("Task-ID")},

@@ -89,6 +92,7 @@ CLICKUP_NODES = [
         "description": t("Aufgabe erstellen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "clickup"},
              "description": t("ClickUp-Verbindung")},
             {"name": "teamId", "type": "string", "required": False, "frontendType": "text",
              "description": t("Workspace")},

@@ -134,6 +138,7 @@ CLICKUP_NODES = [
         "description": t("Felder der Aufgabe ändern"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "clickup"},
              "description": t("ClickUp-Verbindung")},
             {"name": "taskId", "type": "string", "required": False, "frontendType": "text",
              "description": t("Task-ID")},

@@ -159,6 +164,7 @@ CLICKUP_NODES = [
         "description": t("Datei an Task anhängen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "clickup"},
              "description": t("ClickUp-Verbindung")},
             {"name": "taskId", "type": "string", "required": False, "frontendType": "text",
              "description": t("Task-ID")},
@@ -11,6 +11,7 @@ EMAIL_NODES = [
         "description": t("Neue E-Mails prüfen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("E-Mail-Konto Verbindung")},
             {"name": "folder", "type": "string", "required": False, "frontendType": "text",
              "description": t("Ordner"), "default": "Inbox"},

@@ -40,6 +41,7 @@ EMAIL_NODES = [
         "description": t("E-Mails suchen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("E-Mail-Konto Verbindung")},
             {"name": "query", "type": "string", "required": False, "frontendType": "text",
              "description": t("Suchbegriff"), "default": ""},

@@ -75,6 +77,7 @@ EMAIL_NODES = [
         "description": t("E-Mail-Entwurf erstellen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("E-Mail-Konto")},
             {"name": "subject", "type": "string", "required": True, "frontendType": "text",
              "description": t("Betreff")},

@@ -82,10 +85,15 @@ EMAIL_NODES = [
              "description": t("Inhalt")},
             {"name": "to", "type": "string", "required": False, "frontendType": "text",
              "description": t("Empfänger"), "default": ""},
+            {"name": "attachments", "type": "json", "required": False, "frontendType": "attachmentBuilder",
+             "description": t(
+                 "Anhänge: Liste von { contentRef | csvFromVariable | base64Content, name, mimeType }. "
+                 "Per Wire befüllbar (z.B. CSV aus data.consolidate)."),
+             "default": []},
         ],
         "inputs": 1,
         "outputs": 1,
-        "inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit"]}},
+        "inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit", "ConsolidateResult", "DocumentList"]}},
         "outputPorts": {0: {"schema": "ActionResult"}},
         "meta": {"icon": "mdi-email-edit", "color": "#1976D2", "usesAi": False},
         "_method": "outlook",
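The new ``attachments`` parameter is a JSON list. Going only by the description string above, a wired value might look like the sketch below; which of the three content keys the runtime prefers, and the exact variable name produced by ``data.consolidate``, are not shown in this diff and are assumptions here.

# Sketch of an "attachments" value as described by the parameter text above:
# a list of { contentRef | csvFromVariable | base64Content, name, mimeType }.
attachments = [
    {
        "csvFromVariable": "consolidateResult",  # hypothetical wire/variable name
        "name": "mietzins-2026.csv",
        "mimeType": "text/csv",
    },
    {
        "base64Content": "JVBERi0xLjQK...",       # truncated example payload
        "name": "bestaetigung.pdf",
        "mimeType": "application/pdf",
    },
]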
modules/features/graphicalEditor/nodeDefinitions/redmine.py (new file, 170 lines)
@@ -0,0 +1,170 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine node definitions - map to MethodRedmine actions."""
+
+from modules.shared.i18nRegistry import t
+
+REDMINE_NODES = [
+    {
+        "id": "redmine.readTicket",
+        "category": "redmine",
+        "label": t("Ticket lesen"),
+        "description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Redmine Feature-Instanz-ID")},
+            {"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
+             "description": t("Redmine-Ticket-ID")},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-ticket-outline", "color": "#4A6FA5", "usesAi": False},
+        "_method": "redmine",
+        "_action": "readTicket",
+    },
+    {
+        "id": "redmine.listTickets",
+        "category": "redmine",
+        "label": t("Tickets auflisten"),
+        "description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Redmine Feature-Instanz-ID")},
+            {"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
+             "description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
+            {"name": "status", "type": "string", "required": False, "frontendType": "text",
+             "description": t("Status-Filter: open | closed | *"), "default": "*"},
+            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
+             "description": t("Zeitraum ab (ISO-Datum)"), "default": ""},
+            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
+             "description": t("Zeitraum bis (ISO-Datum)"), "default": ""},
+            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Nur Tickets dieses Benutzers (ID)")},
+            {"name": "limit", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Max. Anzahl Tickets (1-500)"), "default": 100},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-format-list-bulleted", "color": "#4A6FA5", "usesAi": False},
+        "_method": "redmine",
+        "_action": "listTickets",
+    },
+    {
+        "id": "redmine.createTicket",
+        "category": "redmine",
+        "label": t("Ticket erstellen"),
+        "description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Redmine Feature-Instanz-ID")},
+            {"name": "subject", "type": "string", "required": True, "frontendType": "text",
+             "description": t("Ticket-Titel")},
+            {"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
+             "description": t("Tracker-ID (Userstory, Feature, Task, ...)")},
+            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
+             "description": t("Ticket-Beschreibung"), "default": ""},
+            {"name": "statusId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Status-ID (optional)")},
+            {"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Prioritaet-ID (optional)")},
+            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Zugewiesene Benutzer-ID (optional)")},
+            {"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Uebergeordnetes Ticket (optional)")},
+            {"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
+             "description": t("Custom Fields als JSON {id: value}"), "default": ""},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-ticket-plus-outline", "color": "#4A6FA5", "usesAi": False},
+        "_method": "redmine",
+        "_action": "createTicket",
+    },
+    {
+        "id": "redmine.updateTicket",
+        "category": "redmine",
+        "label": t("Ticket bearbeiten"),
+        "description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Redmine Feature-Instanz-ID")},
+            {"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
+             "description": t("Ticket-ID")},
+            {"name": "subject", "type": "string", "required": False, "frontendType": "text",
+             "description": t("Neuer Titel")},
+            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
+             "description": t("Neue Beschreibung")},
+            {"name": "trackerId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Neuer Tracker")},
+            {"name": "statusId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Neuer Status")},
+            {"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Neue Prioritaet")},
+            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Neue Zuweisung")},
+            {"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
+             "description": t("Neues Parent-Ticket")},
+            {"name": "notes", "type": "string", "required": False, "frontendType": "textarea",
+             "description": t("Kommentar (Journal-Eintrag)"), "default": ""},
+            {"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
+             "description": t("Custom Fields als JSON {id: value}"), "default": ""},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-ticket-confirmation-outline", "color": "#4A6FA5", "usesAi": False},
+        "_method": "redmine",
+        "_action": "updateTicket",
+    },
+    {
+        "id": "redmine.getStats",
+        "category": "redmine",
+        "label": t("Statistik laden"),
+        "description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Redmine Feature-Instanz-ID")},
+            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
+             "description": t("Zeitraum ab")},
+            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
+             "description": t("Zeitraum bis")},
+            {"name": "bucket", "type": "string", "required": False, "frontendType": "text",
+             "description": t("Bucket: day | week | month"), "default": "week"},
+            {"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
+             "description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-chart-bar", "color": "#4A6FA5", "usesAi": False},
+        "_method": "redmine",
+        "_action": "getStats",
+    },
+    {
+        "id": "redmine.runSync",
+        "category": "redmine",
+        "label": t("Mirror synchronisieren"),
+        "description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Redmine Feature-Instanz-ID")},
+            {"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
+             "description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-database-sync", "color": "#4A6FA5", "usesAi": False},
+        "_method": "redmine",
+        "_action": "runSync",
+    },
+]
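As orientation for how such a definition list is typically consumed, a hypothetical catalog builder could strip the private underscore keys before handing node types to the frontend while keeping them for server-side dispatch. The helper and the executor key below are illustrative and are not the actual get_node_types implementation:

```python
# Hypothetical sketch: expose public node metadata, keep _method/_action server-side.
def buildCatalogEntry(node: dict) -> dict:
    public = {key: value for key, value in node.items() if not key.startswith("_")}
    public["executor"] = {"method": node["_method"], "action": node["_action"]}  # illustrative key
    return public

REDMINE_CATALOG = [buildCatalogEntry(node) for node in REDMINE_NODES]
```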
@@ -11,6 +11,7 @@ SHAREPOINT_NODES = [
         "description": t("Datei nach Pfad oder Suche finden"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung")},
             {"name": "searchQuery", "type": "string", "required": True, "frontendType": "text",
              "description": t("Suchanfrage oder Pfad")},
@@ -34,6 +35,7 @@ SHAREPOINT_NODES = [
         "description": t("Inhalt aus Datei extrahieren"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung")},
             {"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
              "frontendOptions": {"dependsOn": "connectionReference"},
@@ -54,6 +56,7 @@ SHAREPOINT_NODES = [
         "description": t("Datei zu SharePoint hochladen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung")},
             {"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFolder",
              "frontendOptions": {"dependsOn": "connectionReference"},
@@ -74,6 +77,7 @@ SHAREPOINT_NODES = [
         "description": t("Dateien in Ordner auflisten"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung")},
             {"name": "pathQuery", "type": "string", "required": False, "frontendType": "sharepointFolder",
              "frontendOptions": {"dependsOn": "connectionReference"},
@@ -94,6 +98,7 @@ SHAREPOINT_NODES = [
         "description": t("Datei vom Pfad herunterladen"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung")},
             {"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
              "frontendOptions": {"dependsOn": "connectionReference"},
@@ -114,6 +119,7 @@ SHAREPOINT_NODES = [
         "description": t("Datei an Ziel kopieren"),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung")},
             {"name": "sourcePath", "type": "string", "required": True, "frontendType": "sharepointFile",
              "frontendOptions": {"dependsOn": "connectionReference"},
@@ -34,6 +34,7 @@ TRUSTEE_NODES = [
         "description": t("Dokumenttyp und Daten aus PDF/JPG per AI extrahieren."),
         "parameters": [
             {"name": "connectionReference", "type": "string", "required": False, "frontendType": "userConnection",
+             "frontendOptions": {"authority": "msft"},
              "description": t("SharePoint-Verbindung"), "default": ""},
             {"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
              "frontendOptions": {"dependsOn": "connectionReference"},
@@ -89,4 +90,42 @@ TRUSTEE_NODES = [
         "_method": "trustee",
         "_action": "syncToAccounting",
     },
+    {
+        "id": "trustee.queryData",
+        "category": "trustee",
+        "label": t("Treuhand-Daten abfragen"),
+        "description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
+        "parameters": [
+            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
+             "description": t("Trustee Feature-Instanz-ID")},
+            {"name": "mode", "type": "string", "required": True, "frontendType": "select",
+             "frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
+             "description": t("Abfragemodus"), "default": "lookup"},
+            {"name": "entity", "type": "string", "required": True, "frontendType": "select",
+             "frontendOptions": {"options": ["tenantWithRent", "contact", "journalLines", "accounts", "balances"]},
+             "description": t("Entität, die gelesen werden soll"), "default": "tenantWithRent"},
+            {"name": "tenantNameRef", "type": "string", "required": False, "frontendType": "text",
+             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
+             "description": t("Mietername (oder {{wire.feld}} aus Upstream)"), "default": ""},
+            {"name": "tenantAddressRef", "type": "string", "required": False, "frontendType": "text",
+             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
+             "description": t("Mieteradresse (Toleranz für Tippfehler)"), "default": ""},
+            {"name": "period", "type": "string", "required": False, "frontendType": "text",
+             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "journalLines", "balances"]},
+             "description": t("Zeitraum (YYYY oder YYYY-MM-DD/YYYY-MM-DD)"), "default": ""},
+            {"name": "rentAccountPattern", "type": "string", "required": False, "frontendType": "text",
+             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent"]},
+             "description": t("Konto-Filter für Mietzins (z.B. '6000-6099' oder '6*')"), "default": ""},
+            {"name": "filterJson", "type": "string", "required": False, "frontendType": "textarea",
+             "frontendOptions": {"dependsOn": "mode", "showWhen": ["raw", "aggregate"]},
+             "description": t("Optionaler JSON-Filter für mode=raw/aggregate"), "default": ""},
+        ],
+        "inputs": 1,
+        "outputs": 1,
+        "inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult"]}},
+        "outputPorts": {0: {"schema": "ActionResult"}},
+        "meta": {"icon": "mdi-database-search", "color": "#4CAF50", "usesAi": False},
+        "_method": "trustee",
+        "_action": "queryData",
+    },
 ]
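To make the new trustee.queryData node concrete, a configured node instance could carry parameter values like the following (all values illustrative; the {{wire.…}} placeholder syntax is taken from the parameter descriptions above):

```python
# Illustrative parameter values for a trustee.queryData node instance.
queryDataParams = {
    "featureInstanceId": "<trustee-feature-instance-id>",  # hidden, filled by the editor
    "mode": "lookup",
    "entity": "tenantWithRent",
    "tenantNameRef": "{{wire.tenantName}}",                 # resolved from the upstream node at runtime
    "period": "2025",
    "rentAccountPattern": "6000-6099",
}
```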
@@ -11,7 +11,7 @@ import math
 from typing import Any, Dict, List, Optional

 from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPException
-from fastapi.responses import JSONResponse, StreamingResponse
+from fastapi.responses import JSONResponse, StreamingResponse, Response
 from modules.auth import limiter, getRequestContext, RequestContext
 from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
 from modules.routes.routeHelpers import _applyFiltersAndSort
@@ -135,6 +135,58 @@ def get_node_types(
     return result


+@router.get("/{instanceId}/options/user.connection")
+@limiter.limit("60/minute")
+def get_user_connection_options(
+    request: Request,
+    instanceId: str = Path(..., description="Feature instance ID"),
+    authority: Optional[str] = Query(None, description="Optional authority filter (e.g. 'msft', 'google', 'clickup', 'local')"),
+    activeOnly: bool = Query(True, description="If true (default), only ACTIVE connections are returned"),
+    context: RequestContext = Depends(getRequestContext),
+) -> dict:
+    """Return current user's UserConnections as { options: [{ value, label }] }.
+
+    Used by node parameters with frontendType='userConnection'. Optional
+    `authority` lets a node declare which provider it expects (e.g. SharePoint
+    nodes pass authority=msft so only Microsoft connections show up).
+    """
+    _validateInstanceAccess(instanceId, context)
+    if not context.user:
+        raise HTTPException(status_code=401, detail=routeApiMsg("Authentication required"))
+    from modules.interfaces.interfaceDbApp import getRootInterface
+    rootInterface = getRootInterface()
+    try:
+        connections = rootInterface.getUserConnections(str(context.user.id)) or []
+    except Exception as e:
+        logger.error("get_user_connection_options: failed to load connections: %s", e, exc_info=True)
+        return {"options": []}
+    wanted = (authority or "").strip().lower() or None
+    options: List[Dict[str, str]] = []
+    for conn in connections:
+        connStatus = getattr(conn, "status", None)
+        statusVal = connStatus.value if hasattr(connStatus, "value") else str(connStatus or "")
+        if activeOnly and statusVal.lower() != "active":
+            continue
+        connAuthority = getattr(conn, "authority", None)
+        authorityVal = (connAuthority.value if hasattr(connAuthority, "value") else str(connAuthority or "")).lower()
+        if wanted and authorityVal != wanted:
+            continue
+        username = getattr(conn, "externalUsername", "") or ""
+        email = getattr(conn, "externalEmail", "") or ""
+        connId = str(getattr(conn, "id", "") or "")
+        labelParts = [p for p in [username, email] if p]
+        label = " — ".join(labelParts) if labelParts else connId
+        if authorityVal:
+            label = f"[{authorityVal}] {label}"
+        value = f"connection:{authorityVal}:{username}" if authorityVal and username else connId
+        options.append({"value": value, "label": label})
+    logger.info(
+        "graphicalEditor user.connection options: instanceId=%s authority=%s -> %d options",
+        instanceId, wanted, len(options),
+    )
+    return {"options": options}


 @router.post("/{instanceId}/execute")
 @limiter.limit("30/minute")
 async def post_execute(
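For reference, an illustrative response of the new GET /{instanceId}/options/user.connection endpoint for a single active Microsoft connection (user data made up; the value and label formats follow the code above):

```python
# Illustrative payload for ?authority=msft with one active connection.
response = {
    "options": [
        {
            "value": "connection:msft:p.muster",               # connection:<authority>:<externalUsername>
            "label": "[msft] p.muster — p.muster@example.com",
        }
    ]
}
```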
@@ -753,15 +805,32 @@ async def _runEditorAgent(

     systemPrompt = (
         "You are a workflow EDITOR assistant for the GraphicalEditor. "
-        "Your ONLY job is to BUILD or MODIFY the workflow graph (nodes + connections) "
-        "for the user — you must NEVER execute the workflow or any of its actions. "
-        "Even when the user says 'create a workflow that sends an email', you build the "
-        "graph (e.g. add an email node, connect it) — you do NOT actually send an email. "
-        "\n\nGraph-mutating tools: readWorkflowGraph, listAvailableNodeTypes, "
+        "Your job is to MANAGE workflows for the user — create, rename, "
+        "import/export, edit the graph (nodes + connections) — but you must "
+        "NEVER execute a workflow or any of its actions. Even when the user "
+        "says 'create a workflow that sends an email', you build the graph "
+        "(add an email node, connect it) — you do NOT actually send an email."
+        "\n\nAvailable tools (all valid — use whichever the user's intent calls for):"
+        "\n Graph-mutating: readWorkflowGraph, listAvailableNodeTypes, "
         "describeNodeType, addNode, removeNode, connectNodes, setNodeParameter, "
-        "autoLayoutWorkflow, validateGraph. "
-        "Connection discovery (for parameters of frontendType='userConnection'): listConnections."
-        "\n\nMandatory build sequence:"
+        "autoLayoutWorkflow, validateGraph."
+        "\n Workflow lifecycle: createWorkflow (new empty workflow), "
+        "updateWorkflowMetadata (rename / change description / tags / activate), "
+        "createWorkflowFromFile (import .workflow.json from UDB), "
+        "exportWorkflowToFile (download envelope), deleteWorkflow (destructive — "
+        "ALWAYS confirm with the user before calling)."
+        "\n History: listWorkflowHistory, readWorkflowMessages."
+        "\n Connections (for parameters of frontendType='userConnection'): listConnections."
+        "\n\nIntent → tool mapping (do NOT improvise destructive paths):"
+        "\n • 'rename / umbenennen / call it X / nenne … um' → updateWorkflowMetadata({label: \"X\"})."
+        "\n • 'create empty workflow / new workflow / leeren Workflow' → createWorkflow({label: \"…\"})."
+        "\n • 'import / load from file' → createWorkflowFromFile({fileId: …})."
+        "\n • 'export / save to file / download' → exportWorkflowToFile()."
+        "\n • 'activate / deactivate' → updateWorkflowMetadata({active: true|false})."
+        "\n NEVER batch-call removeNode to 'rebuild' or 'rename' a workflow — that "
+        "destroys the user's work. removeNode is for removing ONE specific node the "
+        "user explicitly asked to delete."
+        "\n\nMandatory build sequence WHEN editing the graph:"
         "\n1. readWorkflowGraph — understand current state."
         "\n2. listAvailableNodeTypes — find candidate node ids."
         "\n3. For EACH node type you plan to add: call describeNodeType(nodeType=...) "
@@ -781,7 +850,7 @@ async def _runEditorAgent(
         "\n\nIf a required parameter cannot be filled from the user's request and has "
         "no safe default, ask the user once for that specific value (e.g. recipient "
         "address, target language, prompt text) instead of leaving the field blank. "
-        "Respond concisely in the user's language and list what you changed in the graph."
+        "Respond concisely in the user's language and list what you changed."
     )

     editorConfig = AgentConfig(
@@ -1191,6 +1260,128 @@ def delete_workflow(
     return {"success": True}


+# -------------------------------------------------------------------------
+# Workflow File IO (versioned envelope export/import)
+# -------------------------------------------------------------------------
+
+
+@router.post("/{instanceId}/workflows/import")
+@limiter.limit("30/minute")
+def import_workflow(
+    request: Request,
+    instanceId: str = Path(..., description="Feature instance ID"),
+    body: dict = Body(
+        ...,
+        description=(
+            "{ envelope: <workflow-file-envelope>, existingWorkflowId?: str, "
+            "fileId?: str } — supply EITHER the envelope inline OR a fileId of "
+            "a previously uploaded workflow file (.workflow.json)"
+        ),
+    ),
+    context: RequestContext = Depends(getRequestContext),
+) -> dict:
+    """Import a workflow from a versioned-envelope file.
+
+    Two input modes:
+    - ``envelope``: the parsed workflow-file payload (preferred for the agent)
+    - ``fileId``: the id of a previously uploaded ``.workflow.json`` in
+      Unified-Data-Bar (preferred for the UI "Import" modal)
+
+    On success returns the created/updated workflow plus any non-fatal
+    warnings (e.g. dangling connection references). Imports are always
+    saved with ``active=False``.
+    """
+    from modules.features.graphicalEditor._workflowFileSchema import WorkflowFileSchemaError
+
+    mandateId = _validateInstanceAccess(instanceId, context)
+    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
+
+    envelope = body.get("envelope") if isinstance(body, dict) else None
+    fileId = body.get("fileId") if isinstance(body, dict) else None
+    existingWorkflowId = body.get("existingWorkflowId") if isinstance(body, dict) else None
+
+    if not envelope and fileId:
+        envelope = _loadEnvelopeFromFile(str(fileId), context)
+
+    if not envelope:
+        raise HTTPException(
+            status_code=400,
+            detail=routeApiMsg("Body must contain 'envelope' or 'fileId'"),
+        )
+
+    try:
+        result = iface.importWorkflowFromDict(envelope, existingWorkflowId=existingWorkflowId)
+    except WorkflowFileSchemaError as exc:
+        raise HTTPException(status_code=400, detail=str(exc))
+    except ValueError as exc:
+        raise HTTPException(status_code=404, detail=str(exc))
+
+    return result
+
+
+@router.get("/{instanceId}/workflows/{workflowId}/export")
+@limiter.limit("60/minute")
+def export_workflow(
+    request: Request,
+    instanceId: str = Path(..., description="Feature instance ID"),
+    workflowId: str = Path(..., description="Workflow ID"),
+    download: bool = Query(False, description="If true, return as file download"),
+    context: RequestContext = Depends(getRequestContext),
+):
+    """Export a workflow as a versioned-envelope JSON file.
+
+    With ``download=true`` returns a streaming response with the canonical
+    ``<slug>.workflow.json`` filename so the browser triggers a save dialog.
+    Without it returns the envelope inline as JSON (used by the agent and by
+    the editor's "Save to file" → upload-to-UDB flow).
+    """
+    from modules.features.graphicalEditor._workflowFileSchema import buildFileName
+
+    mandateId = _validateInstanceAccess(instanceId, context)
+    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
+    envelope = iface.exportWorkflowToDict(workflowId)
+    if envelope is None:
+        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
+
+    if not download:
+        return {"envelope": envelope, "fileName": buildFileName(envelope.get("label", "workflow"))}
+
+    fileName = buildFileName(envelope.get("label", "workflow"))
+    payload = json.dumps(envelope, ensure_ascii=False, indent=2).encode("utf-8")
+    return Response(
+        content=payload,
+        media_type="application/json",
+        headers={"Content-Disposition": f'attachment; filename="{fileName}"'},
+    )
+
+
+def _loadEnvelopeFromFile(fileId: str, context: RequestContext) -> Optional[Dict[str, Any]]:
+    """Load and parse a ``.workflow.json`` file from the Unified-Data-Bar
+    by file id. Returns the parsed envelope dict or raises HTTPException."""
+    try:
+        import modules.interfaces.interfaceDbManagement as interfaceDbManagement
+        mgmt = interfaceDbManagement.getInterface(context.user)
+        rawBytes = mgmt.getFileData(fileId)
+    except Exception as exc:
+        logger.warning("Failed to load workflow file %s: %s", fileId, exc)
+        raise HTTPException(status_code=404, detail=routeApiMsg(f"File {fileId} not found"))
+
+    if not rawBytes:
+        raise HTTPException(status_code=404, detail=routeApiMsg(f"File {fileId} is empty"))
+
+    try:
+        if isinstance(rawBytes, bytes):
+            text = rawBytes.decode("utf-8")
+        else:
+            text = str(rawBytes)
+        return json.loads(text)
+    except Exception as exc:
+        raise HTTPException(
+            status_code=400,
+            detail=routeApiMsg(f"File {fileId} is not valid JSON: {exc}"),
+        )
+
+
 # -------------------------------------------------------------------------
 # Runs and Resume
 # -------------------------------------------------------------------------
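A minimal client-side round trip over the two new workflow-file endpoints could look like this; the base URL, ids and auth handling are assumptions, only the path suffixes and body keys come from the routes above:

```python
# Sketch of an export/import round trip; base URL, ids and auth are hypothetical.
import requests

base = "https://backend.example.com/api/graphicalEditor"   # assumed router prefix
instanceId = "<feature-instance-id>"
workflowId = "<workflow-id>"

# Export the envelope inline (download=false) ...
exported = requests.get(f"{base}/{instanceId}/workflows/{workflowId}/export").json()
# exported == {"envelope": {...}, "fileName": "<slug>.workflow.json"}

# ... and re-import it, either inline or via a previously uploaded fileId.
requests.post(
    f"{base}/{instanceId}/workflows/import",
    json={"envelope": exported["envelope"]},   # alternative: {"fileId": "<udb-file-id>"}
)
```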
@@ -8,18 +8,20 @@ Handles feature initialization and RBAC catalog registration.
 import logging
 from typing import Dict, List, Any

+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)

 # Feature metadata
 FEATURE_CODE = "neutralization"
-FEATURE_LABEL = "Neutralisierung"
+FEATURE_LABEL = t("Neutralisierung", context="UI")
 FEATURE_ICON = "mdi-shield-check"

 # UI Objects for RBAC catalog
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.neutralization.playground",
-        "label": "Spielwiese",
+        "label": t("Spielwiese", context="UI"),
         "meta": {"area": "playground"}
     }
 ]
@@ -28,17 +30,17 @@ UI_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.neutralization.process.text",
-        "label": "Text verarbeiten",
+        "label": t("Text verarbeiten", context="UI"),
         "meta": {"endpoint": "/api/neutralization/process/text", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.neutralization.process.files",
-        "label": "Dateien verarbeiten",
+        "label": t("Dateien verarbeiten", context="UI"),
         "meta": {"endpoint": "/api/neutralization/process/files", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.neutralization.config.update",
-        "label": "Konfiguration aktualisieren",
+        "label": t("Konfiguration aktualisieren", context="UI"),
         "meta": {"endpoint": "/api/neutralization/config", "method": "PUT"}
     },
 ]
@@ -8,30 +8,29 @@ This module also handles feature initialization and RBAC catalog registration.

 import logging

-# Feature metadata for RBAC catalog
+from modules.shared.i18nRegistry import t

 FEATURE_CODE = "realestate"
-FEATURE_LABEL = "Immobilien"
+FEATURE_LABEL = t("Immobilien", context="UI")
 FEATURE_ICON = "mdi-home-city"

-# UI Objects for RBAC catalog (only map view)
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.realestate.dashboard",
-        "label": "Karte",
+        "label": t("Karte", context="UI"),
         "meta": {"area": "dashboard"}
     },
 ]

-# Resource Objects for RBAC catalog
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.realestate.project.create",
-        "label": "Projekt erstellen",
+        "label": t("Projekt erstellen", context="UI"),
         "meta": {"endpoint": "/api/realestate/project", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.realestate.project.delete",
-        "label": "Projekt löschen",
+        "label": t("Projekt löschen", context="UI"),
         "meta": {"endpoint": "/api/realestate/project/{projectId}", "method": "DELETE"}
     },
 ]
modules/features/redmine/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine feature container -- ticket browser, statistics, AI tools."""
modules/features/redmine/datamodelRedmine.py (new file, 559 lines)
@@ -0,0 +1,559 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine feature data models.
+
+Two layers:
+
+1. **Persisted** (``PowerOnModel``, auto-DDL into ``poweron_redmine``):
+   - ``RedmineInstanceConfig``: per-feature-instance connection + sync state.
+   - ``RedmineTicketMirror``: local mirror of a Redmine issue.
+   - ``RedmineRelationMirror``: local mirror of an issue relation.
+
+2. **Transport** (plain Pydantic): ``Redmine*Dto`` returned over the
+   REST API and shared with the AI tools. The frontend (``RedmineStatsPage``)
+   maps the raw ``RedmineStatsDto`` buckets onto ``ReportSection`` for
+   ``FormGeneratorReport``.
+
+Scale: the mirror tables let us aggregate stats and render the ticket tree
+for projects with 20k+ tickets without round-tripping the Redmine REST API
+on every request.
+"""
+
+import uuid
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field, model_validator
+
+from modules.datamodels.datamodelBase import PowerOnModel
+from modules.shared.i18nRegistry import i18nModel
+
+
+def _coerceNoneToDefaults(cls, values):
+    """Replace None values with each field's declared default.
+
+    Reason: Postgres rows written before we added a column return NULL for
+    that column, which Pydantic v2 rejects for non-Optional fields even if
+    a default is declared. We only apply the default when the incoming
+    value is explicitly None AND the field has a default (not a
+    default_factory that would generate a new value).
+    """
+    if not isinstance(values, dict):
+        return values
+    for name, field in cls.model_fields.items():
+        if name in values and values[name] is None and field.default is not None:
+            values[name] = field.default
+    return values
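A minimal sketch of the behaviour this helper buys, using a standalone pydantic model instead of the real PowerOnModel (the Demo class is hypothetical):

```python
# Standalone demonstration of _coerceNoneToDefaults.
from pydantic import BaseModel, Field, model_validator

class Demo(BaseModel):
    @model_validator(mode="before")
    @classmethod
    def _applyDefaults(cls, values):
        return _coerceNoneToDefaults(cls, values)

    rootTrackerName: str = Field(default="Userstory")

# A NULL column from a pre-upgrade DB row arrives as None; without the validator
# pydantic v2 would reject None for the non-Optional str field.
print(Demo(rootTrackerName=None).rootTrackerName)   # -> "Userstory"
```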
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Persisted: per feature-instance Redmine connection config + sync state
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@i18nModel("Redmine-Verbindung")
|
||||||
|
class RedmineInstanceConfig(PowerOnModel):
|
||||||
|
"""Per feature-instance Redmine connection config.
|
||||||
|
|
||||||
|
The API key is stored encrypted (``encryptValue`` keyed
|
||||||
|
``"redmineApiKey"``). It is never returned to the frontend in plain
|
||||||
|
text -- the route returns a boolean ``hasApiKey`` flag instead.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@model_validator(mode="before")
|
||||||
|
@classmethod
|
||||||
|
def _applyDefaults(cls, values):
|
||||||
|
return _coerceNoneToDefaults(cls, values)
|
||||||
|
|
||||||
|
id: str = Field(
|
||||||
|
default_factory=lambda: str(uuid.uuid4()),
|
||||||
|
description="Primary key",
|
||||||
|
json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
featureInstanceId: str = Field(
|
||||||
|
description="FK -> FeatureInstance.id (1:1 per instance)",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Feature-Instanz",
|
||||||
|
"frontend_type": "text",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": True,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
mandateId: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Mandate ID (auto-set from feature instance)",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Mandant",
|
||||||
|
"frontend_type": "text",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
baseUrl: str = Field(
|
||||||
|
default="",
|
||||||
|
description="Redmine base URL, e.g. https://redmine.logobject.ch",
|
||||||
|
json_schema_extra={"label": "Basis-URL", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
|
||||||
|
)
|
||||||
|
projectId: str = Field(
|
||||||
|
default="",
|
||||||
|
description="Redmine numeric project id or identifier (slug)",
|
||||||
|
json_schema_extra={"label": "Projekt-ID", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
|
||||||
|
)
|
||||||
|
encryptedApiKey: str = Field(
|
||||||
|
default="",
|
||||||
|
description="Encrypted Redmine API key (X-Redmine-API-Key)",
|
||||||
|
json_schema_extra={"label": "API-Key (verschluesselt)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||||
|
)
|
||||||
|
rootTrackerName: str = Field(
|
||||||
|
default="Userstory",
|
||||||
|
description="Name of the tracker used as the tree root in the browser. Set explicitly in config; resolved against the live tracker list at runtime.",
|
||||||
|
json_schema_extra={"label": "Wurzel-Tracker (Name)", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
|
||||||
|
)
|
||||||
|
defaultPeriodValue: Optional[Dict[str, Any]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Optional snapshot of a frontend ``PeriodValue`` ({preset, fromDate, toDate}) used as default period when the user opens the feature.",
|
||||||
|
json_schema_extra={"label": "Standard-Zeitraum", "frontend_type": "json", "frontend_readonly": False, "frontend_required": False},
|
||||||
|
)
|
||||||
|
schemaCache: Optional[Dict[str, Any]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Cached project meta: {trackers:[{id,name}], statuses:[{id,name,isClosed}], customFields:[{id,name,fieldFormat,possibleValues}], priorities:[...], users:[{id,name}]}",
|
||||||
|
json_schema_extra={"label": "Schema-Cache", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||||
|
)
|
||||||
|
schemaCachedAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="UTC timestamp when schemaCache was last refreshed",
|
||||||
|
json_schema_extra={"label": "Schema-Cache-Zeit", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
schemaCacheTtlSeconds: Optional[int] = Field(
|
||||||
|
default=24 * 60 * 60,
|
||||||
|
description="Schema cache TTL in seconds (default 24h). Optional to tolerate NULL rows from auto-DDL upgrades.",
|
||||||
|
json_schema_extra={"label": "Schema-Cache-TTL (s)", "frontend_type": "number", "frontend_readonly": False, "frontend_required": False},
|
||||||
|
)
|
||||||
|
isActive: Optional[bool] = Field(
|
||||||
|
default=True,
|
||||||
|
description="Whether this connection is active",
|
||||||
|
json_schema_extra={"label": "Aktiv", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
|
||||||
|
)
|
||||||
|
lastConnectedAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Timestamp of the last successful whoAmI() call",
|
||||||
|
json_schema_extra={"label": "Letzte Verbindung", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
|
||||||
|
# ---- Sync state (incremental ticket mirror) ---------------------------
|
||||||
|
lastSyncAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="UTC timestamp of the last successful (incremental) mirror sync",
|
||||||
|
json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
lastFullSyncAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="UTC timestamp of the last full mirror sync (force=true)",
|
||||||
|
json_schema_extra={"label": "Letzter Full-Sync", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
lastSyncDurationMs: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Duration of the last sync in milliseconds",
|
||||||
|
json_schema_extra={"label": "Letzte Sync-Dauer (ms)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
lastSyncTicketCount: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of tickets upserted in the last sync",
|
||||||
|
json_schema_extra={"label": "Tickets im letzten Sync", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
lastSyncErrorAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="UTC timestamp of the last failed sync",
|
||||||
|
json_schema_extra={"label": "Letzter Sync-Fehler", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
lastSyncErrorMessage: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Error message of the last failed sync",
|
||||||
|
json_schema_extra={"label": "Letzter Fehler", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@i18nModel("Redmine-Ticket (Mirror)")
|
||||||
|
class RedmineTicketMirror(PowerOnModel):
|
||||||
|
"""Local mirror of a Redmine issue.
|
||||||
|
|
||||||
|
Composite uniqueness: ``(featureInstanceId, redmineId)``. We do not
|
||||||
|
enforce it via a DB constraint -- the sync logic looks up by these
|
||||||
|
two columns and does an upsert.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@model_validator(mode="before")
|
||||||
|
@classmethod
|
||||||
|
def _applyDefaults(cls, values):
|
||||||
|
return _coerceNoneToDefaults(cls, values)
|
||||||
|
|
||||||
|
id: str = Field(
|
||||||
|
default_factory=lambda: str(uuid.uuid4()),
|
||||||
|
description="Primary key",
|
||||||
|
json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
featureInstanceId: str = Field(
|
||||||
|
description="FK -> FeatureInstance.id",
|
||||||
|
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||||
|
)
|
||||||
|
mandateId: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
redmineId: int = Field(
|
||||||
|
description="Redmine issue id (unique per feature instance)",
|
||||||
|
json_schema_extra={"label": "Redmine-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
|
||||||
|
)
|
||||||
|
subject: str = Field(default="", json_schema_extra={"label": "Titel", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
|
||||||
|
description: str = Field(default="", json_schema_extra={"label": "Beschreibung", "frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False})
|
||||||
|
trackerId: Optional[int] = Field(default=None, json_schema_extra={"label": "Tracker-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
trackerName: Optional[str] = Field(default=None, json_schema_extra={"label": "Tracker", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
statusId: Optional[int] = Field(default=None, json_schema_extra={"label": "Status-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
statusName: Optional[str] = Field(default=None, json_schema_extra={"label": "Status", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
isClosed: bool = Field(default=False, json_schema_extra={"label": "Geschlossen", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
priorityId: Optional[int] = Field(default=None, json_schema_extra={"label": "Prio-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
priorityName: Optional[str] = Field(default=None, json_schema_extra={"label": "Prioritaet", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
assignedToId: Optional[int] = Field(default=None, json_schema_extra={"label": "Zuweisung-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
assignedToName: Optional[str] = Field(default=None, json_schema_extra={"label": "Zuweisung", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
authorId: Optional[int] = Field(default=None, json_schema_extra={"label": "Autor-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
authorName: Optional[str] = Field(default=None, json_schema_extra={"label": "Autor", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
parentId: Optional[int] = Field(default=None, json_schema_extra={"label": "Parent-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
fixedVersionId: Optional[int] = Field(default=None, json_schema_extra={"label": "Zielversion-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
fixedVersionName: Optional[str] = Field(default=None, json_schema_extra={"label": "Zielversion", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
categoryId: Optional[int] = Field(default=None, json_schema_extra={"label": "Kategorie-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
categoryName: Optional[str] = Field(default=None, json_schema_extra={"label": "Kategorie", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
closedOnTs: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
|
||||||
|
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||||
|
)
|
||||||
|
createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
|
createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
|
||||||
|
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||||
|
updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
|
||||||
|
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||||
|
customFields: Optional[List[Dict[str, Any]]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="List of {id,name,value} as returned by Redmine; stored as JSON",
|
||||||
|
json_schema_extra={"label": "Custom Fields", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
raw: Optional[Dict[str, Any]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Original Redmine issue payload (full)",
|
||||||
|
json_schema_extra={"label": "Roh-Payload", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||||
|
)
|
||||||
|
syncedAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="UTC epoch when this row was last upserted from Redmine",
|
||||||
|
json_schema_extra={"label": "Synced At", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@i18nModel("Redmine-Beziehung (Mirror)")
|
||||||
|
class RedmineRelationMirror(PowerOnModel):
|
||||||
|
"""Local mirror of a Redmine issue relation.
|
||||||
|
|
||||||
|
Composite uniqueness: ``(featureInstanceId, redmineRelationId)``.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@model_validator(mode="before")
|
||||||
|
@classmethod
|
||||||
|
def _applyDefaults(cls, values):
|
||||||
|
return _coerceNoneToDefaults(cls, values)
|
||||||
|
|
||||||
|
id: str = Field(
|
||||||
|
default_factory=lambda: str(uuid.uuid4()),
|
||||||
|
json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
featureInstanceId: str = Field(
|
||||||
|
description="FK -> FeatureInstance.id",
|
||||||
|
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||||
|
)
|
||||||
|
redmineRelationId: int = Field(
|
||||||
|
description="Redmine relation id (unique per feature instance)",
|
||||||
|
json_schema_extra={"label": "Relation-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
|
||||||
|
)
|
||||||
|
issueId: int = Field(
|
||||||
|
description="Source issue id (issue.id from Redmine)",
|
||||||
|
json_schema_extra={"label": "Source-Issue-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
|
||||||
|
)
|
||||||
|
issueToId: int = Field(
|
||||||
|
description="Target issue id (issue_to_id from Redmine)",
|
||||||
|
json_schema_extra={"label": "Target-Issue-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
|
||||||
|
)
|
||||||
|
relationType: str = Field(
|
||||||
|
default="relates",
|
||||||
|
json_schema_extra={"label": "Beziehungstyp", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
delay: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={"label": "Verzoegerung (Tage)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
syncedAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
json_schema_extra={"label": "Synced At", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Transport DTOs (not persisted)
# ---------------------------------------------------------------------------


class RedmineRelationDto(BaseModel):
    id: int = Field(description="Relation id")
    issueId: int = Field(description="Source issue id (issue.id from Redmine)")
    issueToId: int = Field(description="Target issue id (issue_to_id from Redmine)")
    relationType: str = Field(description="relates | precedes | follows | blocks | blocked | duplicates | duplicated | copied_to | copied_from | parent")
    delay: Optional[int] = Field(default=None, description="Delay in days (precedes/follows only)")
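# Editor's note (illustrative sketch, not part of the original file): the relation
# types listed in the field description above can be checked explicitly on the client
# side. The constant and helper names below are assumptions for illustration only.
_KNOWN_RELATION_TYPES = {
    "relates", "precedes", "follows", "blocks", "blocked",
    "duplicates", "duplicated", "copied_to", "copied_from", "parent",
}


def _exampleRelationDto() -> "RedmineRelationDto":
    # Build a minimal relation and confirm its type is one of the documented values.
    dto = RedmineRelationDto(id=1, issueId=100, issueToId=101, relationType="precedes", delay=2)
    assert dto.relationType in _KNOWN_RELATION_TYPES
    return dto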
|
||||||
|
|
||||||
|
class RedmineCustomFieldValueDto(BaseModel):
|
||||||
|
id: int
|
||||||
|
name: str
|
||||||
|
value: Any = None
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineTicketDto(BaseModel):
    """Normalised Redmine issue used by the UI and the AI tools."""

    id: int = Field(description="Redmine issue id")
    subject: str = Field(default="")
    description: str = Field(default="")
    trackerId: Optional[int] = None
    trackerName: Optional[str] = None
    statusId: Optional[int] = None
    statusName: Optional[str] = None
    isClosed: bool = False
    priorityId: Optional[int] = None
    priorityName: Optional[str] = None
    assignedToId: Optional[int] = None
    assignedToName: Optional[str] = None
    authorId: Optional[int] = None
    authorName: Optional[str] = None
    parentId: Optional[int] = None
    fixedVersionId: Optional[int] = None
    fixedVersionName: Optional[str] = None
    categoryId: Optional[int] = None
    categoryName: Optional[str] = None
    createdOn: Optional[str] = None
    updatedOn: Optional[str] = None
    customFields: List[RedmineCustomFieldValueDto] = Field(default_factory=list)
    relations: List[RedmineRelationDto] = Field(default_factory=list)
    raw: Optional[Dict[str, Any]] = None
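# Editor's note (illustrative sketch, not part of the original file): shows how a raw
# Redmine issue payload might be flattened into the DTO above. The project's actual
# mapping lives in the sync/service layer; the nested field names of the raw payload
# follow the public Redmine REST API and are assumptions here.
def _exampleNormaliseIssue(issue: Dict[str, Any]) -> "RedmineTicketDto":
    def _ref(key: str) -> Dict[str, Any]:
        # Redmine nests references as {"id": ..., "name": ...}; tolerate missing keys.
        return issue.get(key) or {}

    return RedmineTicketDto(
        id=issue["id"],
        subject=issue.get("subject", ""),
        description=issue.get("description") or "",
        trackerId=_ref("tracker").get("id"),
        trackerName=_ref("tracker").get("name"),
        statusId=_ref("status").get("id"),
        statusName=_ref("status").get("name"),
        assignedToId=_ref("assigned_to").get("id"),
        assignedToName=_ref("assigned_to").get("name"),
        customFields=[
            RedmineCustomFieldValueDto(id=cf["id"], name=cf["name"], value=cf.get("value"))
            for cf in issue.get("custom_fields", [])
        ],
        raw=issue,
    )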
|
||||||
|
|
||||||
|
class RedmineFieldChoiceDto(BaseModel):
|
||||||
|
id: int
|
||||||
|
name: str
|
||||||
|
isClosed: Optional[bool] = Field(default=None, description="Status only: closed-state flag")
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineCustomFieldSchemaDto(BaseModel):
|
||||||
|
id: int
|
||||||
|
name: str
|
||||||
|
fieldFormat: str = Field(default="string")
|
||||||
|
isRequired: bool = False
|
||||||
|
possibleValues: List[str] = Field(default_factory=list)
|
||||||
|
multiple: bool = False
|
||||||
|
defaultValue: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineFieldSchemaDto(BaseModel):
|
||||||
|
"""Project meta returned by ``getProjectMeta``."""
|
||||||
|
|
||||||
|
projectId: str
|
||||||
|
projectName: str = ""
|
||||||
|
trackers: List[RedmineFieldChoiceDto] = Field(default_factory=list)
|
||||||
|
statuses: List[RedmineFieldChoiceDto] = Field(default_factory=list)
|
||||||
|
priorities: List[RedmineFieldChoiceDto] = Field(default_factory=list)
|
||||||
|
users: List[RedmineFieldChoiceDto] = Field(default_factory=list)
|
||||||
|
categories: List[RedmineFieldChoiceDto] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Per-project Redmine issue categories. Empty if the project has none defined or if the API key is not allowed to list them.",
|
||||||
|
)
|
||||||
|
customFields: List[RedmineCustomFieldSchemaDto] = Field(default_factory=list)
|
||||||
|
rootTrackerName: str = "Userstory"
|
||||||
|
rootTrackerId: Optional[int] = Field(default=None, description="Resolved id of the configured rootTrackerName, or None if no matching tracker exists")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Stats DTO -- raw buckets, mapped to ReportSection in the frontend
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class RedmineStatsKpis(BaseModel):
|
||||||
|
total: int = 0
|
||||||
|
open: int = 0
|
||||||
|
closed: int = 0
|
||||||
|
closedInPeriod: int = 0
|
||||||
|
createdInPeriod: int = 0
|
||||||
|
orphans: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineStatusByTrackerEntry(BaseModel):
|
||||||
|
trackerId: Optional[int] = None
|
||||||
|
trackerName: str = ""
|
||||||
|
countsByStatus: Dict[str, int] = Field(default_factory=dict)
|
||||||
|
total: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineAssigneeBucket(BaseModel):
|
||||||
|
assignedToId: Optional[int] = None
|
||||||
|
name: str = "(nicht zugewiesen)"
|
||||||
|
open: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineRelationDistributionEntry(BaseModel):
|
||||||
|
relationType: str
|
||||||
|
count: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineAgingBucket(BaseModel):
|
||||||
|
bucketKey: str
|
||||||
|
label: str
|
||||||
|
minDays: int
|
||||||
|
maxDays: Optional[int] = None
|
||||||
|
count: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineThroughputBucket(BaseModel):
    """Per-bucket snapshot used by the Stats page.

    ``created`` / ``closed`` keep the per-bucket flow numbers (still useful
    for callers that want raw deltas), while ``cumTotal`` / ``cumOpen``
    expose the cumulative snapshot the UI actually plots:

    - ``cumTotal`` = number of tickets that exist as of the END of this
      bucket (= count of tickets created on or before bucket end).
    - ``cumOpen`` = of those, how many are still open at bucket end (i.e.
      not yet closed).
    """

    bucketKey: str
    label: str
    created: int = 0
    closed: int = 0
    cumTotal: int = 0
    cumOpen: int = 0
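# Editor's note (illustrative sketch, not part of the original file): demonstrates the
# cumulative semantics described in the docstring above -- cumTotal/cumOpen are running
# sums over the per-bucket created/closed deltas. The helper name is an assumption.
def _exampleAccumulateThroughput(buckets: List["RedmineThroughputBucket"]) -> List["RedmineThroughputBucket"]:
    out: List[RedmineThroughputBucket] = []
    runningTotal = 0
    runningOpen = 0
    for b in buckets:
        runningTotal += b.created            # every ticket created up to this bucket's end
        runningOpen += b.created - b.closed  # minus everything closed again by then
        out.append(b.model_copy(update={"cumTotal": runningTotal, "cumOpen": runningOpen}))
    return out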
|
||||||
|
|
||||||
|
class RedmineStatsDto(BaseModel):
|
||||||
|
"""All sections needed by the Statistics page in one round-trip."""
|
||||||
|
|
||||||
|
instanceId: str
|
||||||
|
dateFrom: Optional[str] = None
|
||||||
|
dateTo: Optional[str] = None
|
||||||
|
bucket: str = "week"
|
||||||
|
trackerIds: List[int] = Field(default_factory=list)
|
||||||
|
categoryIds: List[int] = Field(default_factory=list)
|
||||||
|
statusFilter: str = "*"
|
||||||
|
|
||||||
|
kpis: RedmineStatsKpis = Field(default_factory=RedmineStatsKpis)
|
||||||
|
statusByTracker: List[RedmineStatusByTrackerEntry] = Field(default_factory=list)
|
||||||
|
throughput: List[RedmineThroughputBucket] = Field(default_factory=list)
|
||||||
|
topAssignees: List[RedmineAssigneeBucket] = Field(default_factory=list)
|
||||||
|
relationDistribution: List[RedmineRelationDistributionEntry] = Field(default_factory=list)
|
||||||
|
backlogAging: List[RedmineAgingBucket] = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Sync DTO
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class RedmineSyncResultDto(BaseModel):
|
||||||
|
instanceId: str
|
||||||
|
full: bool = Field(description="True if a full sync was performed (no incremental cursor)")
|
||||||
|
ticketsUpserted: int = 0
|
||||||
|
relationsUpserted: int = 0
|
||||||
|
durationMs: int = 0
|
||||||
|
lastSyncAt: float
|
||||||
|
error: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineSyncStatusDto(BaseModel):
|
||||||
|
instanceId: str
|
||||||
|
lastSyncAt: Optional[float] = None
|
||||||
|
lastFullSyncAt: Optional[float] = None
|
||||||
|
lastSyncDurationMs: Optional[int] = None
|
||||||
|
lastSyncTicketCount: Optional[int] = None
|
||||||
|
lastSyncErrorAt: Optional[float] = None
|
||||||
|
lastSyncErrorMessage: Optional[str] = None
|
||||||
|
mirroredTicketCount: int = 0
|
||||||
|
mirroredRelationCount: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Request bodies
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class RedmineConfigUpdateRequest(BaseModel):
    """PUT body for the config endpoint. Fields are all optional -- only
    provided ones are updated. ``apiKey`` is encrypted before persistence."""

    baseUrl: Optional[str] = None
    projectId: Optional[str] = None
    apiKey: Optional[str] = Field(default=None, description="Plain api key; will be encrypted server-side")
    rootTrackerName: Optional[str] = None
    defaultPeriodValue: Optional[Dict[str, Any]] = None
    schemaCacheTtlSeconds: Optional[int] = None
    isActive: Optional[bool] = None
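# Editor's note (illustrative sketch, not part of the original file): only the fields
# present in the request are applied, so a partial update that rotates the API key and
# tightens the schema cache, while leaving everything else untouched, can look like this.
def _exampleConfigUpdate() -> "RedmineConfigUpdateRequest":
    return RedmineConfigUpdateRequest(
        apiKey="new-plain-api-key",          # placeholder value; encrypted server-side before persistence
        schemaCacheTtlSeconds=12 * 60 * 60,  # shorten the schema cache to 12 hours
    )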
|
||||||
|
|
||||||
|
class RedmineConfigDto(BaseModel):
|
||||||
|
"""Frontend-safe view of the config (no plain api key)."""
|
||||||
|
|
||||||
|
id: Optional[str] = None
|
||||||
|
featureInstanceId: str
|
||||||
|
mandateId: Optional[str] = None
|
||||||
|
baseUrl: str = ""
|
||||||
|
projectId: str = ""
|
||||||
|
hasApiKey: bool = False
|
||||||
|
rootTrackerName: str = "Userstory"
|
||||||
|
defaultPeriodValue: Optional[Dict[str, Any]] = None
|
||||||
|
schemaCacheTtlSeconds: int = 24 * 60 * 60
|
||||||
|
schemaCachedAt: Optional[float] = None
|
||||||
|
isActive: bool = True
|
||||||
|
lastConnectedAt: Optional[float] = None
|
||||||
|
lastSyncAt: Optional[float] = None
|
||||||
|
lastFullSyncAt: Optional[float] = None
|
||||||
|
lastSyncTicketCount: Optional[int] = None
|
||||||
|
lastSyncErrorMessage: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineTicketUpdateRequest(BaseModel):
|
||||||
|
"""Body for ``PUT /tickets/{id}``."""
|
||||||
|
|
||||||
|
subject: Optional[str] = None
|
||||||
|
description: Optional[str] = None
|
||||||
|
trackerId: Optional[int] = None
|
||||||
|
statusId: Optional[int] = None
|
||||||
|
priorityId: Optional[int] = None
|
||||||
|
assignedToId: Optional[int] = None
|
||||||
|
parentIssueId: Optional[int] = None
|
||||||
|
fixedVersionId: Optional[int] = None
|
||||||
|
notes: Optional[str] = None
|
||||||
|
customFields: Optional[Dict[int, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineTicketCreateRequest(BaseModel):
|
||||||
|
"""Body for ``POST /tickets``."""
|
||||||
|
|
||||||
|
subject: str
|
||||||
|
trackerId: int
|
||||||
|
description: Optional[str] = ""
|
||||||
|
statusId: Optional[int] = None
|
||||||
|
priorityId: Optional[int] = None
|
||||||
|
assignedToId: Optional[int] = None
|
||||||
|
parentIssueId: Optional[int] = None
|
||||||
|
fixedVersionId: Optional[int] = None
|
||||||
|
customFields: Optional[Dict[int, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineRelationCreateRequest(BaseModel):
|
||||||
|
"""Body for ``POST /tickets/{id}/relations``."""
|
||||||
|
|
||||||
|
issueToId: int
|
||||||
|
relationType: str = Field(default="relates")
|
||||||
|
delay: Optional[int] = None
|
||||||
449  modules/features/redmine/interfaceFeatureRedmine.py  Normal file
@@ -0,0 +1,449 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Interface for the Redmine feature.

Owns:
- Database connection to ``poweron_redmine``
- CRUD on ``RedmineInstanceConfig`` (one row per FeatureInstance)
- Encryption of the API key (``encryptValue`` keyed ``"redmineApiKey"``)
- Resolution of the active config to a ``ConnectorTicketsRedmine`` instance
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||||
|
from modules.connectors.connectorTicketsRedmine import ConnectorTicketsRedmine
|
||||||
|
from modules.datamodels.datamodelUam import User
|
||||||
|
from modules.features.redmine.datamodelRedmine import (
|
||||||
|
RedmineConfigDto,
|
||||||
|
RedmineConfigUpdateRequest,
|
||||||
|
RedmineInstanceConfig,
|
||||||
|
RedmineRelationMirror,
|
||||||
|
RedmineTicketMirror,
|
||||||
|
)
|
||||||
|
from modules.security.rbac import RbacClass
|
||||||
|
from modules.shared.configuration import APP_CONFIG, decryptValue, encryptValue
|
||||||
|
from modules.shared.dbRegistry import registerDatabase
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
redmineDatabase = "poweron_redmine"
|
||||||
|
registerDatabase(redmineDatabase)
|
||||||
|
|
||||||
|
|
||||||
|
_redmineInterfaces: Dict[str, "RedmineObjects"] = {}
|
||||||
|
|
||||||
|
|
||||||
|
class RedmineObjects:
|
||||||
|
"""Per-user, per-instance Redmine interface."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
currentUser: User,
|
||||||
|
mandateId: Optional[str] = None,
|
||||||
|
featureInstanceId: Optional[str] = None,
|
||||||
|
) -> None:
|
||||||
|
self.currentUser = currentUser
|
||||||
|
self.userId = currentUser.id if currentUser else None
|
||||||
|
self.mandateId = mandateId
|
||||||
|
self.featureInstanceId = featureInstanceId
|
||||||
|
|
||||||
|
self._initializeDatabase()
|
||||||
|
|
||||||
|
from modules.security.rootAccess import getRootDbAppConnector
|
||||||
|
dbApp = getRootDbAppConnector()
|
||||||
|
self.rbac = RbacClass(self.db, dbApp=dbApp)
|
||||||
|
|
||||||
|
self.db.updateContext(self.userId)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# DB bootstrap
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _initializeDatabase(self) -> None:
|
||||||
|
self.db = DatabaseConnector(
|
||||||
|
dbHost=APP_CONFIG.get("DB_HOST", "_no_config_default_data"),
|
||||||
|
dbDatabase=redmineDatabase,
|
||||||
|
dbUser=APP_CONFIG.get("DB_USER"),
|
||||||
|
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
|
||||||
|
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
|
||||||
|
userId=self.userId,
|
||||||
|
)
|
||||||
|
logger.debug(f"Redmine database initialized for user {self.userId}")
|
||||||
|
|
||||||
|
def setUserContext(
|
||||||
|
self,
|
||||||
|
currentUser: User,
|
||||||
|
mandateId: Optional[str] = None,
|
||||||
|
featureInstanceId: Optional[str] = None,
|
||||||
|
) -> None:
|
||||||
|
self.currentUser = currentUser
|
||||||
|
self.userId = currentUser.id if currentUser else None
|
||||||
|
self.mandateId = mandateId
|
||||||
|
self.featureInstanceId = featureInstanceId
|
||||||
|
self.db.updateContext(self.userId)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Config CRUD
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _findConfigRecord(self, featureInstanceId: str) -> Optional[Dict[str, Any]]:
|
||||||
|
records = self.db.getRecordset(
|
||||||
|
RedmineInstanceConfig,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId},
|
||||||
|
)
|
||||||
|
if not records:
|
||||||
|
return None
|
||||||
|
return dict(records[0])
|
||||||
|
|
||||||
|
def getConfig(self, featureInstanceId: str) -> Optional[RedmineInstanceConfig]:
|
||||||
|
record = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not record:
|
||||||
|
return None
|
||||||
|
return RedmineInstanceConfig(**{k: v for k, v in record.items() if not k.startswith("_")})
|
||||||
|
|
||||||
|
def getConfigDto(self, featureInstanceId: str) -> RedmineConfigDto:
|
||||||
|
cfg = self.getConfig(featureInstanceId)
|
||||||
|
if not cfg:
|
||||||
|
return RedmineConfigDto(
|
||||||
|
featureInstanceId=featureInstanceId,
|
||||||
|
mandateId=self.mandateId,
|
||||||
|
)
|
||||||
|
return RedmineConfigDto(
|
||||||
|
id=cfg.id,
|
||||||
|
featureInstanceId=cfg.featureInstanceId,
|
||||||
|
mandateId=cfg.mandateId,
|
||||||
|
baseUrl=cfg.baseUrl or "",
|
||||||
|
projectId=cfg.projectId or "",
|
||||||
|
hasApiKey=bool(cfg.encryptedApiKey),
|
||||||
|
rootTrackerName=cfg.rootTrackerName or "Userstory",
|
||||||
|
defaultPeriodValue=cfg.defaultPeriodValue,
|
||||||
|
schemaCacheTtlSeconds=cfg.schemaCacheTtlSeconds if cfg.schemaCacheTtlSeconds is not None else 24 * 60 * 60,
|
||||||
|
schemaCachedAt=cfg.schemaCachedAt,
|
||||||
|
isActive=cfg.isActive if cfg.isActive is not None else True,
|
||||||
|
lastConnectedAt=cfg.lastConnectedAt,
|
||||||
|
lastSyncAt=cfg.lastSyncAt,
|
||||||
|
lastFullSyncAt=cfg.lastFullSyncAt,
|
||||||
|
lastSyncTicketCount=cfg.lastSyncTicketCount,
|
||||||
|
lastSyncErrorMessage=cfg.lastSyncErrorMessage,
|
||||||
|
)
|
||||||
|
|
||||||
|
def upsertConfig(
|
||||||
|
self,
|
||||||
|
featureInstanceId: str,
|
||||||
|
update: RedmineConfigUpdateRequest,
|
||||||
|
) -> RedmineConfigDto:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
|
||||||
|
data: Dict[str, Any] = {}
|
||||||
|
if update.baseUrl is not None:
|
||||||
|
data["baseUrl"] = update.baseUrl.strip().rstrip("/")
|
||||||
|
if update.projectId is not None:
|
||||||
|
data["projectId"] = update.projectId.strip()
|
||||||
|
if update.rootTrackerName is not None:
|
||||||
|
cleaned = update.rootTrackerName.strip()
|
||||||
|
if not cleaned:
|
||||||
|
raise ValueError("rootTrackerName must not be empty")
|
||||||
|
data["rootTrackerName"] = cleaned
|
||||||
|
if update.defaultPeriodValue is not None:
|
||||||
|
data["defaultPeriodValue"] = update.defaultPeriodValue
|
||||||
|
if update.schemaCacheTtlSeconds is not None:
|
||||||
|
data["schemaCacheTtlSeconds"] = int(update.schemaCacheTtlSeconds)
|
||||||
|
if update.isActive is not None:
|
||||||
|
data["isActive"] = bool(update.isActive)
|
||||||
|
|
||||||
|
if update.apiKey is not None:
|
||||||
|
apiKey = update.apiKey.strip()
|
||||||
|
if apiKey == "":
|
||||||
|
data["encryptedApiKey"] = ""
|
||||||
|
else:
|
||||||
|
data["encryptedApiKey"] = encryptValue(
|
||||||
|
apiKey,
|
||||||
|
userId=self.userId or "system",
|
||||||
|
keyName="redmineApiKey",
|
||||||
|
)
|
||||||
|
|
||||||
|
if existing:
|
||||||
|
self.db.recordModify(RedmineInstanceConfig, existing["id"], data)
|
||||||
|
else:
|
||||||
|
seed = RedmineInstanceConfig(
|
||||||
|
featureInstanceId=featureInstanceId,
|
||||||
|
mandateId=self.mandateId,
|
||||||
|
).model_dump()
|
||||||
|
seed.update(data)
|
||||||
|
self.db.recordCreate(RedmineInstanceConfig, seed)
|
||||||
|
|
||||||
|
return self.getConfigDto(featureInstanceId)
|
||||||
|
|
||||||
|
def markConfigInvalid(self, featureInstanceId: str, reason: str = "") -> None:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not existing:
|
||||||
|
return
|
||||||
|
self.db.recordModify(
|
||||||
|
RedmineInstanceConfig,
|
||||||
|
existing["id"],
|
||||||
|
{"lastConnectedAt": None},
|
||||||
|
)
|
||||||
|
if reason:
|
||||||
|
logger.warning(f"Redmine config {featureInstanceId} invalidated: {reason}")
|
||||||
|
|
||||||
|
def markConfigConnected(self, featureInstanceId: str) -> None:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not existing:
|
||||||
|
return
|
||||||
|
self.db.recordModify(
|
||||||
|
RedmineInstanceConfig,
|
||||||
|
existing["id"],
|
||||||
|
{"lastConnectedAt": time.time()},
|
||||||
|
)
|
||||||
|
|
||||||
|
def updateSchemaCache(self, featureInstanceId: str, schema: Dict[str, Any]) -> None:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not existing:
|
||||||
|
return
|
||||||
|
self.db.recordModify(
|
||||||
|
RedmineInstanceConfig,
|
||||||
|
existing["id"],
|
||||||
|
{"schemaCache": schema, "schemaCachedAt": time.time()},
|
||||||
|
)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Connector resolution
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _decryptApiKey(self, encryptedApiKey: str) -> str:
|
||||||
|
if not encryptedApiKey:
|
||||||
|
return ""
|
||||||
|
try:
|
||||||
|
return decryptValue(
|
||||||
|
encryptedApiKey,
|
||||||
|
userId=self.userId or "system",
|
||||||
|
keyName="redmineApiKey",
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to decrypt Redmine api key: {e}")
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def resolveConnector(
|
||||||
|
self, featureInstanceId: str
|
||||||
|
) -> Optional[ConnectorTicketsRedmine]:
|
||||||
|
cfg = self.getConfig(featureInstanceId)
|
||||||
|
if not cfg or not cfg.isActive:
|
||||||
|
return None
|
||||||
|
if not cfg.baseUrl or not cfg.projectId or not cfg.encryptedApiKey:
|
||||||
|
return None
|
||||||
|
apiKey = self._decryptApiKey(cfg.encryptedApiKey)
|
||||||
|
if not apiKey:
|
||||||
|
return None
|
||||||
|
return ConnectorTicketsRedmine(
|
||||||
|
baseUrl=cfg.baseUrl,
|
||||||
|
apiKey=apiKey,
|
||||||
|
projectId=cfg.projectId,
|
||||||
|
)
|
||||||
|
|
||||||
|
def deleteConfig(self, featureInstanceId: str) -> bool:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not existing:
|
||||||
|
return False
|
||||||
|
self.db.recordDelete(RedmineInstanceConfig, existing["id"])
|
||||||
|
return True
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Sync state
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def recordSyncSuccess(
|
||||||
|
self,
|
||||||
|
featureInstanceId: str,
|
||||||
|
*,
|
||||||
|
full: bool,
|
||||||
|
ticketsUpserted: int,
|
||||||
|
durationMs: int,
|
||||||
|
lastSyncAt: float,
|
||||||
|
) -> None:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not existing:
|
||||||
|
return
|
||||||
|
update: Dict[str, Any] = {
|
||||||
|
"lastSyncAt": float(lastSyncAt),
|
||||||
|
"lastSyncDurationMs": int(durationMs),
|
||||||
|
"lastSyncTicketCount": int(ticketsUpserted),
|
||||||
|
"lastSyncErrorAt": None,
|
||||||
|
"lastSyncErrorMessage": None,
|
||||||
|
}
|
||||||
|
if full:
|
||||||
|
update["lastFullSyncAt"] = float(lastSyncAt)
|
||||||
|
self.db.recordModify(RedmineInstanceConfig, existing["id"], update)
|
||||||
|
|
||||||
|
def recordSyncFailure(self, featureInstanceId: str, message: str) -> None:
|
||||||
|
existing = self._findConfigRecord(featureInstanceId)
|
||||||
|
if not existing:
|
||||||
|
return
|
||||||
|
self.db.recordModify(
|
||||||
|
RedmineInstanceConfig,
|
||||||
|
existing["id"],
|
||||||
|
{
|
||||||
|
"lastSyncErrorAt": time.time(),
|
||||||
|
"lastSyncErrorMessage": message[:1000] if message else "unknown error",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Ticket mirror CRUD
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _findMirroredTicket(
|
||||||
|
self, featureInstanceId: str, redmineId: int
|
||||||
|
) -> Optional[Dict[str, Any]]:
|
||||||
|
records = self.db.getRecordset(
|
||||||
|
RedmineTicketMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId, "redmineId": int(redmineId)},
|
||||||
|
)
|
||||||
|
if not records:
|
||||||
|
return None
|
||||||
|
return dict(records[0])
|
||||||
|
|
||||||
|
def upsertMirroredTicket(
|
||||||
|
self,
|
||||||
|
featureInstanceId: str,
|
||||||
|
redmineId: int,
|
||||||
|
record: Dict[str, Any],
|
||||||
|
) -> str:
|
||||||
|
existing = self._findMirroredTicket(featureInstanceId, redmineId)
|
||||||
|
if existing:
|
||||||
|
update = {k: v for k, v in record.items() if k not in {"id"}}
|
||||||
|
self.db.recordModify(RedmineTicketMirror, existing["id"], update)
|
||||||
|
return existing["id"]
|
||||||
|
else:
|
||||||
|
new = self.db.recordCreate(RedmineTicketMirror, record)
|
||||||
|
return new.get("id") if isinstance(new, dict) else record.get("id")
|
||||||
|
|
||||||
|
def deleteMirroredTicket(self, featureInstanceId: str, redmineId: int) -> bool:
|
||||||
|
existing = self._findMirroredTicket(featureInstanceId, redmineId)
|
||||||
|
if not existing:
|
||||||
|
return False
|
||||||
|
self.db.recordDelete(RedmineTicketMirror, existing["id"])
|
||||||
|
return True
|
||||||
|
|
||||||
|
def listMirroredTickets(
|
||||||
|
self,
|
||||||
|
featureInstanceId: str,
|
||||||
|
*,
|
||||||
|
trackerIds: Optional[list] = None,
|
||||||
|
statusIds: Optional[list] = None,
|
||||||
|
assigneeId: Optional[int] = None,
|
||||||
|
updatedFromTs: Optional[float] = None,
|
||||||
|
updatedToTs: Optional[float] = None,
|
||||||
|
) -> list:
|
||||||
|
recordFilter: Dict[str, Any] = {"featureInstanceId": featureInstanceId}
|
||||||
|
records = self.db.getRecordset(RedmineTicketMirror, recordFilter=recordFilter)
|
||||||
|
out = []
|
||||||
|
for r in records or []:
|
||||||
|
d = dict(r)
|
||||||
|
if trackerIds and d.get("trackerId") not in trackerIds:
|
||||||
|
continue
|
||||||
|
if statusIds and d.get("statusId") not in statusIds:
|
||||||
|
continue
|
||||||
|
if assigneeId is not None and d.get("assignedToId") != assigneeId:
|
||||||
|
continue
|
||||||
|
uts = d.get("updatedOnTs")
|
||||||
|
if updatedFromTs is not None and (uts is None or uts < updatedFromTs):
|
||||||
|
continue
|
||||||
|
if updatedToTs is not None and (uts is None or uts > updatedToTs):
|
||||||
|
continue
|
||||||
|
out.append(d)
|
||||||
|
return out
|
||||||
|
|
||||||
|
def countMirroredTickets(self, featureInstanceId: str) -> int:
|
||||||
|
records = self.db.getRecordset(
|
||||||
|
RedmineTicketMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId},
|
||||||
|
)
|
||||||
|
return len(records or [])
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Relation mirror CRUD
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def insertMirroredRelation(self, featureInstanceId: str, record: Dict[str, Any]) -> None:
|
||||||
|
self.db.recordCreate(RedmineRelationMirror, record)
|
||||||
|
|
||||||
|
def deleteMirroredRelationsForIssue(self, featureInstanceId: str, issueId: int) -> int:
|
||||||
|
records_a = self.db.getRecordset(
|
||||||
|
RedmineRelationMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId, "issueId": int(issueId)},
|
||||||
|
) or []
|
||||||
|
records_b = self.db.getRecordset(
|
||||||
|
RedmineRelationMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId, "issueToId": int(issueId)},
|
||||||
|
) or []
|
||||||
|
deleted = 0
|
||||||
|
seen = set()
|
||||||
|
for r in list(records_a) + list(records_b):
|
||||||
|
rid = r.get("id")
|
||||||
|
if not rid or rid in seen:
|
||||||
|
continue
|
||||||
|
seen.add(rid)
|
||||||
|
self.db.recordDelete(RedmineRelationMirror, rid)
|
||||||
|
deleted += 1
|
||||||
|
return deleted
|
||||||
|
|
||||||
|
def listMirroredRelations(self, featureInstanceId: str) -> list:
|
||||||
|
records = self.db.getRecordset(
|
||||||
|
RedmineRelationMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId},
|
||||||
|
)
|
||||||
|
return [dict(r) for r in (records or [])]
|
||||||
|
|
||||||
|
def countMirroredRelations(self, featureInstanceId: str) -> int:
|
||||||
|
return len(self.db.getRecordset(
|
||||||
|
RedmineRelationMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId},
|
||||||
|
) or [])
|
||||||
|
|
||||||
|
def deleteMirroredRelationByRedmineId(
|
||||||
|
self, featureInstanceId: str, redmineRelationId: int
|
||||||
|
) -> bool:
|
||||||
|
records = self.db.getRecordset(
|
||||||
|
RedmineRelationMirror,
|
||||||
|
recordFilter={"featureInstanceId": featureInstanceId, "redmineRelationId": int(redmineRelationId)},
|
||||||
|
)
|
||||||
|
if not records:
|
||||||
|
return False
|
||||||
|
self.db.recordDelete(RedmineRelationMirror, records[0]["id"])
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def getInterface(
    currentUser: Optional[User] = None,
    mandateId: Optional[str] = None,
    featureInstanceId: Optional[str] = None,
) -> RedmineObjects:
    if not currentUser:
        raise ValueError("Invalid user context: user is required")

    effectiveMandateId = str(mandateId) if mandateId else None
    effectiveFeatureInstanceId = str(featureInstanceId) if featureInstanceId else None

    contextKey = (
        f"redmine_{effectiveMandateId}_{effectiveFeatureInstanceId}_{currentUser.id}"
    )

    if contextKey not in _redmineInterfaces:
        _redmineInterfaces[contextKey] = RedmineObjects(
            currentUser,
            mandateId=effectiveMandateId,
            featureInstanceId=effectiveFeatureInstanceId,
        )
    else:
        _redmineInterfaces[contextKey].setUserContext(
            currentUser,
            mandateId=effectiveMandateId,
            featureInstanceId=effectiveFeatureInstanceId,
        )
    return _redmineInterfaces[contextKey]
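# Editor's note (illustrative sketch, not part of the original file): the typical call
# sequence promised by the module docstring -- resolve the cached per-user interface,
# then turn the stored config into a live connector. ``someUser`` is a placeholder.
def _exampleResolveConnector(someUser: User, instanceId: str) -> Optional[ConnectorTicketsRedmine]:
    iface = getInterface(someUser, mandateId=None, featureInstanceId=instanceId)
    # None when the instance is unconfigured, inactive, or the API key cannot be decrypted.
    return iface.resolveConnector(instanceId)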
|
||||||
335  modules/features/redmine/mainRedmine.py  Normal file
@@ -0,0 +1,335 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine Feature Container -- Main Module.

Defines the feature metadata and registers RBAC objects + template roles
in the catalog. Loaded automatically by ``modules.system.registry``.
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
FEATURE_CODE = "redmine"
|
||||||
|
FEATURE_LABEL = t("Redmine", context="UI")
|
||||||
|
FEATURE_ICON = "mdi-bug-outline"
|
||||||
|
|
||||||
|
|
||||||
|
# Wrapping labels in t() at import time registers the keys with the i18n
|
||||||
|
# catalog immediately, so the AI translator picks them up on the next sweep.
|
||||||
|
# Without this, brand-new labels like "Ticket-Browser" stay untranslated and
|
||||||
|
# render as ``[Ticket-Browser]`` in non-de UIs.
|
||||||
|
UI_OBJECTS: List[Dict[str, Any]] = [
|
||||||
|
{"objectKey": "ui.feature.redmine.stats", "label": t("Statistik", context="UI"), "meta": {"area": "stats", "isDefault": True}},
|
||||||
|
{"objectKey": "ui.feature.redmine.browser", "label": t("Ticket-Browser", context="UI"), "meta": {"area": "browser"}},
|
||||||
|
{"objectKey": "ui.feature.redmine.settings", "label": t("Einstellungen", context="UI"), "meta": {"area": "settings", "admin_only": True}},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
DATA_OBJECTS: List[Dict[str, Any]] = [
|
||||||
|
{"objectKey": "data.feature.redmine.config", "label": t("Konfiguration", context="UI"), "meta": {"isGroup": True}},
|
||||||
|
{
|
||||||
|
"objectKey": "data.feature.redmine.RedmineInstanceConfig",
|
||||||
|
"label": t("Redmine-Verbindung", context="UI"),
|
||||||
|
"meta": {
|
||||||
|
"table": "RedmineInstanceConfig",
|
||||||
|
"group": "data.feature.redmine.config",
|
||||||
|
"fields": ["id", "baseUrl", "projectId", "rootTrackerName", "isActive", "lastConnectedAt", "lastSyncAt"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "data.feature.redmine.RedmineTicketMirror",
|
||||||
|
"label": t("Redmine-Tickets (Mirror)", context="UI"),
|
||||||
|
"meta": {
|
||||||
|
"table": "RedmineTicketMirror",
|
||||||
|
"group": "data.feature.redmine.config",
|
||||||
|
"fields": ["redmineId", "subject", "trackerName", "statusName", "assignedToName", "updatedOn"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "data.feature.redmine.RedmineRelationMirror",
|
||||||
|
"label": t("Redmine-Beziehungen (Mirror)", context="UI"),
|
||||||
|
"meta": {
|
||||||
|
"table": "RedmineRelationMirror",
|
||||||
|
"group": "data.feature.redmine.config",
|
||||||
|
"fields": ["redmineRelationId", "issueId", "issueToId", "relationType"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "data.feature.redmine.*",
|
||||||
|
"label": t("Alle Redmine-Daten", context="UI"),
|
||||||
|
"meta": {"wildcard": True, "description": "Wildcard for all redmine data tables"},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
RESOURCE_OBJECTS: List[Dict[str, Any]] = [
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.tickets.read",
|
||||||
|
"label": t("Tickets lesen", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/tickets", "method": "GET"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.tickets.create",
|
||||||
|
"label": t("Tickets erstellen", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/tickets", "method": "POST"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.tickets.update",
|
||||||
|
"label": t("Tickets bearbeiten", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}", "method": "PUT"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.tickets.delete",
|
||||||
|
"label": t("Tickets loeschen / archivieren", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}", "method": "DELETE"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.relations.manage",
|
||||||
|
"label": t("Beziehungen verwalten", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}/relations", "method": "ALL"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.stats.read",
|
||||||
|
"label": t("Statistik einsehen", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/stats", "method": "GET"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.config.manage",
|
||||||
|
"label": t("Verbindung verwalten", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/config", "method": "ALL", "admin_only": True},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.config.test",
|
||||||
|
"label": t("Verbindung testen", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/config/test", "method": "POST", "admin_only": True},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.sync.run",
|
||||||
|
"label": t("Mirror synchronisieren", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/sync", "method": "POST", "admin_only": True},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.sync.status",
|
||||||
|
"label": t("Sync-Status lesen", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/redmine/{instanceId}/sync/status", "method": "GET"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.workflows.view",
|
||||||
|
"label": t("Workflows einsehen", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "GET"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"objectKey": "resource.feature.redmine.workflows.execute",
|
||||||
|
"label": t("Workflows ausfuehren", context="UI"),
|
||||||
|
"meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
TEMPLATE_ROLES: List[Dict[str, Any]] = [
|
||||||
|
{
|
||||||
|
"roleLabel": "redmine-viewer",
|
||||||
|
"description": "Redmine-Betrachter -- Tickets und Statistik lesen",
|
||||||
|
"accessRules": [
|
||||||
|
{"context": "UI", "item": "ui.feature.redmine.stats", "view": True},
|
||||||
|
{"context": "UI", "item": "ui.feature.redmine.browser", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.read", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.stats.read", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.sync.status", "view": True},
|
||||||
|
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"roleLabel": "redmine-editor",
|
||||||
|
"description": "Redmine-Bearbeiter -- Tickets erstellen, bearbeiten, Beziehungen pflegen",
|
||||||
|
"accessRules": [
|
||||||
|
{"context": "UI", "item": "ui.feature.redmine.stats", "view": True},
|
||||||
|
{"context": "UI", "item": "ui.feature.redmine.browser", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.read", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.create", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.update", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.delete", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.relations.manage", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.stats.read", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.sync.status", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.workflows.view", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.workflows.execute", "view": True},
|
||||||
|
{"context": "DATA", "item": None, "view": True, "read": "g", "create": "g", "update": "g", "delete": "n"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"roleLabel": "redmine-admin",
|
||||||
|
"description": "Redmine-Administrator -- Vollzugriff inkl. Einstellungen und Verbindung",
|
||||||
|
"accessRules": [
|
||||||
|
{"context": "UI", "item": None, "view": True},
|
||||||
|
{"context": "DATA", "item": None, "view": True, "read": "a", "create": "a", "update": "a", "delete": "a"},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.config.manage", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.config.test", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.create", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.update", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.tickets.delete", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.relations.manage", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.workflows.view", "view": True},
|
||||||
|
{"context": "RESOURCE", "item": "resource.feature.redmine.workflows.execute", "view": True},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
# Public discovery API (called by registry.py)
# ---------------------------------------------------------------------------


def getFeatureDefinition() -> Dict[str, Any]:
    return {"code": FEATURE_CODE, "label": FEATURE_LABEL, "icon": FEATURE_ICON}


def getUiObjects() -> List[Dict[str, Any]]:
    return UI_OBJECTS


def getResourceObjects() -> List[Dict[str, Any]]:
    return RESOURCE_OBJECTS


def getDataObjects() -> List[Dict[str, Any]]:
    return DATA_OBJECTS


def getTemplateRoles() -> List[Dict[str, Any]]:
    return TEMPLATE_ROLES


def getTemplateWorkflows() -> List[Dict[str, Any]]:
    return []
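# Editor's note (illustrative sketch, not part of the original file): a registry that
# discovers this module could consume the functions above roughly as below. The shape
# of ``catalogService`` is whatever registerFeature() expects; the loop itself is an
# assumption about modules.system.registry, not a quote from it.
def _exampleRegisterWithRegistry(catalogService) -> None:
    definition = getFeatureDefinition()   # {"code": "redmine", "label": ..., "icon": ...}
    if registerFeature(catalogService):
        logger.info(f"discovered and registered feature '{definition['code']}'")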
|
||||||
|
|
||||||
|
|
||||||
|
def registerFeature(catalogService) -> bool:
|
||||||
|
"""Register UI / Resource / Data objects and sync template roles."""
|
||||||
|
try:
|
||||||
|
for uiObj in UI_OBJECTS:
|
||||||
|
catalogService.registerUiObject(
|
||||||
|
featureCode=FEATURE_CODE,
|
||||||
|
objectKey=uiObj["objectKey"],
|
||||||
|
label=uiObj["label"],
|
||||||
|
meta=uiObj.get("meta"),
|
||||||
|
)
|
||||||
|
for resObj in RESOURCE_OBJECTS:
|
||||||
|
catalogService.registerResourceObject(
|
||||||
|
featureCode=FEATURE_CODE,
|
||||||
|
objectKey=resObj["objectKey"],
|
||||||
|
label=resObj["label"],
|
||||||
|
meta=resObj.get("meta"),
|
||||||
|
)
|
||||||
|
for dataObj in DATA_OBJECTS:
|
||||||
|
catalogService.registerDataObject(
|
||||||
|
featureCode=FEATURE_CODE,
|
||||||
|
objectKey=dataObj["objectKey"],
|
||||||
|
label=dataObj["label"],
|
||||||
|
meta=dataObj.get("meta"),
|
||||||
|
)
|
||||||
|
_syncTemplateRolesToDb()
|
||||||
|
logger.info(
|
||||||
|
f"Feature '{FEATURE_CODE}' registered "
|
||||||
|
f"{len(UI_OBJECTS)} UI, {len(RESOURCE_OBJECTS)} resource, {len(DATA_OBJECTS)} data objects"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to register feature '{FEATURE_CODE}': {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Template-role sync (mirrors the trustee implementation)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _syncTemplateRolesToDb() -> int:
|
||||||
|
try:
|
||||||
|
from modules.datamodels.datamodelRbac import (
|
||||||
|
AccessRule,
|
||||||
|
AccessRuleContext,
|
||||||
|
Role,
|
||||||
|
)
|
||||||
|
from modules.datamodels.datamodelUtils import coerce_text_multilingual
|
||||||
|
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||||
|
|
||||||
|
rootInterface = getRootInterface()
|
||||||
|
existingRoles = rootInterface.getRolesByFeatureCode(FEATURE_CODE)
|
||||||
|
templateRoles = [r for r in existingRoles if r.mandateId is None]
|
||||||
|
existingByLabel: Dict[str, str] = {r.roleLabel: str(r.id) for r in templateRoles}
|
||||||
|
|
||||||
|
createdCount = 0
|
||||||
|
for roleTemplate in TEMPLATE_ROLES:
|
||||||
|
roleLabel = roleTemplate["roleLabel"]
|
||||||
|
if roleLabel in existingByLabel:
|
||||||
|
_ensureAccessRulesForRole(
|
||||||
|
rootInterface,
|
||||||
|
existingByLabel[roleLabel],
|
||||||
|
roleTemplate.get("accessRules", []),
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
newRole = Role(
|
||||||
|
roleLabel=roleLabel,
|
||||||
|
description=coerce_text_multilingual(roleTemplate.get("description", {})),
|
||||||
|
featureCode=FEATURE_CODE,
|
||||||
|
mandateId=None,
|
||||||
|
featureInstanceId=None,
|
||||||
|
isSystemRole=False,
|
||||||
|
)
|
||||||
|
createdRole = rootInterface.db.recordCreate(Role, newRole.model_dump())
|
||||||
|
roleId = createdRole.get("id")
|
||||||
|
_ensureAccessRulesForRole(rootInterface, roleId, roleTemplate.get("accessRules", []))
|
||||||
|
logger.info(f"Created template role '{roleLabel}' with ID {roleId}")
|
||||||
|
createdCount += 1
|
||||||
|
|
||||||
|
return createdCount
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error syncing template roles for feature '{FEATURE_CODE}': {e}")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def _ensureAccessRulesForRole(
|
||||||
|
rootInterface, roleId: str, ruleTemplates: List[Dict[str, Any]]
|
||||||
|
) -> int:
|
||||||
|
from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext
|
||||||
|
|
||||||
|
existingRules = rootInterface.getAccessRulesByRole(roleId)
|
||||||
|
existingSignatures: set[Any] = set()
|
||||||
|
for rule in existingRules:
|
||||||
|
sig = (rule.context.value if rule.context else None, rule.item)
|
||||||
|
existingSignatures.add(sig)
|
||||||
|
|
||||||
|
createdCount = 0
|
||||||
|
for template in ruleTemplates:
|
||||||
|
context = template.get("context", "UI")
|
||||||
|
item = template.get("item")
|
||||||
|
if (context, item) in existingSignatures:
|
||||||
|
continue
|
||||||
|
if context == "UI":
|
||||||
|
contextEnum = AccessRuleContext.UI
|
||||||
|
elif context == "DATA":
|
||||||
|
contextEnum = AccessRuleContext.DATA
|
||||||
|
elif context == "RESOURCE":
|
||||||
|
contextEnum = AccessRuleContext.RESOURCE
|
||||||
|
else:
|
||||||
|
contextEnum = context
|
||||||
|
newRule = AccessRule(
|
||||||
|
roleId=roleId,
|
||||||
|
context=contextEnum,
|
||||||
|
item=item,
|
||||||
|
view=template.get("view", False),
|
||||||
|
read=template.get("read"),
|
||||||
|
create=template.get("create"),
|
||||||
|
update=template.get("update"),
|
||||||
|
delete=template.get("delete"),
|
||||||
|
)
|
||||||
|
rootInterface.db.recordCreate(AccessRule, newRule.model_dump())
|
||||||
|
createdCount += 1
|
||||||
|
return createdCount
|
||||||
482  modules/features/redmine/routeFeatureRedmine.py  Normal file
@@ -0,0 +1,482 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""FastAPI routes for the Redmine feature.

URL pattern: ``/api/redmine/{instanceId}/...`` -- mirrors the Trustee /
CommCoach pattern. Every endpoint validates that the feature instance
exists and resolves its ``mandateId``. Audit log is written for every
write call.
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Body, Depends, HTTPException, Query, Request
|
||||||
|
|
||||||
|
from modules.auth import RequestContext, getRequestContext, limiter
|
||||||
|
from modules.features.redmine import interfaceFeatureRedmine as interfaceDb
|
||||||
|
from modules.features.redmine import (
|
||||||
|
serviceRedmine,
|
||||||
|
serviceRedmineStats,
|
||||||
|
serviceRedmineSync,
|
||||||
|
)
|
||||||
|
from modules.features.redmine.datamodelRedmine import (
|
||||||
|
RedmineConfigDto,
|
||||||
|
RedmineConfigUpdateRequest,
|
||||||
|
RedmineFieldSchemaDto,
|
||||||
|
RedmineRelationCreateRequest,
|
||||||
|
RedmineStatsDto,
|
||||||
|
RedmineSyncResultDto,
|
||||||
|
RedmineSyncStatusDto,
|
||||||
|
RedmineTicketCreateRequest,
|
||||||
|
RedmineTicketDto,
|
||||||
|
RedmineTicketUpdateRequest,
|
||||||
|
)
|
||||||
|
from modules.features.redmine.serviceRedmine import RedmineNotConfiguredError
|
||||||
|
from modules.connectors.connectorTicketsRedmine import RedmineApiError
|
||||||
|
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||||
|
from modules.interfaces.interfaceFeatures import getFeatureInterface
|
||||||
|
from modules.shared.i18nRegistry import apiRouteContext
|
||||||
|
|
||||||
|
routeApiMsg = apiRouteContext("routeFeatureRedmine")
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter(
|
||||||
|
prefix="/api/redmine",
|
||||||
|
tags=["Redmine"],
|
||||||
|
responses={404: {"description": "Not found"}},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _audit(
|
||||||
|
context: RequestContext,
|
||||||
|
action: str,
|
||||||
|
resourceType: Optional[str] = None,
|
||||||
|
resourceId: Optional[str] = None,
|
||||||
|
details: str = "",
|
||||||
|
success: bool = True,
|
||||||
|
errorMessage: Optional[str] = None,
|
||||||
|
) -> None:
|
||||||
|
try:
|
||||||
|
from modules.shared.auditLogger import audit_logger
|
||||||
|
audit_logger.logEvent(
|
||||||
|
userId=str(context.user.id),
|
||||||
|
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||||
|
featureInstanceId=getattr(context, "featureInstanceId", None),
|
||||||
|
category="redmine",
|
||||||
|
action=action,
|
||||||
|
resourceType=resourceType,
|
||||||
|
resourceId=resourceId,
|
||||||
|
details=details,
|
||||||
|
success=success,
|
||||||
|
errorMessage=errorMessage,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(f"Redmine audit log failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
|
||||||
|
"""Returns the resolved ``mandateId`` for the instance."""
|
||||||
|
rootInterface = getRootInterface()
|
||||||
|
featureInterface = getFeatureInterface(rootInterface.db)
|
||||||
|
instance = featureInterface.getFeatureInstance(instanceId)
|
||||||
|
if not instance:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=404,
|
||||||
|
detail=routeApiMsg(f"Feature instance '{instanceId}' not found"),
|
||||||
|
)
|
||||||
|
mandateId = (
|
||||||
|
instance.get("mandateId")
|
||||||
|
if isinstance(instance, dict)
|
||||||
|
else getattr(instance, "mandateId", None)
|
||||||
|
)
|
||||||
|
if not mandateId:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500,
|
||||||
|
detail=routeApiMsg("Feature instance has no mandateId"),
|
||||||
|
)
|
||||||
|
return str(mandateId)
|
||||||
|
|
||||||
|
|
||||||
|
def _toHttpStatus(e: RedmineApiError) -> int:
|
||||||
|
if e.status in (400, 401, 403, 404, 409, 422):
|
||||||
|
return e.status
|
||||||
|
return 502
|
||||||
|
|
||||||
|
|
||||||
|
def _handleRedmineError(e: RedmineApiError) -> HTTPException:
|
||||||
|
return HTTPException(status_code=_toHttpStatus(e), detail=f"Redmine: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Config
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/config", response_model=RedmineConfigDto)
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
async def getConfig(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineConfigDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
iface = interfaceDb.getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||||
|
return iface.getConfigDto(instanceId)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{instanceId}/config", response_model=RedmineConfigDto)
|
||||||
|
@limiter.limit("20/minute")
|
||||||
|
async def updateConfig(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
body: RedmineConfigUpdateRequest = Body(...),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineConfigDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
iface = interfaceDb.getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||||
|
dto = iface.upsertConfig(instanceId, body)
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.config.updated",
|
||||||
|
"RedmineInstanceConfig",
|
||||||
|
instanceId,
|
||||||
|
details=f"baseUrl={dto.baseUrl} projectId={dto.projectId} hasApiKey={dto.hasApiKey}",
|
||||||
|
)
|
||||||
|
return dto
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{instanceId}/config")
|
||||||
|
@limiter.limit("20/minute")
|
||||||
|
async def deleteConfig(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
iface = interfaceDb.getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||||
|
deleted = iface.deleteConfig(instanceId)
|
||||||
|
_audit(context, "redmine.config.deleted", "RedmineInstanceConfig", instanceId, success=deleted)
|
||||||
|
return {"deleted": deleted}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{instanceId}/config/test")
|
||||||
|
@limiter.limit("20/minute")
|
||||||
|
async def testConfig(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
result = await serviceRedmine.testConnection(context.user, mandateId, instanceId)
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.config.test",
|
||||||
|
"RedmineInstanceConfig",
|
||||||
|
instanceId,
|
||||||
|
success=bool(result.get("ok")),
|
||||||
|
errorMessage=str(result.get("message")) if not result.get("ok") else None,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Schema
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/schema", response_model=RedmineFieldSchemaDto)
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
async def getSchema(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
forceRefresh: bool = Query(False),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineFieldSchemaDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
return await serviceRedmine.getProjectMeta(
|
||||||
|
context.user, mandateId, instanceId, forceRefresh=forceRefresh
|
||||||
|
)
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Sync (mirror)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@router.post("/{instanceId}/sync", response_model=RedmineSyncResultDto)
|
||||||
|
@limiter.limit("6/minute")
|
||||||
|
async def runSync(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
force: bool = Query(default=False, description="True -> ignore lastSyncAt and pull every issue."),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineSyncResultDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
result = await serviceRedmineSync.runSync(
|
||||||
|
context.user, mandateId, instanceId, force=force
|
||||||
|
)
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.sync.completed",
|
||||||
|
"RedmineInstanceConfig",
|
||||||
|
instanceId,
|
||||||
|
details=f"full={result.full} tickets={result.ticketsUpserted} relations={result.relationsUpserted} {result.durationMs}ms",
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except RedmineApiError as e:
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.sync.completed",
|
||||||
|
"RedmineInstanceConfig",
|
||||||
|
instanceId,
|
||||||
|
success=False,
|
||||||
|
errorMessage=str(e),
|
||||||
|
)
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
except Exception as e:
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.sync.completed",
|
||||||
|
"RedmineInstanceConfig",
|
||||||
|
instanceId,
|
||||||
|
success=False,
|
||||||
|
errorMessage=str(e),
|
||||||
|
)
|
||||||
|
raise HTTPException(status_code=500, detail=f"Sync failed: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/sync/status", response_model=RedmineSyncStatusDto)
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
async def getSyncStatus(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineSyncStatusDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
return serviceRedmineSync.getSyncStatus(context.user, mandateId, instanceId)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tickets
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/tickets", response_model=List[RedmineTicketDto])
@limiter.limit("60/minute")
async def listTickets(
    request: Request,
    instanceId: str,
    trackerIds: Optional[List[int]] = Query(default=None),
    status: str = Query(default="*"),
    dateFrom: Optional[str] = Query(default=None, description="ISO date (YYYY-MM-DD) -- updated_on >= dateFrom"),
    dateTo: Optional[str] = Query(default=None, description="ISO date (YYYY-MM-DD) -- updated_on <= dateTo"),
    assignedToId: Optional[int] = Query(default=None),
    context: RequestContext = Depends(getRequestContext),
) -> List[RedmineTicketDto]:
    """Reads from the local mirror. Trigger a sync via ``POST /sync`` first."""
    mandateId = _validateInstanceAccess(instanceId, context)
    return serviceRedmine.listTickets(
        context.user,
        mandateId,
        instanceId,
        trackerIds=trackerIds,
        statusFilter=status,
        updatedOnFrom=dateFrom,
        updatedOnTo=dateTo,
        assignedToId=assignedToId,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/tickets/{issueId}", response_model=RedmineTicketDto)
|
||||||
|
@limiter.limit("120/minute")
|
||||||
|
async def getTicket(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
issueId: int,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineTicketDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
ticket = serviceRedmine.getTicket(context.user, mandateId, instanceId, issueId)
|
||||||
|
if ticket is None:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Ticket {issueId} not in mirror; run a sync first.")
|
||||||
|
return ticket
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{instanceId}/tickets", response_model=RedmineTicketDto)
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def createTicket(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
body: RedmineTicketCreateRequest = Body(...),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineTicketDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
ticket = await serviceRedmine.createTicket(context.user, mandateId, instanceId, body)
|
||||||
|
_audit(context, "redmine.ticket.created", "RedmineTicket", str(ticket.id), details=f"trackerId={body.trackerId}")
|
||||||
|
return ticket
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
_audit(context, "redmine.ticket.created", "RedmineTicket", "?", success=False, errorMessage=str(e))
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/{instanceId}/tickets/{issueId}", response_model=RedmineTicketDto)
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
async def updateTicket(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
issueId: int,
|
||||||
|
body: RedmineTicketUpdateRequest = Body(...),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineTicketDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
ticket = await serviceRedmine.updateTicket(context.user, mandateId, instanceId, issueId, body)
|
||||||
|
_audit(context, "redmine.ticket.updated", "RedmineTicket", str(issueId))
|
||||||
|
return ticket
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
_audit(context, "redmine.ticket.updated", "RedmineTicket", str(issueId), success=False, errorMessage=str(e))
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{instanceId}/tickets/{issueId}")
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def deleteTicket(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
issueId: int,
|
||||||
|
fallbackStatusId: Optional[int] = Query(default=None, description="If Redmine forbids DELETE, set this status instead"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
result = await serviceRedmine.deleteTicket(
|
||||||
|
context.user, mandateId, instanceId, issueId, fallbackStatusId=fallbackStatusId
|
||||||
|
)
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.ticket.deleted",
|
||||||
|
"RedmineTicket",
|
||||||
|
str(issueId),
|
||||||
|
success=bool(result.get("deleted") or result.get("archived")),
|
||||||
|
details=f"deleted={result.get('deleted')} archived={result.get('archived')}",
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
_audit(context, "redmine.ticket.deleted", "RedmineTicket", str(issueId), success=False, errorMessage=str(e))
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Relations
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@router.post("/{instanceId}/tickets/{issueId}/relations")
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def addRelation(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
issueId: int,
|
||||||
|
body: RedmineRelationCreateRequest = Body(...),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
rel = await serviceRedmine.addRelation(context.user, mandateId, instanceId, issueId, body)
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.relation.created",
|
||||||
|
"RedmineRelation",
|
||||||
|
str(rel.get("id")),
|
||||||
|
details=f"{issueId} -[{body.relationType}]-> {body.issueToId}",
|
||||||
|
)
|
||||||
|
return {"relation": rel}
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.relation.created",
|
||||||
|
"RedmineRelation",
|
||||||
|
f"{issueId}->{body.issueToId}",
|
||||||
|
success=False,
|
||||||
|
errorMessage=str(e),
|
||||||
|
)
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{instanceId}/relations/{relationId}")
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def deleteRelation(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
relationId: int,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
ok = await serviceRedmine.deleteRelation(context.user, mandateId, instanceId, relationId)
|
||||||
|
_audit(context, "redmine.relation.deleted", "RedmineRelation", str(relationId), success=ok)
|
||||||
|
return {"deleted": ok}
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
_audit(
|
||||||
|
context,
|
||||||
|
"redmine.relation.deleted",
|
||||||
|
"RedmineRelation",
|
||||||
|
str(relationId),
|
||||||
|
success=False,
|
||||||
|
errorMessage=str(e),
|
||||||
|
)
|
||||||
|
raise _handleRedmineError(e)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Stats
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/stats", response_model=RedmineStatsDto)
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
async def getStats(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
dateFrom: Optional[str] = Query(default=None, description="ISO date YYYY-MM-DD"),
|
||||||
|
dateTo: Optional[str] = Query(default=None, description="ISO date YYYY-MM-DD"),
|
||||||
|
bucket: str = Query(default="week", regex="^(day|week|month)$"),
|
||||||
|
trackerIds: Optional[List[int]] = Query(default=None),
|
||||||
|
categoryIds: Optional[List[int]] = Query(default=None, description="Filter by Redmine issue categories"),
|
||||||
|
statusFilter: str = Query(default="*", regex="^(\\*|open|closed)$", description="Restrict to open/closed/all tickets"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> RedmineStatsDto:
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
try:
|
||||||
|
return await serviceRedmineStats.getStats(
|
||||||
|
context.user,
|
||||||
|
mandateId,
|
||||||
|
instanceId,
|
||||||
|
dateFrom=dateFrom,
|
||||||
|
dateTo=dateTo,
|
||||||
|
bucket=bucket,
|
||||||
|
trackerIds=trackerIds,
|
||||||
|
categoryIds=categoryIds,
|
||||||
|
statusFilter=statusFilter,
|
||||||
|
)
|
||||||
|
except RedmineNotConfiguredError as e:
|
||||||
|
raise HTTPException(status_code=409, detail=str(e))
|
||||||
|
except RedmineApiError as e:
|
||||||
|
raise _handleRedmineError(e)
|
||||||
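
Taken together, the routes above give the mirror-first flow: sync once, then read tickets and stats from the mirror. A minimal client sketch, illustrative only and not part of the changed files; the `/redmine` path prefix, base URL, auth header and instance id are assumptions:

import asyncio

import httpx

GATEWAY_URL = "https://gateway.example.invalid"        # assumption
INSTANCE_ID = "00000000-0000-0000-0000-000000000000"   # assumption
HEADERS = {"Authorization": "Bearer <token>"}          # assumption

async def demo() -> None:
    async with httpx.AsyncClient(base_url=GATEWAY_URL, headers=HEADERS) as client:
        # 1) Pull every issue into the local mirror.
        sync = await client.post(f"/redmine/{INSTANCE_ID}/sync", params={"force": "true"})
        # 2) Read open tickets from the mirror (no live Redmine call).
        tickets = await client.get(f"/redmine/{INSTANCE_ID}/tickets", params={"status": "open"})
        # 3) Weekly stats, served from the 90 s stats cache where possible.
        stats = await client.get(f"/redmine/{INSTANCE_ID}/stats", params={"bucket": "week"})
        print(sync.status_code, len(tickets.json()), stats.status_code)

asyncio.run(demo())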
617	modules/features/redmine/serviceRedmine.py	Normal file
@@ -0,0 +1,617 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine service layer.

Sits between the FastAPI route and the connector. Responsibilities:

- Resolve the connector for an authenticated user / feature instance.
- Cache project meta (trackers, statuses, priorities, custom fields, users)
  on the instance config.
- Resolve the configured ``rootTrackerName`` against the live tracker list.
  No heuristic / no auto-detect.
- **Reads come from the local mirror** (``RedmineTicketMirror`` /
  ``RedmineRelationMirror`` in ``poweron_redmine``). The mirror is
  populated by ``serviceRedmineSync`` (button or scheduler).
- **Writes go to Redmine, then immediately upsert the affected ticket
  into the mirror** so the UI is consistent without waiting for a sync.
- Invalidate ``serviceRedmineStatsCache`` after every successful write.

All AI-tool-friendly entry points are pure async functions taking the
authenticated ``User`` plus the explicit ``featureInstanceId`` and
``mandateId`` so the same service can be called from REST and from the
workflow engine without context-magic.
"""

from __future__ import annotations

import logging
import time
from typing import Any, Dict, List, Optional, Tuple

from modules.connectors.connectorTicketsRedmine import (
    ConnectorTicketsRedmine,
    RedmineApiError,
)
from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
    RedmineCustomFieldSchemaDto,
    RedmineCustomFieldValueDto,
    RedmineFieldChoiceDto,
    RedmineFieldSchemaDto,
    RedmineRelationCreateRequest,
    RedmineRelationDto,
    RedmineTicketCreateRequest,
    RedmineTicketDto,
    RedmineTicketUpdateRequest,
)
from modules.features.redmine.interfaceFeatureRedmine import (
    RedmineObjects,
    getInterface,
)
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache

logger = logging.getLogger(__name__)


# ---------------------------------------------------------------------------
# Resolution helpers
# ---------------------------------------------------------------------------

class RedmineNotConfiguredError(RuntimeError):
    """The given feature instance has no usable Redmine config."""


def _resolveContext(
    currentUser: User, mandateId: Optional[str], featureInstanceId: str
) -> Tuple[RedmineObjects, ConnectorTicketsRedmine]:
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        raise RedmineNotConfiguredError(
            f"Redmine instance {featureInstanceId} is not configured or inactive"
        )
    return iface, connector


# ---------------------------------------------------------------------------
# Project meta -- with TTL cache stored on the config record
# ---------------------------------------------------------------------------

async def getProjectMeta(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    forceRefresh: bool = False,
) -> RedmineFieldSchemaDto:
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    if cfg is None:
        raise RedmineNotConfiguredError("Config row vanished after connector resolve")

    ttl = cfg.schemaCacheTtlSeconds if cfg.schemaCacheTtlSeconds is not None else 24 * 60 * 60
    fresh_enough = (
        cfg.schemaCache
        and cfg.schemaCachedAt
        and (time.time() - cfg.schemaCachedAt) < ttl
    )
    if fresh_enough and not forceRefresh:
        schema = _schemaFromCache(cfg.projectId, cfg.schemaCache, cfg.rootTrackerName)
        if schema is not None:
            return schema

    project_info = await connector.getProjectInfo()
    trackers_raw = await connector.getTrackers()
    statuses_raw = await connector.getStatuses()
    priorities_raw = await connector.getPriorities()
    custom_fields_raw = await connector.getCustomFields()
    users_raw = await connector.getProjectUsers()
    categories_raw = await connector.getIssueCategories()

    schema_cache: Dict[str, Any] = {
        "projectName": project_info.get("name", ""),
        "trackers": [{"id": t.get("id"), "name": t.get("name")} for t in trackers_raw],
        "statuses": [
            {
                "id": s.get("id"),
                "name": s.get("name"),
                "isClosed": bool(s.get("is_closed")),
            }
            for s in statuses_raw
        ],
        "priorities": [{"id": p.get("id"), "name": p.get("name")} for p in priorities_raw],
        "users": [{"id": u.get("id"), "name": u.get("name")} for u in users_raw],
        "categories": [{"id": c.get("id"), "name": c.get("name")} for c in categories_raw if c.get("id") is not None],
        "customFields": [
            {
                "id": cf.get("id"),
                "name": cf.get("name"),
                "fieldFormat": cf.get("field_format", "string"),
                "isRequired": bool(cf.get("is_required")),
                "possibleValues": [pv.get("value") for pv in (cf.get("possible_values") or []) if pv.get("value") is not None],
                "multiple": bool(cf.get("multiple")),
                "defaultValue": cf.get("default_value"),
            }
            for cf in custom_fields_raw
        ],
    }
    iface.updateSchemaCache(featureInstanceId, schema_cache)
    iface.markConfigConnected(featureInstanceId)

    return _schemaFromCache(cfg.projectId, schema_cache, cfg.rootTrackerName) or RedmineFieldSchemaDto(
        projectId=cfg.projectId,
        projectName=schema_cache["projectName"],
        rootTrackerName=cfg.rootTrackerName,
    )


def _resolveRootTrackerId(
    rootTrackerName: str, trackers: List[Dict[str, Any]]
) -> Optional[int]:
    """Resolve the configured root tracker name to a tracker id.

    Strict: case-insensitive exact match. Returns ``None`` if not found
    (the UI must surface this as a config error).
    """
    target = (rootTrackerName or "").strip().lower()
    if not target:
        return None
    for t in trackers:
        if str(t.get("name") or "").strip().lower() == target:
            tid = t.get("id")
            return int(tid) if tid is not None else None
    return None
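
An illustrative check of the strict matching above (not part of the changed file):

trackers = [{"id": 3, "name": "User Story"}, {"id": 4, "name": "Bug"}]
assert _resolveRootTrackerId("user story", trackers) == 3   # case-insensitive exact match
assert _resolveRootTrackerId("Story", trackers) is None     # no substring / fuzzy matching
assert _resolveRootTrackerId("", trackers) is None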


def _schemaFromCache(
    projectId: str, cache: Optional[Dict[str, Any]], rootTrackerName: str
) -> Optional[RedmineFieldSchemaDto]:
    if not cache:
        return None
    trackers = cache.get("trackers") or []
    return RedmineFieldSchemaDto(
        projectId=projectId,
        projectName=str(cache.get("projectName") or ""),
        trackers=[RedmineFieldChoiceDto(**t) for t in trackers],
        statuses=[RedmineFieldChoiceDto(**s) for s in cache.get("statuses") or []],
        priorities=[RedmineFieldChoiceDto(**p) for p in cache.get("priorities") or []],
        users=[RedmineFieldChoiceDto(**u) for u in cache.get("users") or []],
        categories=[RedmineFieldChoiceDto(**c) for c in cache.get("categories") or []],
        customFields=[
            RedmineCustomFieldSchemaDto(
                id=cf.get("id"),
                name=cf.get("name", ""),
                fieldFormat=cf.get("fieldFormat", "string"),
                isRequired=bool(cf.get("isRequired")),
                possibleValues=list(cf.get("possibleValues") or []),
                multiple=bool(cf.get("multiple")),
                defaultValue=cf.get("defaultValue"),
            )
            for cf in cache.get("customFields") or []
            if cf.get("id") is not None
        ],
        rootTrackerName=rootTrackerName,
        rootTrackerId=_resolveRootTrackerId(rootTrackerName, trackers),
    )


# ---------------------------------------------------------------------------
# Mirror -> RedmineTicketDto
# ---------------------------------------------------------------------------

def _mirroredRowToDto(
    row: Dict[str, Any], relations: List[Dict[str, Any]], includeRaw: bool = False
) -> RedmineTicketDto:
    return RedmineTicketDto(
        id=int(row.get("redmineId")),
        subject=str(row.get("subject") or ""),
        description=str(row.get("description") or ""),
        trackerId=row.get("trackerId"),
        trackerName=row.get("trackerName"),
        statusId=row.get("statusId"),
        statusName=row.get("statusName"),
        isClosed=bool(row.get("isClosed")),
        priorityId=row.get("priorityId"),
        priorityName=row.get("priorityName"),
        assignedToId=row.get("assignedToId"),
        assignedToName=row.get("assignedToName"),
        authorId=row.get("authorId"),
        authorName=row.get("authorName"),
        parentId=row.get("parentId"),
        fixedVersionId=row.get("fixedVersionId"),
        fixedVersionName=row.get("fixedVersionName"),
        categoryId=row.get("categoryId"),
        categoryName=row.get("categoryName"),
        createdOn=row.get("createdOn"),
        updatedOn=row.get("updatedOn"),
        customFields=[
            RedmineCustomFieldValueDto(
                id=int(cf.get("id")),
                name=str(cf.get("name") or ""),
                value=cf.get("value"),
            )
            for cf in (row.get("customFields") or [])
            if cf.get("id") is not None
        ],
        relations=[
            RedmineRelationDto(
                id=int(r.get("redmineRelationId") or r.get("id")),
                issueId=int(r.get("issueId")),
                issueToId=int(r.get("issueToId")),
                relationType=str(r.get("relationType") or "relates"),
                delay=r.get("delay"),
            )
            for r in relations
            if (r.get("redmineRelationId") or r.get("id")) is not None
        ],
        raw=row.get("raw") if includeRaw else None,
    )


def _isoToEpoch(value: Optional[str]) -> Optional[float]:
    if not value:
        return None
    try:
        from datetime import datetime
        return datetime.fromisoformat(value.replace("Z", "+00:00")).timestamp()
    except Exception:
        return None
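
For illustration (not part of the changed file), the epoch conversion behaves like this:

assert _isoToEpoch(None) is None
assert _isoToEpoch("not a date") is None
assert _isoToEpoch("2026-01-01T00:00:00Z") == 1767225600.0  # UTC epoch seconds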


# ---------------------------------------------------------------------------
# Read API -- from mirror
# ---------------------------------------------------------------------------

def listTickets(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    trackerIds: Optional[List[int]] = None,
    statusFilter: str = "*",
    updatedOnFrom: Optional[str] = None,
    updatedOnTo: Optional[str] = None,
    assignedToId: Optional[int] = None,
) -> List[RedmineTicketDto]:
    """List tickets from the local mirror.

    ``statusFilter`` accepts ``"open"``, ``"closed"`` or ``"*"`` (any),
    matching the Redmine ``status_id`` semantics.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    rows = iface.listMirroredTickets(
        featureInstanceId,
        trackerIds=trackerIds,
        assigneeId=assignedToId,
        updatedFromTs=_isoToEpoch(updatedOnFrom),
        updatedToTs=_isoToEpoch(updatedOnTo),
    )
    if statusFilter and statusFilter != "*":
        want_closed = statusFilter == "closed"
        rows = [r for r in rows if bool(r.get("isClosed")) == want_closed]

    relations_all = iface.listMirroredRelations(featureInstanceId)
    relations_by_issue: Dict[int, List[Dict[str, Any]]] = {}
    ids = {int(r.get("redmineId")) for r in rows}
    for r in relations_all:
        a = int(r.get("issueId") or 0)
        b = int(r.get("issueToId") or 0)
        for k in (a, b):
            if k in ids:
                relations_by_issue.setdefault(k, []).append(r)

    return [
        _mirroredRowToDto(row, relations_by_issue.get(int(row.get("redmineId")), []))
        for row in rows
    ]


def getTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    *,
    includeRaw: bool = True,
) -> Optional[RedmineTicketDto]:
    """Read a single ticket from the mirror. Returns ``None`` when not present."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    rows = iface.listMirroredTickets(featureInstanceId)
    target = next((r for r in rows if int(r.get("redmineId") or 0) == int(issueId)), None)
    if target is None:
        return None
    relations_all = iface.listMirroredRelations(featureInstanceId)
    rel = [
        r for r in relations_all
        if int(r.get("issueId") or 0) == int(issueId) or int(r.get("issueToId") or 0) == int(issueId)
    ]
    return _mirroredRowToDto(target, rel, includeRaw=includeRaw)


# ---------------------------------------------------------------------------
# Write API -- idempotent + cache invalidation + mirror upsert
# ---------------------------------------------------------------------------

def _invalidateCache(featureInstanceId: str) -> None:
    try:
        _getStatsCache().invalidateInstance(featureInstanceId)
    except Exception as e:
        logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")


def _diffPayload(
    current: RedmineTicketDto, update: RedmineTicketUpdateRequest
) -> Dict[str, Any]:
    """Return the Redmine ``issue`` payload containing only changed fields."""
    payload: Dict[str, Any] = {}
    if update.subject is not None and update.subject != current.subject:
        payload["subject"] = update.subject
    if update.description is not None and update.description != current.description:
        payload["description"] = update.description
    if update.trackerId is not None and update.trackerId != current.trackerId:
        payload["tracker_id"] = int(update.trackerId)
    if update.statusId is not None and update.statusId != current.statusId:
        payload["status_id"] = int(update.statusId)
    if update.priorityId is not None and update.priorityId != current.priorityId:
        payload["priority_id"] = int(update.priorityId)
    if update.assignedToId is not None and update.assignedToId != current.assignedToId:
        payload["assigned_to_id"] = int(update.assignedToId)
    if update.parentIssueId is not None and update.parentIssueId != current.parentId:
        payload["parent_issue_id"] = int(update.parentIssueId)
    if update.fixedVersionId is not None and update.fixedVersionId != current.fixedVersionId:
        payload["fixed_version_id"] = int(update.fixedVersionId)
    if update.customFields:
        current_by_id = {cf.id: cf.value for cf in current.customFields}
        cf_payload: List[Dict[str, Any]] = []
        for cf_id, value in update.customFields.items():
            try:
                cf_id_int = int(cf_id)
            except Exception:
                continue
            if current_by_id.get(cf_id_int) != value:
                cf_payload.append({"id": cf_id_int, "value": value})
        if cf_payload:
            payload["custom_fields"] = cf_payload
    return payload
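
A worked example of the diff semantics (illustrative only; `SimpleNamespace` objects stand in for the real DTO and request models, an assumption made purely for the sketch):

from types import SimpleNamespace

_current = SimpleNamespace(
    subject="Old title", description="", trackerId=4, statusId=1, priorityId=2,
    assignedToId=None, parentId=None, fixedVersionId=None, customFields=[],
)
_update = SimpleNamespace(
    subject="New title", description=None, trackerId=None, statusId=1, priorityId=None,
    assignedToId=None, parentIssueId=None, fixedVersionId=None, customFields=None,
)
# statusId equals the current value and is dropped; only the real change survives,
# so the later PUT in updateTicket() carries a minimal payload.
assert _diffPayload(_current, _update) == {"subject": "New title"}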


async def _refreshMirroredTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> None:
    from modules.features.redmine.serviceRedmineSync import upsertSingleTicket
    try:
        await upsertSingleTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    except Exception as e:
        logger.warning(f"Mirror upsert for issue {issueId} failed: {e}")


async def updateTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    update: RedmineTicketUpdateRequest,
) -> RedmineTicketDto:
    """Idempotent: fetch the issue from Redmine (live, for diff accuracy),
    only PUT if non-empty, then upsert the mirror."""
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    current_live = await connector.getIssue(int(issueId), includeRelations=False)
    current = _liveIssueToDto(current_live, schema)
    payload = _diffPayload(current, update)
    if not payload and not update.notes:
        return current

    await connector.updateIssue(int(issueId), payload, notes=update.notes)
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    _invalidateCache(featureInstanceId)
    refreshed = getTicket(currentUser, mandateId, featureInstanceId, int(issueId), includeRaw=True)
    return refreshed or current


async def createTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    payload: RedmineTicketCreateRequest,
) -> RedmineTicketDto:
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    fields: Dict[str, Any] = {
        "subject": payload.subject,
        "tracker_id": int(payload.trackerId),
        "description": payload.description or "",
    }
    if payload.statusId is not None:
        fields["status_id"] = int(payload.statusId)
    if payload.priorityId is not None:
        fields["priority_id"] = int(payload.priorityId)
    if payload.assignedToId is not None:
        fields["assigned_to_id"] = int(payload.assignedToId)
    if payload.parentIssueId is not None:
        fields["parent_issue_id"] = int(payload.parentIssueId)
    if payload.fixedVersionId is not None:
        fields["fixed_version_id"] = int(payload.fixedVersionId)
    if payload.customFields:
        fields["custom_fields"] = [
            {"id": int(k), "value": v} for k, v in payload.customFields.items()
        ]
    created = await connector.createIssue(fields)
    if created.get("id"):
        await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(created["id"]))
    _invalidateCache(featureInstanceId)
    if not created.get("id"):
        return _liveIssueToDto(created, schema, includeRaw=True)
    fresh = getTicket(currentUser, mandateId, featureInstanceId, int(created["id"]), includeRaw=True)
    return fresh or _liveIssueToDto(created, schema, includeRaw=True)


async def deleteTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    *,
    fallbackStatusId: Optional[int] = None,
) -> Dict[str, Any]:
    """Try DELETE; on Redmine's 403/401 silently fall back to a closed
    status if ``fallbackStatusId`` is provided.

    Returns ``{deleted: bool, archived: bool, statusId: int|None}``.
    """
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    deleted = await connector.deleteIssue(int(issueId))
    if deleted:
        from modules.features.redmine.serviceRedmineSync import deleteMirroredTicket
        deleteMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
        _invalidateCache(featureInstanceId)
        return {"deleted": True, "archived": False, "statusId": None}
    if fallbackStatusId is not None:
        await connector.updateIssue(
            int(issueId),
            {"status_id": int(fallbackStatusId)},
            notes="Archived via Porta -- delete forbidden by Redmine",
        )
        await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
        _invalidateCache(featureInstanceId)
        return {"deleted": False, "archived": True, "statusId": int(fallbackStatusId)}
    return {"deleted": False, "archived": False, "statusId": None}


async def addRelation(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    payload: RedmineRelationCreateRequest,
) -> Dict[str, Any]:
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    rel = await connector.addRelation(
        int(issueId),
        int(payload.issueToId),
        relationType=payload.relationType,
        delay=payload.delay,
    )
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(payload.issueToId))
    _invalidateCache(featureInstanceId)
    return rel


async def deleteRelation(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    relationId: int,
) -> bool:
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    ok = await connector.deleteRelation(int(relationId))
    if ok:
        iface.deleteMirroredRelationByRedmineId(featureInstanceId, int(relationId))
        _invalidateCache(featureInstanceId)
    return ok


# ---------------------------------------------------------------------------
# Live (Redmine) -> RedmineTicketDto -- only used by the write paths to
# compute idempotent diffs against the current Redmine state.
# ---------------------------------------------------------------------------

def _statusIsClosedFromSchema(statusId: Optional[int], schema: Optional[RedmineFieldSchemaDto]) -> bool:
    if statusId is None or schema is None:
        return False
    for s in schema.statuses:
        if s.id == statusId:
            return bool(s.isClosed)
    return False


def _liveIssueToDto(
    issue: Dict[str, Any], schema: Optional[RedmineFieldSchemaDto] = None, *, includeRaw: bool = False
) -> RedmineTicketDto:
    tracker = issue.get("tracker") or {}
    status = issue.get("status") or {}
    priority = issue.get("priority") or {}
    assigned = issue.get("assigned_to") or {}
    author = issue.get("author") or {}
    fixed_version = issue.get("fixed_version") or {}
    category = issue.get("category") or {}
    status_id = status.get("id")
    return RedmineTicketDto(
        id=int(issue.get("id")),
        subject=str(issue.get("subject") or ""),
        description=str(issue.get("description") or ""),
        trackerId=tracker.get("id"),
        trackerName=tracker.get("name"),
        statusId=status_id,
        statusName=status.get("name"),
        isClosed=_statusIsClosedFromSchema(status_id, schema),
        priorityId=priority.get("id"),
        priorityName=priority.get("name"),
        assignedToId=assigned.get("id"),
        assignedToName=assigned.get("name"),
        authorId=author.get("id"),
        authorName=author.get("name"),
        parentId=(issue.get("parent") or {}).get("id"),
        fixedVersionId=fixed_version.get("id"),
        fixedVersionName=fixed_version.get("name"),
        categoryId=category.get("id"),
        categoryName=category.get("name"),
        createdOn=issue.get("created_on"),
        updatedOn=issue.get("updated_on"),
        customFields=[
            RedmineCustomFieldValueDto(
                id=int(cf.get("id")),
                name=str(cf.get("name") or ""),
                value=cf.get("value"),
            )
            for cf in issue.get("custom_fields") or []
            if cf.get("id") is not None
        ],
        relations=[
            RedmineRelationDto(
                id=int(r.get("id")),
                issueId=int(r.get("issue_id")),
                issueToId=int(r.get("issue_to_id")),
                relationType=str(r.get("relation_type") or "relates"),
                delay=r.get("delay"),
            )
            for r in issue.get("relations") or []
            if r.get("id") is not None
        ],
        raw=issue if includeRaw else None,
    )


# ---------------------------------------------------------------------------
# Connection self-test (used by the Settings page button)
# ---------------------------------------------------------------------------

async def testConnection(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
) -> Dict[str, Any]:
    """Calls ``whoAmI`` and a minimal project fetch. Updates the
    ``lastConnectedAt`` timestamp on success. Never raises -- returns a
    structured dict for the UI."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        return {"ok": False, "reason": "notConfigured", "message": "Keine gueltige Redmine-Konfiguration."}
    try:
        user = await connector.whoAmI()
        project = await connector.getProjectInfo()
        iface.markConfigConnected(featureInstanceId)
        return {
            "ok": True,
            "user": {"id": user.get("id"), "name": (user.get("firstname") or "") + " " + (user.get("lastname") or "")},
            "project": {"id": project.get("id"), "name": project.get("name")},
        }
    except RedmineApiError as e:
        return {"ok": False, "reason": "httpError", "status": e.status, "message": (e.body or "")[:300]}
    except Exception as e:
        return {"ok": False, "reason": "exception", "message": str(e)[:300]}
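
A sketch of driving this service directly, e.g. from the workflow engine (illustrative only: the `user` object, mandate/instance ids, issue id 4711 and fallback status id 5 are placeholder assumptions):

from modules.features.redmine import serviceRedmine

async def archiveTicket(user, mandateId: str, instanceId: str) -> None:
    # Read from the mirror, write through to Redmine; the service refreshes the
    # mirror and invalidates the stats cache on its own.
    check = await serviceRedmine.testConnection(user, mandateId, instanceId)
    if not check["ok"]:
        return
    ticket = serviceRedmine.getTicket(user, mandateId, instanceId, 4711)
    if ticket is not None:
        await serviceRedmine.deleteTicket(
            user, mandateId, instanceId, ticket.id, fallbackStatusId=5
        )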
521	modules/features/redmine/serviceRedmineStats.py	Normal file
@@ -0,0 +1,521 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine statistics aggregator.

Returns raw buckets in :class:`RedmineStatsDto`. The frontend
(``RedmineStatsPage.tsx``) maps these onto ``ReportSection`` for
``FormGeneratorReport``. Decision 2026-04-21.

Sections produced:
- KPIs: total / open / closed / closedInPeriod / createdInPeriod / orphans
- statusByTracker (stacked bar)
- throughput (line chart, created vs closed per bucket)
- topAssignees (top-10 horizontal bar)
- relationDistribution (pie)
- backlogAging (open issues by age since last update)

The whole result is cached in :mod:`serviceRedmineStatsCache` keyed by
``(instanceId, dateFrom, dateTo, bucket, trackerIds)`` with a 90 s TTL.
"""

from __future__ import annotations

import bisect
import datetime as _dt
import logging
from collections import Counter, defaultdict
from typing import Any, Dict, Iterable, List, Optional, Tuple

from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
    RedmineAgingBucket,
    RedmineAssigneeBucket,
    RedmineFieldSchemaDto,
    RedmineRelationDistributionEntry,
    RedmineStatsDto,
    RedmineStatsKpis,
    RedmineStatusByTrackerEntry,
    RedmineThroughputBucket,
    RedmineTicketDto,
)
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache

logger = logging.getLogger(__name__)


# ---------------------------------------------------------------------------
# Public entry
# ---------------------------------------------------------------------------

async def getStats(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    dateFrom: Optional[str] = None,
    dateTo: Optional[str] = None,
    bucket: str = "week",
    trackerIds: Optional[List[int]] = None,
    categoryIds: Optional[List[int]] = None,
    statusFilter: str = "*",
) -> RedmineStatsDto:
    """Compute (or fetch from cache) the full statistics payload."""
    bucket_norm = (bucket or "week").lower()
    if bucket_norm not in {"day", "week", "month"}:
        bucket_norm = "week"
    tracker_ids_norm: List[int] = sorted({int(t) for t in trackerIds or []})
    category_ids_norm: List[int] = sorted({int(c) for c in categoryIds or []})
    status_norm = (statusFilter or "*").lower()
    if status_norm not in {"*", "open", "closed"}:
        status_norm = "*"

    cache = _getStatsCache()
    # Cache key now includes the new dimensions so different filter combos
    # don't collide. ``_freeze`` (in the cache module) hashes lists/sets
    # for us, so we can pass them directly as extra dimensions.
    cache_key = cache.buildKey(
        featureInstanceId, dateFrom, dateTo, bucket_norm, tracker_ids_norm,
        category_ids_norm, status_norm,
    )
    cached = cache.get(cache_key)
    if cached is not None:
        return cached

    # Lazy import: keeps the pure aggregation helpers below importable
    # without dragging in aiohttp / DB connector at module load.
    from modules.features.redmine.serviceRedmine import (
        getProjectMeta,
        listTickets,
    )

    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    root_tracker_id = schema.rootTrackerId

    tickets = listTickets(
        currentUser,
        mandateId,
        featureInstanceId,
        trackerIds=tracker_ids_norm or None,
        statusFilter=status_norm,
    )
    if category_ids_norm:
        cat_set = set(category_ids_norm)
        tickets = [t for t in tickets if t.categoryId in cat_set]

    stats = _aggregate(
        tickets,
        schema=schema,
        rootTrackerId=root_tracker_id,
        dateFrom=dateFrom,
        dateTo=dateTo,
        bucket=bucket_norm,
        trackerIdsFilter=tracker_ids_norm,
        categoryIdsFilter=category_ids_norm,
        statusFilter=status_norm,
        instanceId=featureInstanceId,
    )

    cache.set(cache_key, stats)
    return stats


# ---------------------------------------------------------------------------
# Pure aggregation (testable without I/O)
# ---------------------------------------------------------------------------

def _aggregate(
    tickets: List[RedmineTicketDto],
    *,
    schema: Optional[RedmineFieldSchemaDto],
    rootTrackerId: Optional[int],
    dateFrom: Optional[str],
    dateTo: Optional[str],
    bucket: str,
    trackerIdsFilter: List[int],
    categoryIdsFilter: List[int],
    statusFilter: str,
    instanceId: str,
) -> RedmineStatsDto:
    period_from = _parseIsoDate(dateFrom)
    period_to = _parseIsoDate(dateTo)

    kpis = _kpis(tickets, rootTrackerId, period_from, period_to)
    status_by_tracker = _statusByTracker(tickets, schema)
    throughput = _throughput(tickets, period_from, period_to, bucket)
    top_assignees = _topAssignees(tickets, limit=10)
    relation_distribution = _relationDistribution(tickets)
    backlog_aging = _backlogAging(tickets, now=_utcNow())

    return RedmineStatsDto(
        instanceId=instanceId,
        dateFrom=dateFrom,
        dateTo=dateTo,
        bucket=bucket,
        trackerIds=trackerIdsFilter,
        categoryIds=categoryIdsFilter,
        statusFilter=statusFilter,
        kpis=kpis,
        statusByTracker=status_by_tracker,
        throughput=throughput,
        topAssignees=top_assignees,
        relationDistribution=relation_distribution,
        backlogAging=backlog_aging,
    )


# ---------------------------------------------------------------------------
# Section builders
# ---------------------------------------------------------------------------

def _kpis(
    tickets: List[RedmineTicketDto],
    rootTrackerId: Optional[int],
    periodFrom: Optional[_dt.datetime],
    periodTo: Optional[_dt.datetime],
) -> RedmineStatsKpis:
    total = len(tickets)
    open_count = sum(1 for t in tickets if not t.isClosed)
    closed_count = sum(1 for t in tickets if t.isClosed)

    closed_in_period = 0
    created_in_period = 0
    for t in tickets:
        created = _parseIsoDate(t.createdOn)
        updated = _parseIsoDate(t.updatedOn)
        if created and _inPeriod(created, periodFrom, periodTo):
            created_in_period += 1
        if t.isClosed and updated and _inPeriod(updated, periodFrom, periodTo):
            closed_in_period += 1

    orphans = _countOrphans(tickets, rootTrackerId)

    return RedmineStatsKpis(
        total=total,
        open=open_count,
        closed=closed_count,
        closedInPeriod=closed_in_period,
        createdInPeriod=created_in_period,
        orphans=orphans,
    )


def _countOrphans(
    tickets: List[RedmineTicketDto], rootTrackerId: Optional[int]
) -> int:
    """A ticket is an orphan if it is not a root user-story AND not
    reachable (via parent or any relation, in either direction) to any
    root user-story within the same loaded set."""
    if not tickets:
        return 0
    by_id: Dict[int, RedmineTicketDto] = {t.id: t for t in tickets}
    roots: set[int] = {
        t.id for t in tickets if rootTrackerId and t.trackerId == rootTrackerId
    }
    if not roots:
        return sum(1 for t in tickets if not (rootTrackerId and t.trackerId == rootTrackerId))

    adjacency: Dict[int, set[int]] = defaultdict(set)
    for t in tickets:
        if t.parentId is not None and t.parentId in by_id:
            adjacency[t.id].add(t.parentId)
            adjacency[t.parentId].add(t.id)
        for r in t.relations:
            for a, b in ((r.issueId, r.issueToId), (r.issueToId, r.issueId)):
                if a in by_id and b in by_id and a != b:
                    adjacency[a].add(b)

    reached: set[int] = set(roots)
    frontier: List[int] = list(roots)
    while frontier:
        nxt: List[int] = []
        for tid in frontier:
            for neighbour in adjacency.get(tid, ()):  # type: ignore[arg-type]
                if neighbour not in reached:
                    reached.add(neighbour)
                    nxt.append(neighbour)
        frontier = nxt
    return sum(1 for t in tickets if t.id not in reached)
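
A small worked example of the orphan rule (illustrative only; `SimpleNamespace` stands in for the ticket DTO):

from types import SimpleNamespace

# Root tracker id 1: #10 is a root story, #11 hangs off it via parentId,
# #12 has no path to any root and is therefore the single orphan.
_t10 = SimpleNamespace(id=10, trackerId=1, parentId=None, relations=[])
_t11 = SimpleNamespace(id=11, trackerId=2, parentId=10, relations=[])
_t12 = SimpleNamespace(id=12, trackerId=2, parentId=None, relations=[])
assert _countOrphans([_t10, _t11, _t12], rootTrackerId=1) == 1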


def _statusByTracker(
    tickets: List[RedmineTicketDto], schema: Optional[RedmineFieldSchemaDto]
) -> List[RedmineStatusByTrackerEntry]:
    by_tracker: Dict[Tuple[Optional[int], str], Counter] = defaultdict(Counter)
    for t in tickets:
        key = (t.trackerId, t.trackerName or "(unbekannt)")
        by_tracker[key][t.statusName or "(unbekannt)"] += 1
    out: List[RedmineStatusByTrackerEntry] = []
    for (tid, tname), ctr in by_tracker.items():
        out.append(
            RedmineStatusByTrackerEntry(
                trackerId=tid,
                trackerName=tname,
                countsByStatus=dict(ctr),
                total=sum(ctr.values()),
            )
        )
    out.sort(key=lambda e: e.total, reverse=True)
    return out


def _throughput(
    tickets: List[RedmineTicketDto],
    periodFrom: Optional[_dt.datetime],
    periodTo: Optional[_dt.datetime],
    bucket: str,
) -> List[RedmineThroughputBucket]:
    """Build per-bucket snapshots: how many tickets exist at the END of
    each bucket, and how many of those are still open at that point.

    ``created`` / ``closed`` keep the raw delta numbers so callers (and
    AI tools) that want the flow can still see them. The UI line chart
    plots ``cumTotal`` and ``cumOpen``.
    """
    if not tickets:
        return []

    # If no period is set, span the lifetime of the data.
    if periodFrom is None or periodTo is None:
        all_dates: List[_dt.datetime] = []
        for t in tickets:
            for s in (t.createdOn, t.updatedOn):
                d = _parseIsoDate(s)
                if d:
                    all_dates.append(d)
        if not all_dates:
            return []
        periodFrom = periodFrom or min(all_dates)
        periodTo = periodTo or max(all_dates)

    # 1) Per-bucket flow counters (created / closed) within the period.
    created_counter: Counter = Counter()
    closed_counter: Counter = Counter()
    for t in tickets:
        c = _parseIsoDate(t.createdOn)
        if c and _inPeriod(c, periodFrom, periodTo):
            created_counter[_bucketKey(c, bucket)] += 1
        if t.isClosed:
            u = _parseIsoDate(t.updatedOn)
            if u and _inPeriod(u, periodFrom, periodTo):
                closed_counter[_bucketKey(u, bucket)] += 1

    # 2) Build the contiguous list of bucket keys spanning [from, to] so
    #    the line chart has a stable x-axis even for empty intervals.
    bucket_keys = _bucketKeysBetween(periodFrom, periodTo, bucket)
    if not bucket_keys:
        return []

    # 3) Snapshot counts: total = #created with createdOn <= bucket end;
    #    open = total - #closed with closedTs <= bucket end. We compute
    #    against ALL tickets (not just the period-windowed counters) so
    #    pre-period tickets are correctly counted in the snapshot.
    created_dates: List[_dt.datetime] = []
    closed_dates: List[_dt.datetime] = []
    for t in tickets:
        c = _parseIsoDate(t.createdOn)
        if c:
            created_dates.append(c)
        if t.isClosed:
            u = _parseIsoDate(t.updatedOn)
            if u:
                closed_dates.append(u)
    created_dates.sort()
    closed_dates.sort()

    out: List[RedmineThroughputBucket] = []
    for key in bucket_keys:
        edge = _bucketEnd(key, bucket)
        cum_total = _countLE(created_dates, edge)
        cum_closed = _countLE(closed_dates, edge)
        cum_open = max(0, cum_total - cum_closed)
        out.append(
            RedmineThroughputBucket(
                bucketKey=key,
                label=_bucketLabel(key, bucket),
                created=int(created_counter.get(key, 0)),
                closed=int(closed_counter.get(key, 0)),
                cumTotal=int(cum_total),
                cumOpen=int(cum_open),
            )
        )
    return out
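
A worked example of the snapshot semantics (illustrative only; `SimpleNamespace` stands in for the ticket DTO):

from types import SimpleNamespace

# One ticket, created in ISO week 2026-W02 and closed (last updated) in 2026-W04.
_ticket = SimpleNamespace(createdOn="2026-01-05", updatedOn="2026-01-20", isClosed=True)
_buckets = _throughput(
    [_ticket], _parseIsoDate("2026-01-05"), _parseIsoDate("2026-01-25"), "week"
)
assert [b.bucketKey for b in _buckets] == ["2026-W02", "2026-W03", "2026-W04"]
# Snapshot at each bucket end: the ticket exists from W02 on and stays open
# until it is closed inside W04.
assert [(b.cumTotal, b.cumOpen) for b in _buckets] == [(1, 1), (1, 1), (1, 0)]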


def _countLE(sortedDates: List[_dt.datetime], edge: _dt.datetime) -> int:
    """Binary search: how many entries in ``sortedDates`` are <= ``edge``."""
    return bisect.bisect_right(sortedDates, edge)


def _bucketKeysBetween(
    fromD: _dt.datetime, toD: _dt.datetime, bucket: str
) -> List[str]:
    """Inclusive list of bucket keys covering ``[fromD, toD]``."""
    if toD < fromD:
        return []
    keys: List[str] = []
    seen: set[str] = set()
    cursor = fromD
    safety = 0
    step = (
        _dt.timedelta(days=1) if bucket == "day"
        else _dt.timedelta(days=7) if bucket == "week"
        else _dt.timedelta(days=27)  # month: walk in <31d steps so we never skip
    )
    while cursor <= toD and safety < 5000:
        k = _bucketKey(cursor, bucket)
        if k not in seen:
            seen.add(k)
            keys.append(k)
        cursor += step
        safety += 1
    # Guarantee the toD bucket is included (loop's last cursor may be < toD
    # if step doesn't divide the interval cleanly, esp. for months).
    last_key = _bucketKey(toD, bucket)
    if last_key not in seen:
        keys.append(last_key)
    keys.sort()
    return keys


def _bucketEnd(key: str, bucket: str) -> _dt.datetime:
    """Last-instant timestamp covered by the given bucket key."""
    if bucket == "day":
        d = _dt.datetime.strptime(key, "%Y-%m-%d")
        return d.replace(hour=23, minute=59, second=59)
    if bucket == "month":
        d = _dt.datetime.strptime(key, "%Y-%m")
        # First of next month minus one second.
        if d.month == 12:
            nxt = d.replace(year=d.year + 1, month=1)
        else:
            nxt = d.replace(month=d.month + 1)
        return nxt - _dt.timedelta(seconds=1)
    # week: ISO format ``YYYY-Www``. End = Sunday 23:59:59 of that week.
    try:
        year_str, week_str = key.split("-W")
        year = int(year_str)
        week = int(week_str)
        # ``%G-%V-%u`` parses ISO year/week/day; %u=1 is Monday.
        monday = _dt.datetime.strptime(f"{year}-{week:02d}-1", "%G-%V-%u")
        return monday + _dt.timedelta(days=6, hours=23, minutes=59, seconds=59)
    except Exception:
        return _utcNow()


def _topAssignees(
    tickets: List[RedmineTicketDto], *, limit: int = 10
) -> List[RedmineAssigneeBucket]:
    by_assignee: Dict[Tuple[Optional[int], str], int] = defaultdict(int)
    for t in tickets:
        if t.isClosed:
            continue
        key = (t.assignedToId, t.assignedToName or "(nicht zugewiesen)")
        by_assignee[key] += 1
    sorted_items = sorted(by_assignee.items(), key=lambda kv: kv[1], reverse=True)[:limit]
    return [
        RedmineAssigneeBucket(assignedToId=k[0], name=k[1], open=v)
        for k, v in sorted_items
    ]


def _relationDistribution(
    tickets: List[RedmineTicketDto],
) -> List[RedmineRelationDistributionEntry]:
    seen: set[int] = set()
    counter: Counter = Counter()
    for t in tickets:
        for r in t.relations:
            if r.id in seen:
                continue
            seen.add(r.id)
            counter[r.relationType or "relates"] += 1
    return [
        RedmineRelationDistributionEntry(relationType=k, count=v)
        for k, v in sorted(counter.items(), key=lambda kv: kv[1], reverse=True)
    ]


def _backlogAging(
    tickets: List[RedmineTicketDto], *, now: Optional[_dt.datetime] = None
) -> List[RedmineAgingBucket]:
    if now is None:
        now = _utcNow()
    buckets = [
        RedmineAgingBucket(bucketKey="lt7", label="< 7 Tage", minDays=0, maxDays=7),
        RedmineAgingBucket(bucketKey="7-30", label="7-30 Tage", minDays=7, maxDays=30),
        RedmineAgingBucket(bucketKey="30-90", label="30-90 Tage", minDays=30, maxDays=90),
        RedmineAgingBucket(bucketKey="90-180", label="90-180 Tage", minDays=90, maxDays=180),
        RedmineAgingBucket(bucketKey="gt180", label="> 180 Tage", minDays=180, maxDays=None),
    ]
    for t in tickets:
        if t.isClosed:
            continue
        ref = _parseIsoDate(t.updatedOn) or _parseIsoDate(t.createdOn)
        if ref is None:
            continue
        age_days = max(0, (now - ref).days)
        for b in buckets:
            if (b.maxDays is None and age_days >= b.minDays) or (
                b.maxDays is not None and b.minDays <= age_days < b.maxDays
            ):
                b.count += 1
                break
    return buckets


# ---------------------------------------------------------------------------
# Date helpers (no external deps)
# ---------------------------------------------------------------------------

def _utcNow() -> _dt.datetime:
    """Naive UTC ``datetime`` -- the rest of the helpers compare naive
    objects, so we strip tz info on purpose."""
    return _dt.datetime.now(_dt.timezone.utc).replace(tzinfo=None)


def _parseIsoDate(value: Optional[str]) -> Optional[_dt.datetime]:
    if not value:
        return None
    try:
        s = value.replace("Z", "+00:00") if isinstance(value, str) else value
        if isinstance(s, str) and "T" not in s and len(s) == 10:
            return _dt.datetime.strptime(s, "%Y-%m-%d")
        return _dt.datetime.fromisoformat(s).replace(tzinfo=None)
    except Exception:
        try:
            return _dt.datetime.strptime(str(value)[:10], "%Y-%m-%d")
        except Exception:
            return None
||||||
|
|
||||||
|
|
||||||
|
def _inPeriod(
|
||||||
|
when: _dt.datetime,
|
||||||
|
fromDate: Optional[_dt.datetime],
|
||||||
|
toDate: Optional[_dt.datetime],
|
||||||
|
) -> bool:
|
||||||
|
if fromDate and when < fromDate:
|
||||||
|
return False
|
||||||
|
if toDate and when > toDate + _dt.timedelta(days=1):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _bucketKey(when: _dt.datetime, bucket: str) -> str:
|
||||||
|
if bucket == "day":
|
||||||
|
return when.strftime("%Y-%m-%d")
|
||||||
|
if bucket == "month":
|
||||||
|
return when.strftime("%Y-%m")
|
||||||
|
iso_year, iso_week, _ = when.isocalendar()
|
||||||
|
return f"{iso_year}-W{iso_week:02d}"
|
||||||
|
|
||||||
|
|
||||||
|
def _bucketLabel(key: str, bucket: str) -> str:
|
||||||
|
if bucket == "day":
|
||||||
|
return key
|
||||||
|
if bucket == "month":
|
||||||
|
try:
|
||||||
|
d = _dt.datetime.strptime(key, "%Y-%m")
|
||||||
|
return d.strftime("%b %Y")
|
||||||
|
except Exception:
|
||||||
|
return key
|
||||||
|
return key
|
||||||
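A minimal illustration of how the bucketing helpers above behave (hypothetical caller, not part of the changeset; the concrete date is an assumption):

# Hypothetical illustration only -- not part of the diff above.
import datetime as _dt

when = _dt.datetime(2026, 1, 7, 14, 30)           # a Wednesday in ISO week 2 of 2026
assert _bucketKey(when, "day") == "2026-01-07"
assert _bucketKey(when, "month") == "2026-01"
assert _bucketKey(when, "week") == "2026-W02"      # any value other than day/month falls through to ISO weeks
print(_bucketLabel("2026-01", "month"))            # "Jan 2026" under an English/C locale; %b is locale-dependent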
131 modules/features/redmine/serviceRedmineStatsCache.py Normal file
@@ -0,0 +1,131 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""TTL-based in-memory cache for ``serviceRedmineStats`` results.

The cache key is ``(featureInstanceId, dateFrom, dateTo, bucket, sorted(trackerIds))``.
Any write through ``serviceRedmine`` (createIssue, updateIssue, deleteIssue,
addRelation, deleteRelation) MUST call :func:`invalidateInstance` to drop
all cached entries for that feature instance.

Default TTL: 90 seconds. Override at construction or via ``setTtl``.
"""

from __future__ import annotations

import threading
import time
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional, Tuple


_DEFAULT_TTL_SECONDS = 90.0


def _freeze(value: Any) -> Any:
    """Make ``value`` hashable so it can live in a tuple cache key.

    Lists / sets become sorted tuples; dicts become sorted item tuples;
    everything else is returned untouched.
    """
    if isinstance(value, (list, set, tuple)):
        try:
            return tuple(sorted(value))
        except TypeError:
            return tuple(value)
    if isinstance(value, dict):
        return tuple(sorted(value.items()))
    return value


@dataclass
class _CacheEntry:
    value: Any
    expiresAt: float


CacheKey = Tuple[str, Optional[str], Optional[str], str, Tuple[int, ...], Tuple[Any, ...]]


class RedmineStatsCache:
    """Thread-safe TTL cache."""

    def __init__(self, ttlSeconds: float = _DEFAULT_TTL_SECONDS) -> None:
        self._ttlSeconds = float(ttlSeconds)
        self._store: Dict[CacheKey, _CacheEntry] = {}
        self._lock = threading.Lock()

    def setTtl(self, ttlSeconds: float) -> None:
        self._ttlSeconds = float(ttlSeconds)

    @staticmethod
    def buildKey(
        featureInstanceId: str,
        dateFrom: Optional[str],
        dateTo: Optional[str],
        bucket: str,
        trackerIds: Iterable[int],
        *extraDims: Any,
    ) -> CacheKey:
        """Build a cache key for the given query.

        ``extraDims`` is an open-ended tail so callers can add more filter
        dimensions (e.g. ``categoryIds``, ``statusFilter``) without forcing
        a signature break here. Pass them as already-canonicalised values
        (sorted lists, normalised strings, ...) so the same query always
        produces the same key.
        """
        return (
            str(featureInstanceId),
            dateFrom or None,
            dateTo or None,
            (bucket or "week").lower(),
            tuple(sorted(int(t) for t in trackerIds or [])),
            tuple(_freeze(d) for d in extraDims),
        )

    def get(self, key: CacheKey) -> Optional[Any]:
        now = time.monotonic()
        with self._lock:
            entry = self._store.get(key)
            if not entry:
                return None
            if entry.expiresAt < now:
                self._store.pop(key, None)
                return None
            return entry.value

    def set(self, key: CacheKey, value: Any, *, ttlSeconds: Optional[float] = None) -> None:
        ttl = float(ttlSeconds) if ttlSeconds is not None else self._ttlSeconds
        with self._lock:
            self._store[key] = _CacheEntry(value=value, expiresAt=time.monotonic() + ttl)

    def invalidateInstance(self, featureInstanceId: str) -> int:
        """Drop every entry whose key starts with ``featureInstanceId``.

        Returns the number of entries dropped.
        """
        target = str(featureInstanceId)
        with self._lock:
            to_drop = [k for k in self._store.keys() if k[0] == target]
            for k in to_drop:
                self._store.pop(k, None)
            return len(to_drop)

    def clear(self) -> None:
        with self._lock:
            self._store.clear()

    def size(self) -> int:
        with self._lock:
            return len(self._store)


_globalCache: Optional[RedmineStatsCache] = None


def _getStatsCache() -> RedmineStatsCache:
    """Process-wide singleton."""
    global _globalCache
    if _globalCache is None:
        _globalCache = RedmineStatsCache()
    return _globalCache
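A minimal usage sketch of the cache above (the stats computation and the instance id are assumptions; only RedmineStatsCache and _getStatsCache come from the file itself):

# Hypothetical usage sketch -- computeStats() is an assumed placeholder.
cache = _getStatsCache()
key = RedmineStatsCache.buildKey(
    featureInstanceId="fi-123",
    dateFrom="2026-01-01",
    dateTo="2026-03-31",
    bucket="week",
    trackerIds=[3, 1, 2],        # order does not matter; buildKey sorts
)

stats = cache.get(key)
if stats is None:
    stats = computeStats()       # expensive aggregation (assumed helper)
    cache.set(key, stats)        # kept for the default 90 s TTL

# Every write path must afterwards drop all entries for the instance:
cache.invalidateInstance("fi-123")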
323 modules/features/redmine/serviceRedmineSync.py Normal file
@@ -0,0 +1,323 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Incremental Redmine -> ``poweron_redmine`` mirror sync.

Strategy:
- **Full sync** when ``RedmineInstanceConfig.lastSyncAt`` is None or
  ``force=True`` is requested. Pulls every issue with ``status_id=*``
  (open + closed) for the configured project, paginated.
- **Incremental sync** otherwise. Pulls only issues whose ``updated_on``
  is greater than ``lastSyncAt - overlapSeconds`` (default 1h overlap to
  catch clock skew and missed updates).
- Each issue is upserted into ``RedmineTicketMirror`` (looked up by
  ``(featureInstanceId, redmineId)``).
- The full set of relations attached to each issue replaces any existing
  relation rows for that issue in ``RedmineRelationMirror``.

Concurrency: a per-instance ``asyncio.Lock`` prevents two concurrent
syncs for the same feature instance.

After every successful sync the in-memory stats cache is invalidated for
the instance.
"""

from __future__ import annotations

import asyncio
import logging
import time
from typing import Any, Dict, List, Optional

from modules.connectors.connectorTicketsRedmine import RedmineApiError
from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
    RedmineInstanceConfig,
    RedmineRelationMirror,
    RedmineSyncResultDto,
    RedmineSyncStatusDto,
    RedmineTicketMirror,
)
from modules.features.redmine.interfaceFeatureRedmine import getInterface
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache

logger = logging.getLogger(__name__)


_INCREMENTAL_OVERLAP_SECONDS = 60 * 60  # 1h overlap on incremental syncs
_DEFAULT_PAGE_SIZE = 100
_MAX_PAGES_SAFETY = 5000  # 500k tickets safety cap

_locks: Dict[str, asyncio.Lock] = {}


def _lockFor(featureInstanceId: str) -> asyncio.Lock:
    if featureInstanceId not in _locks:
        _locks[featureInstanceId] = asyncio.Lock()
    return _locks[featureInstanceId]


# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------

async def runSync(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    force: bool = False,
    pageSize: int = _DEFAULT_PAGE_SIZE,
) -> RedmineSyncResultDto:
    """Run a (full or incremental) sync for the given feature instance."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    if not connector or not cfg:
        raise RuntimeError(
            f"Redmine instance {featureInstanceId} is not configured or inactive"
        )

    async with _lockFor(featureInstanceId):
        started = time.monotonic()
        full = force or cfg.lastSyncAt is None
        updated_from_iso: Optional[str] = None
        if not full and cfg.lastSyncAt is not None:
            cursor_epoch = max(0.0, cfg.lastSyncAt - _INCREMENTAL_OVERLAP_SECONDS)
            updated_from_iso = time.strftime(
                "%Y-%m-%dT%H:%M:%SZ", time.gmtime(cursor_epoch)
            )

        try:
            issues = await connector.listAllIssues(
                statusId="*",
                updatedOnFrom=updated_from_iso,
                pageSize=pageSize,
                maxPages=_MAX_PAGES_SAFETY,
                include=["relations"],
            )
        except RedmineApiError as e:
            iface.recordSyncFailure(featureInstanceId, str(e))
            raise

        tickets_upserted = 0
        relations_upserted = 0
        now_epoch = time.time()

        for issue in issues:
            tickets_upserted += _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
            relations_upserted += _replaceRelations(iface, featureInstanceId, issue, now_epoch)

        duration_ms = int((time.monotonic() - started) * 1000)
        iface.recordSyncSuccess(
            featureInstanceId,
            full=full,
            ticketsUpserted=tickets_upserted,
            durationMs=duration_ms,
            lastSyncAt=now_epoch,
        )
        _getStatsCache().invalidateInstance(featureInstanceId)

        return RedmineSyncResultDto(
            instanceId=featureInstanceId,
            full=full,
            ticketsUpserted=tickets_upserted,
            relationsUpserted=relations_upserted,
            durationMs=duration_ms,
            lastSyncAt=now_epoch,
        )


def getSyncStatus(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
) -> RedmineSyncStatusDto:
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    ticket_count = iface.countMirroredTickets(featureInstanceId)
    relation_count = iface.countMirroredRelations(featureInstanceId)
    return RedmineSyncStatusDto(
        instanceId=featureInstanceId,
        lastSyncAt=cfg.lastSyncAt if cfg else None,
        lastFullSyncAt=cfg.lastFullSyncAt if cfg else None,
        lastSyncDurationMs=cfg.lastSyncDurationMs if cfg else None,
        lastSyncTicketCount=cfg.lastSyncTicketCount if cfg else None,
        lastSyncErrorAt=cfg.lastSyncErrorAt if cfg else None,
        lastSyncErrorMessage=cfg.lastSyncErrorMessage if cfg else None,
        mirroredTicketCount=ticket_count,
        mirroredRelationCount=relation_count,
    )


async def upsertSingleTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> int:
    """Re-fetch one issue from Redmine and upsert it into the mirror.

    Used by the write paths in ``serviceRedmine`` so the mirror stays
    consistent after every create / update without a full sync.
    Returns the number of relation rows replaced.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        raise RuntimeError("Redmine instance not configured")
    issue = await connector.getIssue(int(issueId), includeRelations=True)
    now_epoch = time.time()
    _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
    relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
    _getStatsCache().invalidateInstance(featureInstanceId)
    return relations_upserted


def deleteMirroredTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> bool:
    """Drop a ticket and its relations from the mirror after a successful Redmine DELETE."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
    iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
    _getStatsCache().invalidateInstance(featureInstanceId)
    return deleted


# ---------------------------------------------------------------------------
# Per-issue upsert helpers (sync, run inside the per-instance lock)
# ---------------------------------------------------------------------------

def _upsertTicket(
    iface,
    featureInstanceId: str,
    mandateId: Optional[str],
    issue: Dict[str, Any],
    nowEpoch: float,
) -> int:
    redmine_id = issue.get("id")
    if redmine_id is None:
        return 0
    statuses_lookup = (iface.getConfig(featureInstanceId).schemaCache or {}).get("statuses") or []
    is_closed = _statusIsClosed(issue.get("status") or {}, statuses_lookup)
    record = _ticketRecordFromIssue(issue, featureInstanceId, mandateId, is_closed, nowEpoch)
    iface.upsertMirroredTicket(featureInstanceId, int(redmine_id), record)
    return 1


def _replaceRelations(
    iface,
    featureInstanceId: str,
    issue: Dict[str, Any],
    nowEpoch: float,
) -> int:
    issue_id = issue.get("id")
    relations = issue.get("relations") or []
    if issue_id is None:
        return 0
    iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issue_id))
    inserted = 0
    for r in relations:
        rid = r.get("id")
        if rid is None:
            continue
        iface.insertMirroredRelation(
            featureInstanceId,
            {
                "featureInstanceId": featureInstanceId,
                "redmineRelationId": int(rid),
                "issueId": int(r.get("issue_id") or 0),
                "issueToId": int(r.get("issue_to_id") or 0),
                "relationType": str(r.get("relation_type") or "relates"),
                "delay": r.get("delay"),
                "syncedAt": nowEpoch,
            },
        )
        inserted += 1
    return inserted


# ---------------------------------------------------------------------------
# Pure helpers
# ---------------------------------------------------------------------------

def _statusIsClosed(status: Dict[str, Any], statusesLookup: List[Dict[str, Any]]) -> bool:
    """Best-effort: prefer the schemaCache; fall back to inspecting the
    raw issue (Redmine sets ``is_closed`` on the status object only when
    explicitly requested)."""
    sid = status.get("id")
    if sid is None:
        return False
    for s in statusesLookup:
        if s.get("id") == sid:
            return bool(s.get("isClosed"))
    return bool(status.get("is_closed"))


def _parseRedmineDateToEpoch(value: Optional[str]) -> Optional[float]:
    if not value:
        return None
    try:
        from datetime import datetime
        s = value.replace("Z", "+00:00")
        return datetime.fromisoformat(s).timestamp()
    except Exception:
        return None


def _ticketRecordFromIssue(
    issue: Dict[str, Any],
    featureInstanceId: str,
    mandateId: Optional[str],
    isClosed: bool,
    nowEpoch: float,
) -> Dict[str, Any]:
    tracker = issue.get("tracker") or {}
    status = issue.get("status") or {}
    priority = issue.get("priority") or {}
    assigned = issue.get("assigned_to") or {}
    author = issue.get("author") or {}
    parent = issue.get("parent") or {}
    fixed_version = issue.get("fixed_version") or {}
    category = issue.get("category") or {}
    created_on = issue.get("created_on")
    updated_on = issue.get("updated_on")
    updated_ts = _parseRedmineDateToEpoch(updated_on)

    return {
        "featureInstanceId": featureInstanceId,
        "mandateId": mandateId,
        "redmineId": int(issue.get("id")),
        "subject": str(issue.get("subject") or ""),
        "description": str(issue.get("description") or ""),
        "trackerId": tracker.get("id"),
        "trackerName": tracker.get("name"),
        "statusId": status.get("id"),
        "statusName": status.get("name"),
        "isClosed": bool(isClosed),
        "priorityId": priority.get("id"),
        "priorityName": priority.get("name"),
        "assignedToId": assigned.get("id"),
        "assignedToName": assigned.get("name"),
        "authorId": author.get("id"),
        "authorName": author.get("name"),
        "parentId": parent.get("id"),
        "fixedVersionId": fixed_version.get("id"),
        "fixedVersionName": fixed_version.get("name"),
        "categoryId": category.get("id"),
        "categoryName": category.get("name"),
        "createdOn": created_on,
        "updatedOn": updated_on,
        "createdOnTs": _parseRedmineDateToEpoch(created_on),
        "updatedOnTs": updated_ts,
        # Approximation: Redmine doesn't expose a dedicated "closed_on"
        # timestamp via the issue endpoint. For closed tickets the last
        # updatedOn is the best stable proxy without scanning journals.
        "closedOnTs": updated_ts if bool(isClosed) else None,
        "customFields": list(issue.get("custom_fields") or []),
        "raw": issue,
        "syncedAt": nowEpoch,
    }
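A standalone sketch of the incremental cursor that runSync derives from lastSyncAt (the concrete epoch value is an assumption; the names mirror the module above):

# Sketch of the incremental cursor computation only (values assumed).
import time

_INCREMENTAL_OVERLAP_SECONDS = 60 * 60                     # same 1h overlap as above

lastSyncAt = 1767225600.0                                  # 2026-01-01T00:00:00Z from RedmineInstanceConfig
cursor_epoch = max(0.0, lastSyncAt - _INCREMENTAL_OVERLAP_SECONDS)
updated_from_iso = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(cursor_epoch))
# -> "2025-12-31T23:00:00Z"; issues changed shortly before the last sync are
#    fetched again on purpose and simply re-upserted (the mirror upsert is idempotent).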
@@ -8,28 +8,30 @@ Handles feature initialization and RBAC catalog registration.
 import logging
 from typing import Dict, List, Any
 
+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)
 
 # Feature metadata
 FEATURE_CODE = "teamsbot"
-FEATURE_LABEL = "Teams Bot"
+FEATURE_LABEL = t("Teams Bot", context="UI")
 FEATURE_ICON = "mdi-headset"
 
 # UI Objects for RBAC catalog
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.teamsbot.dashboard",
-        "label": "Dashboard",
+        "label": t("Dashboard", context="UI"),
         "meta": {"area": "dashboard"}
     },
     {
         "objectKey": "ui.feature.teamsbot.sessions",
-        "label": "Sitzungen",
+        "label": t("Sitzungen", context="UI"),
         "meta": {"area": "sessions"}
     },
     {
         "objectKey": "ui.feature.teamsbot.settings",
-        "label": "Einstellungen",
+        "label": t("Einstellungen", context="UI"),
        "meta": {"area": "settings", "admin_only": True}
     },
 ]
@@ -38,7 +40,7 @@ UI_OBJECTS = [
 DATA_OBJECTS = [
     {
         "objectKey": "data.feature.teamsbot.TeamsbotSession",
-        "label": "Sitzung",
+        "label": t("Sitzung", context="UI"),
         "meta": {
             "table": "TeamsbotSession",
             "fields": ["id", "meetingLink", "botName", "status", "startedAt", "endedAt"],
@@ -48,7 +50,7 @@ DATA_OBJECTS = [
     },
     {
         "objectKey": "data.feature.teamsbot.TeamsbotTranscript",
-        "label": "Transkript",
+        "label": t("Transkript", context="UI"),
         "meta": {
             "table": "TeamsbotTranscript",
             "fields": ["id", "sessionId", "speaker", "text", "timestamp"],
@@ -58,7 +60,7 @@ DATA_OBJECTS = [
     },
     {
         "objectKey": "data.feature.teamsbot.TeamsbotBotResponse",
-        "label": "Bot-Antwort",
+        "label": t("Bot-Antwort", context="UI"),
         "meta": {
             "table": "TeamsbotBotResponse",
             "fields": ["id", "sessionId", "responseText", "detectedIntent"],
@@ -68,7 +70,7 @@ DATA_OBJECTS = [
     },
     {
         "objectKey": "data.feature.teamsbot.*",
-        "label": "Alle Teams Bot Daten",
+        "label": t("Alle Teams Bot Daten", context="UI"),
         "meta": {"wildcard": True, "description": "Wildcard for all teamsbot data tables"}
     },
 ]
@@ -77,22 +79,22 @@ DATA_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.teamsbot.session.start",
-        "label": "Sitzung starten",
+        "label": t("Sitzung starten", context="UI"),
         "meta": {"endpoint": "/api/teamsbot/{instanceId}/sessions", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.teamsbot.session.stop",
-        "label": "Sitzung beenden",
+        "label": t("Sitzung beenden", context="UI"),
         "meta": {"endpoint": "/api/teamsbot/{instanceId}/sessions/{sessionId}/stop", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.teamsbot.session.delete",
-        "label": "Sitzung löschen",
+        "label": t("Sitzung löschen", context="UI"),
         "meta": {"endpoint": "/api/teamsbot/{instanceId}/sessions/{sessionId}", "method": "DELETE"}
     },
     {
         "objectKey": "resource.feature.teamsbot.config.edit",
-        "label": "Konfiguration bearbeiten",
+        "label": t("Konfiguration bearbeiten", context="UI"),
         "meta": {"endpoint": "/api/teamsbot/{instanceId}/config", "method": "PUT", "admin_only": True}
     },
 ]
@@ -56,6 +56,7 @@ class ConnectorConfigField(BaseModel):
     secret: bool = False
     required: bool = True
     placeholder: Optional[str] = None
+    suggestions: Optional[List[str]] = None
 
 
 class BaseAccountingConnector(ABC):
@@ -2,16 +2,27 @@
 # All rights reserved.
 """Orchestrates importing accounting data from external systems into TrusteeData* tables.
 
-Flow: load config → resolve connector → fetch data → clear old records → write new records → compute balances.
+Flow per phase:
+1. async fetch from external system (HTTP, awaits cleanly on the event loop)
+2. await asyncio.to_thread(...) for the DB write phase, so the heavy
+   synchronous psycopg2 calls do NOT block the FastAPI event loop
+3. inside the worker thread we use bulk delete / bulk insert (single
+   transaction per phase) instead of N+1 single-row operations
+
+Why this matters: a typical accounting sync is ~10k-100k rows. The old
+implementation called ``recordCreate`` row-by-row on the event loop, which
+froze every other request (chat, health-check, etc.) for minutes. See
+``local/notes/changelog.txt`` for the original incident analysis.
 """
 
+import asyncio
 import json as _json
 import logging
 import os
 import time
 from collections import defaultdict
 from pathlib import Path
-from typing import Dict, Any, List, Optional
+from typing import Callable, Dict, Any, List, Optional, Type
 
 from .accountingConnectorBase import BaseAccountingConnector
 from .accountingRegistry import _getAccountingRegistry
@@ -19,30 +30,57 @@ from .accountingRegistry import _getAccountingRegistry
 logger = logging.getLogger(__name__)
 
 
-_DEBUG_SYNC_DIR = Path("D:/Athi/Local/Web/poweron/local/debug/sync")
+_HEARTBEAT_EVERY = 500
 
 
-def _debugSyncDir() -> Path:
-    _DEBUG_SYNC_DIR.mkdir(parents=True, exist_ok=True)
-    return _DEBUG_SYNC_DIR
-
-
-def _isDebugEnabled() -> bool:
+def _isDebugDumpEnabled() -> bool:
+    """Whether to write raw connector payloads to disk for offline inspection.
+
+    Controlled by ``APP_DEBUG_ACCOUNTING_SYNC_ENABLED``. Default False so that
+    INT/PROD never spend disk/IO/RAM on dumping 7-figure JSON files. Mirrors
+    the existing ``APP_DEBUG_CHAT_WORKFLOW_ENABLED`` pattern.
+    """
     try:
         from modules.shared.configuration import APP_CONFIG
-        return APP_CONFIG.get("APP_LOGGING_FILE_ENABLED", False) is True or str(APP_CONFIG.get("APP_LOGGING_FILE_ENABLED", "")).lower() == "true"
+        raw = APP_CONFIG.get("APP_DEBUG_ACCOUNTING_SYNC_ENABLED", False)
+        if isinstance(raw, bool):
+            return raw
+        return str(raw).strip().lower() in {"true", "1", "yes", "on"}
     except Exception:
         return False
 
 
-def _dumpSyncData(tag: str, rows: list):
-    """Write raw connector data to a timestamped JSON file in local/debug/sync/."""
-    if not _isDebugEnabled():
+def _resolveDebugDumpDir() -> Optional[Path]:
+    """Resolve the debug dump directory. Returns None if dumping is disabled
+    or the path could not be created."""
+    if not _isDebugDumpEnabled():
+        return None
+    try:
+        from modules.shared.configuration import APP_CONFIG
+        configured = APP_CONFIG.get("APP_DEBUG_ACCOUNTING_SYNC_DIR", None)
+        if not configured:
+            return None
+        path = Path(str(configured))
+        if not path.is_absolute():
+            gatewayDir = Path(__file__).resolve().parents[4]
+            path = gatewayDir / configured
+        path.mkdir(parents=True, exist_ok=True)
+        return path
+    except Exception as ex:
+        logger.warning(f"Could not resolve debug dump dir: {ex}")
+        return None
+
+
+def _dumpSyncData(tag: str, rows: list) -> None:
+    """Write raw connector data to a timestamped JSON file. No-op unless
+    ``APP_DEBUG_ACCOUNTING_SYNC_ENABLED`` is true AND the configured dir
+    resolves."""
+    dumpDir = _resolveDebugDumpDir()
+    if dumpDir is None:
         return
     try:
-        d = _debugSyncDir()
         ts = time.strftime("%Y%m%d-%H%M%S")
-        path = d / f"{ts}_{tag}.json"
+        path = dumpDir / f"{ts}_{tag}.json"
         serializable = []
         for r in rows:
             if isinstance(r, dict):
@@ -71,11 +109,25 @@ class AccountingDataSync:
         mandateId: str,
         dateFrom: Optional[str] = None,
         dateTo: Optional[str] = None,
+        progressCb: Optional[Callable[[int, Optional[str]], None]] = None,
     ) -> Dict[str, Any]:
         """Run a full data import for a feature instance.
 
-        Returns a summary dict with counts per entity and any errors.
+        Returns a summary dict with counts per entity and any errors. All heavy
+        DB work is offloaded to a worker thread via ``asyncio.to_thread`` so
+        the event loop remains responsive for other requests.
+
+        ``progressCb(percent, message)`` -- if provided, called at every phase
+        boundary so the UI poll on ``GET /api/jobs/{jobId}`` shows real
+        movement instead of jumping from 10 % to 100 %. Safe to omit.
         """
+        def _progress(pct: int, msg: str) -> None:
+            if progressCb is None:
+                return
+            try:
+                progressCb(pct, msg)
+            except Exception as ex:
+                logger.warning(f"progressCb failed at {pct}%: {ex}")
         from modules.features.trustee.datamodelFeatureTrustee import (
             TrusteeAccountingConfig,
             TrusteeDataAccount,
@@ -109,9 +161,8 @@ class AccountingDataSync:
         encryptedConfig = cfgRecord.get("encryptedConfig", "")
 
         try:
-            import json
             plainJson = decryptValue(encryptedConfig)
-            connConfig = json.loads(plainJson) if plainJson else {}
+            connConfig = _json.loads(plainJson) if plainJson else {}
         except Exception as e:
             summary["errors"].append(f"Failed to decrypt config: {e}")
             return summary
@@ -122,110 +173,117 @@ class AccountingDataSync:
             return summary
 
         scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
-        logger.info(f"AccountingDataSync starting for {featureInstanceId}, connector={connectorType}, dateFrom={dateFrom}, dateTo={dateTo}")
+        logger.info(
+            f"AccountingDataSync starting for {featureInstanceId}, "
+            f"connector={connectorType}, dateFrom={dateFrom}, dateTo={dateTo}"
+        )
         fetchedAccountNumbers: list = []
 
-        # 1) Chart of accounts
+        # ---- Phase 1: Chart of accounts ----
+        # Progress budget: 15-30 %. The fetch (15 %) is usually a single
+        # snappy API call; the persist step (30 %) is bulk-insert and finishes
+        # in <100 ms even for thousands of rows.
         try:
+            _progress(15, "Lade Kontenplan...")
             charts = await connector.getChartOfAccounts(connConfig)
             _dumpSyncData("accounts", charts)
             fetchedAccountNumbers = [acc.accountNumber for acc in charts if acc.accountNumber]
-            self._clearTable(TrusteeDataAccount, featureInstanceId)
-            for acc in charts:
-                self._if.db.recordCreate(TrusteeDataAccount, {
-                    "accountNumber": acc.accountNumber,
-                    "label": acc.label,
-                    "accountType": acc.accountType or "",
-                    "currency": "CHF",
-                    "isActive": True,
-                    **scope,
-                })
-            summary["accounts"] = len(charts)
+            _progress(25, f"Speichere {len(charts)} Konten...")
+            written = await asyncio.to_thread(
+                self._persistAccounts, charts, scope, featureInstanceId, TrusteeDataAccount
+            )
+            summary["accounts"] = written
+            _progress(30, f"{written} Konten gespeichert.")
         except Exception as e:
             logger.error(f"Import accounts failed: {e}", exc_info=True)
             summary["errors"].append(f"Accounts: {e}")
 
-        # 2) Journal entries + lines (pass already-fetched chart to avoid redundant API call)
+        # ---- Phase 2: Journal entries + lines ----
+        # Progress budget: 35-65 %. Usually the longest phase; the external
+        # API often paginates per account, so the fetch alone can take 30+ s.
         try:
-            rawEntries = await connector.getJournalEntries(connConfig, dateFrom=dateFrom, dateTo=dateTo, accountNumbers=fetchedAccountNumbers or None)
+            _progress(35, "Lade Journaleintraege vom Buchhaltungssystem...")
+            rawEntries = await connector.getJournalEntries(
+                connConfig,
+                dateFrom=dateFrom,
+                dateTo=dateTo,
+                accountNumbers=fetchedAccountNumbers or None,
+            )
             _dumpSyncData("journalEntries", rawEntries)
-            self._clearTable(TrusteeDataJournalEntry, featureInstanceId)
-            self._clearTable(TrusteeDataJournalLine, featureInstanceId)
-            lineCount = 0
-            for raw in rawEntries:
-                import uuid
-                entryId = str(uuid.uuid4())
-                self._if.db.recordCreate(TrusteeDataJournalEntry, {
-                    "id": entryId,
-                    "externalId": raw.get("externalId"),
-                    "bookingDate": raw.get("bookingDate"),
-                    "reference": raw.get("reference"),
-                    "description": raw.get("description", ""),
-                    "currency": raw.get("currency", "CHF"),
-                    "totalAmount": float(raw.get("totalAmount", 0)),
-                    **scope,
-                })
-                for line in (raw.get("lines") or []):
-                    self._if.db.recordCreate(TrusteeDataJournalLine, {
-                        "journalEntryId": entryId,
-                        "accountNumber": line.get("accountNumber", ""),
-                        "debitAmount": float(line.get("debitAmount", 0)),
-                        "creditAmount": float(line.get("creditAmount", 0)),
-                        "currency": line.get("currency", "CHF"),
-                        "taxCode": line.get("taxCode"),
-                        "costCenter": line.get("costCenter"),
-                        "description": line.get("description", ""),
-                        **scope,
-                    })
-                    lineCount += 1
-            summary["journalEntries"] = len(rawEntries)
-            summary["journalLines"] = lineCount
+            _progress(60, f"Speichere {len(rawEntries)} Buchungssaetze...")
+            entriesCount, linesCount = await asyncio.to_thread(
+                self._persistJournal, rawEntries, scope, featureInstanceId,
+                TrusteeDataJournalEntry, TrusteeDataJournalLine,
+            )
+            summary["journalEntries"] = entriesCount
+            summary["journalLines"] = linesCount
+            _progress(65, f"{entriesCount} Saetze + {linesCount} Buchungszeilen gespeichert.")
        except Exception as e:
-            logger.error(f"Import journal entries failed: {e}")
+            logger.error(f"Import journal entries failed: {e}", exc_info=True)
             summary["errors"].append(f"Journal entries: {e}")
 
-        # 3) Contacts (customers + vendors)
+        # ---- Phase 3: Contacts (customers + vendors) ----
+        # Progress budget: 70-85 %. Two quick API calls + one bulk-insert.
         try:
-            self._clearTable(TrusteeDataContact, featureInstanceId)
-            contactCount = 0
+            _progress(70, "Lade Kunden...")
             customers = await connector.getCustomers(connConfig)
             _dumpSyncData("customers", customers)
-            for c in customers:
-                self._if.db.recordCreate(TrusteeDataContact, self._mapContact(c, "customer", scope))
-                contactCount += 1
+            _progress(78, "Lade Lieferanten...")
             vendors = await connector.getVendors(connConfig)
             _dumpSyncData("vendors", vendors)
-            for v in vendors:
-                self._if.db.recordCreate(TrusteeDataContact, self._mapContact(v, "vendor", scope))
-                contactCount += 1
+            _progress(82, f"Speichere {len(customers) + len(vendors)} Kontakte...")
+            contactCount = await asyncio.to_thread(
+                self._persistContacts, customers, vendors, scope,
+                featureInstanceId, TrusteeDataContact,
+            )
             summary["contacts"] = contactCount
+            _progress(85, f"{contactCount} Kontakte gespeichert.")
         except Exception as e:
             logger.error(f"Import contacts failed: {e}", exc_info=True)
             summary["errors"].append(f"Contacts: {e}")
 
-        # 4) Compute account balances from journal lines
+        # ---- Phase 4: Compute account balances ----
+        # Progress budget: 90-95 %. Pure DB aggregation, no external calls.
         try:
-            self._clearTable(TrusteeDataAccountBalance, featureInstanceId)
-            balanceCount = self._computeBalances(featureInstanceId, mandateId)
+            _progress(90, "Berechne Kontensaldi...")
+            balanceCount = await asyncio.to_thread(
+                self._persistBalances, featureInstanceId, mandateId,
+                TrusteeDataJournalEntry, TrusteeDataJournalLine, TrusteeDataAccountBalance,
+            )
             summary["accountBalances"] = balanceCount
+            _progress(95, f"{balanceCount} Saldi berechnet.")
        except Exception as e:
-            logger.error(f"Compute balances failed: {e}")
+            logger.error(f"Compute balances failed: {e}", exc_info=True)
             summary["errors"].append(f"Balances: {e}")
 
-        # Update config with last import timestamp
-        try:
-            cfgId = cfgRecord.get("id")
-            if cfgId:
-                self._if.db.recordModify(TrusteeAccountingConfig, cfgId, {
-                    "lastSyncAt": time.time(),
-                    "lastSyncStatus": "success" if not summary["errors"] else "partial",
-                    "lastSyncErrorMessage": "; ".join(summary["errors"])[:500] if summary["errors"] else None,
-                })
-        except Exception:
-            pass
+        cfgId = cfgRecord.get("id")
+        if cfgId:
+            corePayload = {
+                "lastSyncAt": time.time(),
+                "lastSyncStatus": "success" if not summary["errors"] else "partial",
+                "lastSyncErrorMessage": "; ".join(summary["errors"])[:500] if summary["errors"] else None,
+            }
+            try:
+                self._if.db.recordModify(TrusteeAccountingConfig, cfgId, corePayload)
+            except Exception as coreErr:
+                logger.exception(f"AccountingDataSync: failed to write core lastSync* fields for cfg {cfgId}: {coreErr}")
+                summary["errors"].append(f"Persist lastSync core: {coreErr}")
+            extPayload = {
+                "lastSyncDateFrom": dateFrom,
+                "lastSyncDateTo": dateTo,
+                "lastSyncCounts": {
+                    "accounts": int(summary.get("accounts", 0)),
+                    "journalEntries": int(summary.get("journalEntries", 0)),
+                    "journalLines": int(summary.get("journalLines", 0)),
+                    "contacts": int(summary.get("contacts", 0)),
+                    "accountBalances": int(summary.get("accountBalances", 0)),
+                },
+            }
+            try:
+                self._if.db.recordModify(TrusteeAccountingConfig, cfgId, extPayload)
+            except Exception as extErr:
+                logger.exception(f"AccountingDataSync: failed to write extended lastSync* fields for cfg {cfgId}: {extErr}")
+                summary["errors"].append(f"Persist lastSync ext: {extErr}")
 
         summary["finishedAt"] = time.time()
         summary["durationSeconds"] = round(summary["finishedAt"] - summary["startedAt"], 1)
@@ -238,6 +296,210 @@ class AccountingDataSync:
         )
         return summary
 
+    # ===== Sync persistence helpers (run inside asyncio.to_thread) =====
+
+    def _persistAccounts(self, charts: list, scope: Dict[str, Any],
+                         featureInstanceId: str, modelAccount: Type) -> int:
+        """Bulk-replace chart of accounts for one feature instance."""
+        t0 = time.time()
+        self._bulkClear(modelAccount, featureInstanceId)
+        rows = [{
+            "accountNumber": acc.accountNumber,
+            "label": acc.label,
+            "accountType": acc.accountType or "",
+            "currency": "CHF",
+            "isActive": True,
+            **scope,
+        } for acc in charts]
+        n = self._bulkCreate(modelAccount, rows)
+        logger.info(f"Persisted {n} accounts for {featureInstanceId} in {time.time() - t0:.1f}s")
+        return n
+
+    def _persistJournal(self, rawEntries: list, scope: Dict[str, Any],
+                        featureInstanceId: str, modelEntry: Type, modelLine: Type) -> tuple:
+        """Bulk-replace journal entries + journal lines for one feature instance.
+
+        We pre-build the line rows in memory keyed by the freshly minted entryId
+        so a single ``execute_values`` call can persist all of them.
+        """
+        import uuid as _uuid
+        t0 = time.time()
+        self._bulkClear(modelEntry, featureInstanceId)
+        self._bulkClear(modelLine, featureInstanceId)
+
+        entryRows: List[Dict[str, Any]] = []
+        lineRows: List[Dict[str, Any]] = []
+        for raw in rawEntries:
+            entryId = str(_uuid.uuid4())
+            entryRows.append({
+                "id": entryId,
+                "externalId": raw.get("externalId"),
+                "bookingDate": raw.get("bookingDate"),
+                "reference": raw.get("reference"),
+                "description": raw.get("description", ""),
+                "currency": raw.get("currency", "CHF"),
+                "totalAmount": float(raw.get("totalAmount", 0)),
+                **scope,
+            })
+            for line in (raw.get("lines") or []):
+                lineRows.append({
+                    "journalEntryId": entryId,
+                    "accountNumber": line.get("accountNumber", ""),
+                    "debitAmount": float(line.get("debitAmount", 0)),
+                    "creditAmount": float(line.get("creditAmount", 0)),
+                    "currency": line.get("currency", "CHF"),
+                    "taxCode": line.get("taxCode"),
+                    "costCenter": line.get("costCenter"),
+                    "description": line.get("description", ""),
+                    **scope,
+                })
+            if len(entryRows) % _HEARTBEAT_EVERY == 0:
+                logger.info(
+                    f"Journal build progress: {len(entryRows)}/{len(rawEntries)} entries, "
+                    f"{len(lineRows)} lines so far"
+                )
+
+        entriesCount = self._bulkCreate(modelEntry, entryRows)
+        linesCount = self._bulkCreate(modelLine, lineRows)
+        logger.info(
+            f"Persisted {entriesCount} entries + {linesCount} lines for "
+            f"{featureInstanceId} in {time.time() - t0:.1f}s"
+        )
+        return entriesCount, linesCount
+
+    def _persistContacts(self, customers: list, vendors: list, scope: Dict[str, Any],
+                         featureInstanceId: str, modelContact: Type) -> int:
+        """Bulk-replace contacts (customers + vendors) for one feature instance."""
+        t0 = time.time()
+        self._bulkClear(modelContact, featureInstanceId)
+        rows = [self._mapContact(c, "customer", scope) for c in customers]
+        rows += [self._mapContact(v, "vendor", scope) for v in vendors]
+        n = self._bulkCreate(modelContact, rows)
+        logger.info(f"Persisted {n} contacts for {featureInstanceId} in {time.time() - t0:.1f}s")
+        return n
+
+    def _persistBalances(self, featureInstanceId: str, mandateId: str,
+                         modelEntry: Type, modelLine: Type, modelBalance: Type) -> int:
+        """Re-aggregate journal lines into monthly + annual balances."""
+        t0 = time.time()
+        self._bulkClear(modelBalance, featureInstanceId)
+
+        entries = self._if.db.getRecordset(
+            modelEntry, recordFilter={"featureInstanceId": featureInstanceId},
+        ) or []
+        entryDates: Dict[str, str] = {}
+        for e in entries:
+            eid = e.get("id") if isinstance(e, dict) else getattr(e, "id", None)
+            bdate = e.get("bookingDate") if isinstance(e, dict) else getattr(e, "bookingDate", None)
+            if eid and bdate:
+                entryDates[eid] = bdate
+
+        lines = self._if.db.getRecordset(
+            modelLine, recordFilter={"featureInstanceId": featureInstanceId},
+        ) or []
+
+        buckets: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
+        for ln in lines:
+            if isinstance(ln, dict):
+                jeid = ln.get("journalEntryId", "")
+                accNo = ln.get("accountNumber", "")
+                debit = float(ln.get("debitAmount", 0))
+                credit = float(ln.get("creditAmount", 0))
+            else:
+                jeid = getattr(ln, "journalEntryId", "")
+                accNo = getattr(ln, "accountNumber", "")
+                debit = float(getattr(ln, "debitAmount", 0))
+                credit = float(getattr(ln, "creditAmount", 0))
+
+            bdate = entryDates.get(jeid, "")
+            if not accNo or not bdate:
+                continue
+            parts = bdate.split("-")
+            if len(parts) < 2:
+                continue
+            try:
+                year = int(parts[0])
+                month = int(parts[1])
+            except ValueError:
+                continue
+
+            buckets[(accNo, year, month)]["debit"] += debit
+            buckets[(accNo, year, month)]["credit"] += credit
+            buckets[(accNo, year, 0)]["debit"] += debit
+            buckets[(accNo, year, 0)]["credit"] += credit
+
+        scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
+        rows = [{
+            "accountNumber": accNo,
+            "periodYear": year,
+            "periodMonth": month,
+            "openingBalance": 0.0,
+            "debitTotal": round(totals["debit"], 2),
+            "creditTotal": round(totals["credit"], 2),
+            "closingBalance": round(totals["debit"] - totals["credit"], 2),
+            "currency": "CHF",
+            **scope,
+        } for (accNo, year, month), totals in buckets.items()]
+        n = self._bulkCreate(modelBalance, rows)
+        logger.info(
+            f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s"
+        )
+        return n
+
+    # ===== Low-level bulk helpers =====
+
+    def _bulkClear(self, model: Type, featureInstanceId: str) -> int:
+        """Delete every row for this feature instance in a single statement."""
+        try:
+            return self._if.db.recordDeleteWhere(
+                model, {"featureInstanceId": featureInstanceId}
+            )
+        except AttributeError:
+            # Backwards-compatible path if the connector hasn't been upgraded
+            # yet. Logs a warning so we notice in dev/CI.
+            logger.warning(
+                "DatabaseConnector.recordDeleteWhere missing — falling back to slow per-row delete for %s",
+                model.__name__,
+            )
+            records = self._if.db.getRecordset(
+                model, recordFilter={"featureInstanceId": featureInstanceId}
+            ) or []
+            count = 0
+            for r in records:
+                rid = r.get("id") if isinstance(r, dict) else getattr(r, "id", None)
+                if rid:
+                    try:
+                        self._if.db.recordDelete(model, rid)
+                        count += 1
+                    except Exception:
+                        pass
+            return count
+
+    def _bulkCreate(self, model: Type, rows: List[Dict[str, Any]]) -> int:
+        """Insert all rows in a single transaction. Falls back to per-row
+        insert only if the connector lacks ``recordCreateBulk`` (legacy)."""
+        if not rows:
+            return 0
+        try:
+            return self._if.db.recordCreateBulk(model, rows)
+        except AttributeError:
+            logger.warning(
+                "DatabaseConnector.recordCreateBulk missing — falling back to slow per-row insert for %s",
+                model.__name__,
+            )
+            n = 0
+            for i, r in enumerate(rows, start=1):
+                try:
+                    self._if.db.recordCreate(model, r)
+                    n += 1
+                except Exception as ex:
+                    logger.warning(f"Per-row insert failed at {i}/{len(rows)}: {ex}")
+                if i % _HEARTBEAT_EVERY == 0:
+                    logger.info(f"Per-row insert progress: {i}/{len(rows)} rows ({model.__name__})")
+            return n
+
+    # ===== Field helpers =====
+
     @staticmethod
     def _safeStr(val: Any) -> str:
         """Convert a value to a safe string for DB storage, collapsing nested dicts/lists."""
@@ -269,84 +531,3 @@ class AccountingDataSync:
             "vatNumber": s(raw.get("vat_identifier") or raw.get("vatNumber") or ""),
             **scope,
         }
-
-    def _clearTable(self, model, featureInstanceId: str):
-        """Delete all records for this feature instance from a TrusteeData* table."""
-        records = self._if.db.getRecordset(model, recordFilter={"featureInstanceId": featureInstanceId})
-        for r in (records or []):
-            rid = r.get("id") if isinstance(r, dict) else getattr(r, "id", None)
-            if rid:
-                try:
-                    self._if.db.recordDelete(model, rid)
-                except Exception:
-                    pass
-
-    def _computeBalances(self, featureInstanceId: str, mandateId: str) -> int:
-        """Aggregate journal lines into monthly + annual account balances."""
-        from modules.features.trustee.datamodelFeatureTrustee import (
-            TrusteeDataJournalEntry,
-            TrusteeDataJournalLine,
-            TrusteeDataAccountBalance,
-        )
-
-        entries = self._if.db.getRecordset(
-            TrusteeDataJournalEntry,
-            recordFilter={"featureInstanceId": featureInstanceId},
-        ) or []
-        entryDates = {}
-        for e in entries:
-            eid = e.get("id") if isinstance(e, dict) else getattr(e, "id", None)
-            bdate = e.get("bookingDate") if isinstance(e, dict) else getattr(e, "bookingDate", None)
-            if eid and bdate:
-                entryDates[eid] = bdate
-
-        lines = self._if.db.getRecordset(
-            TrusteeDataJournalLine,
-            recordFilter={"featureInstanceId": featureInstanceId},
-        ) or []
-
-        # key: (accountNumber, year, month)
-        buckets: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
-        for ln in lines:
-            if isinstance(ln, dict):
-                jeid = ln.get("journalEntryId", "")
-                accNo = ln.get("accountNumber", "")
-                debit = float(ln.get("debitAmount", 0))
-                credit = float(ln.get("creditAmount", 0))
-            else:
-                jeid = getattr(ln, "journalEntryId", "")
-                accNo = getattr(ln, "accountNumber", "")
-                debit = float(getattr(ln, "debitAmount", 0))
-                credit = float(getattr(ln, "creditAmount", 0))
-
-            bdate = entryDates.get(jeid, "")
-            if not accNo or not bdate:
-                continue
-            parts = bdate.split("-")
-            if len(parts) < 2:
-                continue
-            year = int(parts[0])
-            month = int(parts[1])
-
-            buckets[(accNo, year, month)]["debit"] += debit
-            buckets[(accNo, year, month)]["credit"] += credit
-            buckets[(accNo, year, 0)]["debit"] += debit
-            buckets[(accNo, year, 0)]["credit"] += credit
-
-        count = 0
-        scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
-        for (accNo, year, month), totals in buckets.items():
-            closing = totals["debit"] - totals["credit"]
-            self._if.db.recordCreate(TrusteeDataAccountBalance, {
-                "accountNumber": accNo,
-                "periodYear": year,
-                "periodMonth": month,
-                "openingBalance": 0.0,
-                "debitTotal": round(totals["debit"], 2),
-                "creditTotal": round(totals["credit"], 2),
-                "closingBalance": round(closing, 2),
-                "currency": "CHF",
-                **scope,
-            })
-            count += 1
-        return count
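A minimal sketch of the fetch-then-offload pattern the refactor above is built around; the connector, persist helper, and progress callback are placeholders, not the project's real classes:

# Sketch only -- assumed names, same shape as the phases in executeSync above.
import asyncio

async def sync_phase(connector, persist_rows, progress):
    progress(15, "fetching...")
    rows = await connector.fetch()                          # async HTTP, stays on the event loop
    progress(60, f"persisting {len(rows)} rows...")
    written = await asyncio.to_thread(persist_rows, rows)   # blocking DB bulk-write in a worker thread
    progress(100, f"{written} rows written")
    return written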
@@ -47,6 +47,10 @@ class AccountingConnectorRma(BaseAccountingConnector):
             fieldType="text",
             secret=False,
             placeholder="https://service.runmyaccounts.com/api/latest/clients/",
+            suggestions=[
+                "https://service.runmyaccounts.com/api/latest/clients/",
+                "https://service.int.runmyaccounts.com/api/latest/clients/",
+            ],
         ),
         ConnectorConfigField(
             key="clientName",
@@ -3,7 +3,7 @@
 """Trustee models: TrusteeOrganisation, TrusteeRole, TrusteeAccess, TrusteeContract, TrusteeDocument, TrusteePosition."""

 from enum import Enum
-from typing import Optional
+from typing import Optional, Dict
 from pydantic import BaseModel, Field

 from modules.datamodels.datamodelBase import PowerOnModel
@@ -526,7 +526,8 @@ class TrusteePosition(PowerOnModel):
             "label": "Buchungsbetrag",
             "frontend_type": "number",
             "frontend_readonly": False,
-            "frontend_required": True
+            "frontend_required": True,
+            "frontend_format": "R:#'###.00",
         }
     )
     originalCurrency: str = Field(

@@ -551,7 +552,8 @@ class TrusteePosition(PowerOnModel):
             "label": "Originalbetrag",
             "frontend_type": "number",
             "frontend_readonly": False,
-            "frontend_required": True
+            "frontend_required": True,
+            "frontend_format": "R:#'###.00",
         }
     )
     vatPercentage: float = Field(

@@ -561,7 +563,8 @@ class TrusteePosition(PowerOnModel):
             "label": "MwSt-Prozentsatz",
             "frontend_type": "number",
             "frontend_readonly": False,
-            "frontend_required": False
+            "frontend_required": False,
+            "frontend_format": "R:0.00",
         }
     )
     vatAmount: float = Field(

@@ -571,7 +574,8 @@ class TrusteePosition(PowerOnModel):
             "label": "MwSt-Betrag",
             "frontend_type": "number",
             "frontend_readonly": False,
-            "frontend_required": False
+            "frontend_required": False,
+            "frontend_format": "R:#'###.00",
         }
     )
     debitAccountNumber: Optional[str] = Field(
@@ -750,7 +754,15 @@ class TrusteeDataJournalEntry(PowerOnModel):
     reference: Optional[str] = Field(default=None, description="Booking reference / voucher number", json_schema_extra={"label": "Referenz"})
     description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
     currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
-    totalAmount: float = Field(default=0.0, description="Total amount of entry", json_schema_extra={"label": "Betrag"})
+    totalAmount: float = Field(
+        default=0.0,
+        description="Total amount of entry",
+        json_schema_extra={
+            "label": "Betrag",
+            # Right-aligned amount with Swiss thousands separator and 2 decimals.
+            "frontend_format": "R:#'###.00",
+        },
+    )
     mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
     featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
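The frontend_format value is only a display hint; the API keeps plain floats. A hedged sketch of how a renderer might apply the "R:#'###.00" pattern; the helper below is hypothetical, not part of this repository, and assumes the pattern means right alignment with an apostrophe as thousands separator and two decimals, as the comment in the hunk states:

def formatSwissAmount(value: float) -> str:
    # Python groups with commas; swap them for the Swiss apostrophe separator.
    return f"{value:,.2f}".replace(",", "'")

print(formatSwissAmount(1234567.5))  # 1'234'567.50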
@@ -760,8 +772,8 @@ class TrusteeDataJournalLine(PowerOnModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
     journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}})
     accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
-    debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll"})
-    creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben"})
+    debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll", "frontend_format": "R:#'###.00"})
+    creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben", "frontend_format": "R:#'###.00"})
     currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
     taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
     costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})

@@ -794,10 +806,10 @@ class TrusteeDataAccountBalance(PowerOnModel):
     accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
     periodYear: int = Field(description="Fiscal year", json_schema_extra={"label": "Jahr"})
     periodMonth: int = Field(default=0, description="Month (1-12); 0 = annual total", json_schema_extra={"label": "Monat"})
-    openingBalance: float = Field(default=0.0, json_schema_extra={"label": "Eröffnungssaldo"})
-    debitTotal: float = Field(default=0.0, json_schema_extra={"label": "Soll-Umsatz"})
-    creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz"})
-    closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo"})
+    openingBalance: float = Field(default=0.0, json_schema_extra={"label": "Eröffnungssaldo", "frontend_format": "R:#'###.00"})
+    debitTotal: float = Field(default=0.0, json_schema_extra={"label": "Soll-Umsatz", "frontend_format": "R:#'###.00"})
+    creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz", "frontend_format": "R:#'###.00"})
+    closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo", "frontend_format": "R:#'###.00"})
     currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
     mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
     featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@@ -818,6 +830,9 @@ class TrusteeAccountingConfig(PowerOnModel):
     lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation"})
     lastSyncStatus: Optional[str] = Field(default=None, description="Last sync result: success, error, partial", json_schema_extra={"label": "Status"})
     lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"})
+    lastSyncDateFrom: Optional[str] = Field(default=None, description="dateFrom (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von"})
+    lastSyncDateTo: Optional[str] = Field(default=None, description="dateTo (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis"})
+    lastSyncCounts: Optional[Dict[str, int]] = Field(default=None, description="Per-entity counts of the last import (accounts, journalEntries, journalLines, contacts, accountBalances)", json_schema_extra={"label": "Letzte Import-Zaehler"})
     cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"})
     chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"})
     mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
@@ -8,11 +8,13 @@ Handles feature initialization and RBAC catalog registration.
 import logging
 from typing import Dict, List, Any

+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)

 # Feature metadata
 FEATURE_CODE = "trustee"
-FEATURE_LABEL = "Treuhand"
+FEATURE_LABEL = t("Treuhand", context="UI")
 FEATURE_ICON = "mdi-briefcase"

 # UI Objects for RBAC catalog
@@ -20,122 +22,155 @@ FEATURE_ICON = "mdi-briefcase"
 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.trustee.dashboard",
-        "label": "Dashboard",
+        "label": t("Dashboard", context="UI"),
         "meta": {"area": "dashboard"}
     },
+    # Note: ui.feature.trustee.positions and .documents removed.
+    # Positionen and Dokumente are now consolidated tabs inside the
+    # ui.feature.trustee.data-tables view (TrusteeDataTablesView).
+    # Data-level RBAC (data.feature.trustee.TrusteePosition / .TrusteeDocument)
+    # remains and continues to gate per-row access.
     {
-        "objectKey": "ui.feature.trustee.positions",
-        "label": "Positionen",
-        "meta": {"area": "positions"}
+        "objectKey": "ui.feature.trustee.data-tables",
+        "label": t("Daten-Tabellen", context="UI"),
+        "meta": {"area": "data-tables"}
     },
     {
-        "objectKey": "ui.feature.trustee.documents",
-        "label": "Dokumente",
-        "meta": {"area": "documents"}
-    },
-    {
-        "objectKey": "ui.feature.trustee.expense-import",
-        "label": "Spesen Import",
-        "meta": {"area": "expense-import"}
-    },
-    {
-        "objectKey": "ui.feature.trustee.scan-upload",
-        "label": "Scannen / Hochladen",
-        "meta": {"area": "scan-upload"}
+        "objectKey": "ui.feature.trustee.import-process",
+        "label": t("Import & Verarbeitung", context="UI"),
+        "meta": {"area": "import-process"}
     },
     {
         "objectKey": "ui.feature.trustee.analyse",
-        "label": "Analyse & Reporting",
+        "label": t("Analyse & Reporting", context="UI"),
         "meta": {"area": "analyse"}
     },
     {
         "objectKey": "ui.feature.trustee.abschluss",
-        "label": "Abschluss & Prüfung",
+        "label": t("Abschluss & Prüfung", context="UI"),
         "meta": {"area": "abschluss"}
     },
     {
         "objectKey": "ui.feature.trustee.settings",
-        "label": "Buchhaltungs-Einstellungen",
+        "label": t("Buchhaltungs-Einstellungen", context="UI"),
         "meta": {"area": "settings", "admin_only": True}
     },
     {
         "objectKey": "ui.feature.trustee.instance-roles",
-        "label": "Instanz-Rollen & Berechtigungen",
+        "label": t("Instanz-Rollen & Berechtigungen", context="UI"),
         "meta": {"area": "admin", "admin_only": True}
     },
 ]

 # DATA Objects for RBAC catalog (tables/entities)
-# Used for AccessRules on data-level permissions
+# Used for AccessRules on data-level permissions.
+# Architecture note: a feature instance IS the organisation. There is no
+# TrusteeOrganisation parent grouping in the UDB — all tables are scoped
+# to the feature instance via featureInstanceId.
 DATA_OBJECTS = [
+    # ── Categorical Groups (UDB folders) ─────────────────────────────────────
     {
-        "objectKey": "data.feature.trustee.TrusteeOrganisation",
-        "label": "Organisation",
-        "meta": {
-            "table": "TrusteeOrganisation",
-            "fields": ["id", "label", "enabled"],
-            "isParent": True,
-            "displayFields": ["label"],
-        }
+        "objectKey": "data.feature.trustee.localData",
+        "label": t("Lokale Daten", context="UI"),
+        "meta": {"isGroup": True}
     },
+    {
+        "objectKey": "data.feature.trustee.config",
+        "label": t("Konfiguration", context="UI"),
+        "meta": {"isGroup": True}
+    },
+    {
+        "objectKey": "data.feature.trustee.accountingData",
+        "label": t("Daten aus Buchhaltungssystem", context="UI"),
+        "meta": {"isGroup": True}
+    },
+    # ── Lokale Daten ─────────────────────────────────────────────────────────
     {
         "objectKey": "data.feature.trustee.TrusteePosition",
-        "label": "Position",
+        "label": t("Position", context="UI"),
         "meta": {
             "table": "TrusteePosition",
-            "fields": ["id", "label", "description", "organisationId"],
-            "parentTable": "TrusteeOrganisation",
-            "parentKey": "organisationId",
+            "group": "data.feature.trustee.localData",
+            "fields": ["id", "valuta", "company", "desc", "bookingAmount", "bookingCurrency", "debitAccountNumber", "creditAccountNumber"],
         }
     },
     {
         "objectKey": "data.feature.trustee.TrusteeDocument",
-        "label": "Dokument",
-        "meta": {"table": "TrusteeDocument", "fields": ["id", "filename", "mimeType", "fileSize", "uploadDate"]}
+        "label": t("Dokument", context="UI"),
+        "meta": {
+            "table": "TrusteeDocument",
+            "group": "data.feature.trustee.localData",
+            "fields": ["id", "documentName", "documentMimeType", "documentType", "sourceType"],
+        }
     },
+    # ── Konfiguration ────────────────────────────────────────────────────────
     {
         "objectKey": "data.feature.trustee.TrusteeAccountingConfig",
-        "label": "Buchhaltungs-Konfiguration",
+        "label": t("Buchhaltungs-Verbindung", context="UI"),
         "meta": {
             "table": "TrusteeAccountingConfig",
-            "fields": ["id", "connectorType", "displayLabel", "encryptedConfig", "isActive"],
-            "parentTable": "TrusteeOrganisation",
-            "parentKey": "organisationId",
+            "group": "data.feature.trustee.config",
+            "fields": ["id", "connectorType", "displayLabel", "isActive", "lastSyncAt", "lastSyncStatus"],
         }
     },
     {
         "objectKey": "data.feature.trustee.TrusteeAccountingSync",
-        "label": "Buchhaltungs-Synchronisation",
-        "meta": {"table": "TrusteeAccountingSync", "fields": ["id", "positionId", "syncStatus", "externalId"]}
+        "label": t("Sync-Protokoll", context="UI"),
+        "meta": {
+            "table": "TrusteeAccountingSync",
+            "group": "data.feature.trustee.config",
+            "fields": ["id", "positionId", "syncStatus", "externalId"],
+        }
     },
+    # ── Daten aus Buchhaltungssystem ─────────────────────────────────────────
     {
         "objectKey": "data.feature.trustee.TrusteeDataAccount",
-        "label": "Kontenplan (Sync)",
-        "meta": {"table": "TrusteeDataAccount", "fields": ["id", "accountNumber", "label", "accountType", "accountGroup", "currency", "isActive"]}
+        "label": t("Kontenplan", context="UI"),
+        "meta": {
+            "table": "TrusteeDataAccount",
+            "group": "data.feature.trustee.accountingData",
+            "fields": ["id", "accountNumber", "label", "accountType", "accountGroup", "currency", "isActive"],
+        }
     },
     {
         "objectKey": "data.feature.trustee.TrusteeDataJournalEntry",
-        "label": "Buchungen (Sync)",
-        "meta": {"table": "TrusteeDataJournalEntry", "fields": ["id", "externalId", "bookingDate", "reference", "description", "currency", "totalAmount"]}
+        "label": t("Buchungen", context="UI"),
+        "meta": {
+            "table": "TrusteeDataJournalEntry",
+            "group": "data.feature.trustee.accountingData",
+            "fields": ["id", "externalId", "bookingDate", "reference", "description", "currency", "totalAmount"],
+        }
     },
     {
         "objectKey": "data.feature.trustee.TrusteeDataJournalLine",
-        "label": "Buchungszeilen (Sync)",
-        "meta": {"table": "TrusteeDataJournalLine", "fields": ["id", "journalEntryId", "accountNumber", "debitAmount", "creditAmount", "currency", "taxCode", "costCenter", "description"]}
+        "label": t("Buchungszeilen", context="UI"),
+        "meta": {
+            "table": "TrusteeDataJournalLine",
+            "group": "data.feature.trustee.accountingData",
+            "fields": ["id", "journalEntryId", "accountNumber", "debitAmount", "creditAmount", "currency", "taxCode", "costCenter", "description"],
+        }
     },
     {
         "objectKey": "data.feature.trustee.TrusteeDataContact",
-        "label": "Kontakte (Sync)",
-        "meta": {"table": "TrusteeDataContact", "fields": ["id", "externalId", "contactType", "contactNumber", "name", "address", "zip", "city", "country", "email", "phone", "vatNumber"]}
+        "label": t("Kontakte", context="UI"),
+        "meta": {
+            "table": "TrusteeDataContact",
+            "group": "data.feature.trustee.accountingData",
+            "fields": ["id", "externalId", "contactType", "contactNumber", "name", "address", "zip", "city", "country", "email", "phone", "vatNumber"],
+        }
     },
     {
         "objectKey": "data.feature.trustee.TrusteeDataAccountBalance",
-        "label": "Kontosalden (Sync)",
-        "meta": {"table": "TrusteeDataAccountBalance", "fields": ["id", "accountNumber", "periodYear", "periodMonth", "openingBalance", "debitTotal", "creditTotal", "closingBalance", "currency"]}
+        "label": t("Kontosalden", context="UI"),
+        "meta": {
+            "table": "TrusteeDataAccountBalance",
+            "group": "data.feature.trustee.accountingData",
+            "fields": ["id", "accountNumber", "periodYear", "periodMonth", "openingBalance", "debitTotal", "creditTotal", "closingBalance", "currency"],
+        }
     },
     {
         "objectKey": "data.feature.trustee.*",
-        "label": "Alle Treuhand-Daten",
+        "label": t("Alle Treuhand-Daten", context="UI"),
         "meta": {"wildcard": True, "description": "Wildcard for all trustee data tables"}
     },
 ]
@@ -145,67 +180,67 @@ DATA_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.trustee.documents.create",
-        "label": "Dokument hochladen",
+        "label": t("Dokument hochladen", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/documents", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.trustee.documents.update",
-        "label": "Dokument aktualisieren",
+        "label": t("Dokument aktualisieren", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/documents/{documentId}", "method": "PUT"}
     },
     {
         "objectKey": "resource.feature.trustee.documents.delete",
-        "label": "Dokument löschen",
+        "label": t("Dokument löschen", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/documents/{documentId}", "method": "DELETE"}
     },
     {
         "objectKey": "resource.feature.trustee.positions.create",
-        "label": "Position erstellen",
+        "label": t("Position erstellen", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/positions", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.trustee.positions.update",
-        "label": "Position aktualisieren",
+        "label": t("Position aktualisieren", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/positions/{positionId}", "method": "PUT"}
     },
     {
         "objectKey": "resource.feature.trustee.positions.delete",
-        "label": "Position löschen",
+        "label": t("Position löschen", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/positions/{positionId}", "method": "DELETE"}
     },
     {
         "objectKey": "resource.feature.trustee.instance-roles.manage",
-        "label": "Instanz-Rollen verwalten",
+        "label": t("Instanz-Rollen verwalten", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/instance-roles", "method": "ALL", "admin_only": True}
     },
     {
         "objectKey": "resource.feature.trustee.accounting.manage",
-        "label": "Buchhaltungs-Integration verwalten",
+        "label": t("Buchhaltungs-Integration verwalten", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/accounting/config", "method": "ALL", "admin_only": True}
     },
     {
         "objectKey": "resource.feature.trustee.accounting.sync",
-        "label": "Buchhaltung synchronisieren",
+        "label": t("Buchhaltung synchronisieren", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/accounting/sync", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.trustee.accounting.view",
-        "label": "Sync-Status einsehen",
+        "label": t("Sync-Status einsehen", context="UI"),
         "meta": {"endpoint": "/api/trustee/{instanceId}/accounting/sync-status", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.trustee.workflows.view",
-        "label": "Workflows einsehen",
+        "label": t("Workflows einsehen", context="UI"),
         "meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.trustee.workflows.execute",
-        "label": "Workflows ausführen",
+        "label": t("Workflows ausführen", context="UI"),
         "meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.trustee.workflows.manage",
-        "label": "Workflows verwalten",
+        "label": t("Workflows verwalten", context="UI"),
         "meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "ALL", "admin_only": True}
     },
 ]
@@ -223,43 +258,43 @@ QUICK_ACTION_CATEGORIES = [
 QUICK_ACTIONS = [
     {
         "id": "trustee-process-receipts",
-        "label": "Belege verarbeiten",
+        "label": t("Belege verarbeiten", context="UI"),
         "description": "Belege aus SharePoint importieren, klassifizieren und verbuchen",
         "icon": "mdi-file-document-check-outline",
         "color": "#4CAF50",
         "category": "import",
         "actionType": "link",
-        "config": {"targetView": "expense-import"},
+        "config": {"targetView": "import-process", "tab": "receipts"},
         "requiredRoles": ["trustee-user", "trustee-accountant", "trustee-admin"],
         "sortOrder": 1,
     },
-    {
-        "id": "trustee-sync-accounting",
-        "label": "Daten synchronisieren",
-        "description": "Buchhaltungsdaten aus dem externen System aktualisieren",
-        "icon": "mdi-sync",
-        "color": "#FF9800",
-        "category": "import",
-        "actionType": "link",
-        "config": {"targetView": "settings"},
-        "requiredRoles": ["trustee-accountant", "trustee-admin"],
-        "sortOrder": 2,
-    },
     {
         "id": "trustee-upload-receipt",
-        "label": "Beleg hochladen",
+        "label": t("Beleg hochladen", context="UI"),
         "description": "Beleg scannen oder als Datei hochladen",
         "icon": "mdi-camera-document-outline",
         "color": "#607D8B",
         "category": "import",
         "actionType": "link",
-        "config": {"targetView": "scan-upload"},
+        "config": {"targetView": "import-process", "tab": "upload"},
         "requiredRoles": ["trustee-user", "trustee-client", "trustee-accountant", "trustee-admin"],
+        "sortOrder": 2,
+    },
+    {
+        "id": "trustee-sync-accounting",
+        "label": t("Daten einlesen", context="UI"),
+        "description": "Buchhaltungsdaten aus dem externen System aktualisieren",
+        "icon": "mdi-sync",
+        "color": "#FF9800",
+        "category": "import",
+        "actionType": "link",
+        "config": {"targetView": "settings", "tab": "import-data"},
+        "requiredRoles": ["trustee-accountant", "trustee-admin"],
         "sortOrder": 3,
     },
     {
         "id": "trustee-budget-comparison",
-        "label": "Budget-Vergleich",
+        "label": t("Budget-Vergleich", context="UI"),
         "description": "Soll/Ist-Vergleich der Buchhaltung mit Budget-Excel",
         "icon": "mdi-chart-bar",
         "color": "#2196F3",
@@ -271,7 +306,7 @@ QUICK_ACTIONS = [
     },
     {
         "id": "trustee-kpi-dashboard",
-        "label": "KPI-Dashboard",
+        "label": t("KPI-Dashboard", context="UI"),
         "description": "Kennzahlen berechnen und visualisieren",
         "icon": "mdi-view-dashboard-outline",
         "color": "#9C27B0",

@@ -283,7 +318,7 @@ QUICK_ACTIONS = [
     },
     {
         "id": "trustee-cashflow",
-        "label": "Cashflow-Rechnung",
+        "label": t("Cashflow-Rechnung", context="UI"),
         "description": "Cashflow berechnen und analysieren",
         "icon": "mdi-cash-multiple",
         "color": "#009688",

@@ -295,7 +330,7 @@ QUICK_ACTIONS = [
     },
     {
         "id": "trustee-forecast",
-        "label": "Prognose erstellen",
+        "label": t("Prognose erstellen", context="UI"),
         "description": "Trend-Analyse und Prognose der nächsten Monate",
         "icon": "mdi-chart-timeline-variant",
         "color": "#E91E63",

@@ -307,7 +342,7 @@ QUICK_ACTIONS = [
     },
     {
         "id": "trustee-year-end-check",
-        "label": "Jahresabschluss prüfen",
+        "label": t("Jahresabschluss prüfen", context="UI"),
         "description": "Automatische Prüfungen für den Jahresabschluss",
         "icon": "mdi-clipboard-check-outline",
         "color": "#795548",
@@ -350,7 +385,7 @@ def _buildAnalysisWorkflowGraph(prompt: str) -> Dict[str, Any]:
 TEMPLATE_WORKFLOWS = [
     {
         "id": "trustee-receipt-import",
-        "label": "Beleg-Import Pipeline",
+        "label": t("Beleg-Import Pipeline", context="UI"),
         "description": "Belege extrahieren, verarbeiten und in Buchhaltung synchronisieren",
         "tags": ["feature:trustee", "template:trustee-receipt-import"],
         "graph": {

@@ -372,7 +407,7 @@ TEMPLATE_WORKFLOWS = [
     },
     {
         "id": "trustee-sync-accounting",
-        "label": "Buchhaltung synchronisieren",
+        "label": t("Buchhaltung synchronisieren", context="UI"),
         "description": "Buchhaltungsdaten aus dem externen System aktualisieren",
         "tags": ["feature:trustee", "template:trustee-sync-accounting"],
         "graph": {

@@ -388,7 +423,7 @@ TEMPLATE_WORKFLOWS = [
     },
     {
         "id": "trustee-budget-comparison",
-        "label": "Budget-Vergleich",
+        "label": t("Budget-Vergleich", context="UI"),
         "description": "Soll/Ist-Vergleich der Buchhaltung mit Budget-Excel",
         "tags": ["feature:trustee", "template:trustee-budget-comparison"],
         "graph": {

@@ -421,7 +456,7 @@ TEMPLATE_WORKFLOWS = [
     },
     {
         "id": "trustee-kpi-dashboard",
-        "label": "KPI-Dashboard",
+        "label": t("KPI-Dashboard", context="UI"),
         "description": "Kennzahlen berechnen und visualisieren",
         "tags": ["feature:trustee", "template:trustee-kpi-dashboard"],
         "graph": _buildAnalysisWorkflowGraph(

@@ -438,7 +473,7 @@ TEMPLATE_WORKFLOWS = [
     },
     {
         "id": "trustee-cashflow",
-        "label": "Cashflow-Rechnung",
+        "label": t("Cashflow-Rechnung", context="UI"),
         "description": "Cashflow berechnen und analysieren",
         "tags": ["feature:trustee", "template:trustee-cashflow"],
         "graph": _buildAnalysisWorkflowGraph(

@@ -452,7 +487,7 @@ TEMPLATE_WORKFLOWS = [
     },
     {
         "id": "trustee-forecast",
-        "label": "Prognose erstellen",
+        "label": t("Prognose erstellen", context="UI"),
         "description": "Trend-Analyse und Prognose der nächsten Monate",
         "tags": ["feature:trustee", "template:trustee-forecast"],
         "graph": _buildAnalysisWorkflowGraph(

@@ -467,7 +502,7 @@ TEMPLATE_WORKFLOWS = [
     },
     {
         "id": "trustee-year-end-check",
-        "label": "Jahresabschluss prüfen",
+        "label": t("Jahresabschluss prüfen", context="UI"),
         "description": "Automatische Prüfungen für den Jahresabschluss",
         "tags": ["feature:trustee", "template:trustee-year-end-check"],
         "graph": _buildAnalysisWorkflowGraph(
@@ -489,8 +524,7 @@ TEMPLATE_ROLES = [
         "description": "Treuhand-Betrachter - Treuhand-Daten einsehen (nur lesen)",
         "accessRules": [
             {"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
+            {"context": "UI", "item": "ui.feature.trustee.data-tables", "view": True},
             {"context": "RESOURCE", "item": "resource.feature.trustee.workflows.view", "view": True},
             {"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
         ],

@@ -500,9 +534,8 @@ TEMPLATE_ROLES = [
         "description": "Treuhand-Benutzer - Eigene Treuhand-Daten erstellen und verwalten",
         "accessRules": [
             {"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.expense-import", "view": True},
+            {"context": "UI", "item": "ui.feature.trustee.data-tables", "view": True},
+            {"context": "UI", "item": "ui.feature.trustee.import-process", "view": True},
             {"context": "RESOURCE", "item": "resource.feature.trustee.workflows.view", "view": True},
             {"context": "RESOURCE", "item": "resource.feature.trustee.workflows.execute", "view": True},
             {"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},

@@ -525,8 +558,7 @@ TEMPLATE_ROLES = [
         "description": "Treuhand-Buchhalter - Buchhaltungs- und Finanzdaten verwalten",
         "accessRules": [
             {"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
+            {"context": "UI", "item": "ui.feature.trustee.data-tables", "view": True},
             {"context": "UI", "item": "ui.feature.trustee.analyse", "view": True},
             {"context": "UI", "item": "ui.feature.trustee.abschluss", "view": True},
             {"context": "UI", "item": "ui.feature.trustee.settings", "view": True},

@@ -542,10 +574,8 @@ TEMPLATE_ROLES = [
         "description": "Treuhand-Kunde - Eigene Buchhaltungsdaten und Dokumente einsehen",
         "accessRules": [
             {"context": "UI", "item": "ui.feature.trustee.dashboard", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.positions", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.documents", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.expense-import", "view": True},
-            {"context": "UI", "item": "ui.feature.trustee.scan-upload", "view": True},
+            {"context": "UI", "item": "ui.feature.trustee.data-tables", "view": True},
+            {"context": "UI", "item": "ui.feature.trustee.import-process", "view": True},
             {"context": "DATA", "item": "data.feature.trustee.TrusteePosition", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
             {"context": "DATA", "item": "data.feature.trustee.TrusteeDocument", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
         ],
@@ -30,6 +30,13 @@ from .datamodelFeatureTrustee import (
     TrusteeContract,
     TrusteeDocument,
     TrusteePosition,
+    TrusteeDataAccount,
+    TrusteeDataJournalEntry,
+    TrusteeDataJournalLine,
+    TrusteeDataContact,
+    TrusteeDataAccountBalance,
+    TrusteeAccountingConfig,
+    TrusteeAccountingSync,
 )
 from modules.datamodels.datamodelPagination import (
     PaginationParams,
@@ -138,21 +145,24 @@ def getQuickActions(
     from .mainTrustee import QUICK_ACTIONS, QUICK_ACTION_CATEGORIES

     userRoleLabels: set = set()
+    rootInterface = getRootInterface()
     if context.isPlatformAdmin:
         userRoleLabels.add("trustee-admin")
-    else:
-        rootInterface = getRootInterface()
-        featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
-        for fa in featureAccesses:
-            if str(fa.featureInstanceId) == instanceId and fa.enabled:
-                roleIds = fa.roleIds if hasattr(fa, "roleIds") and fa.roleIds else []
-                for rid in roleIds:
-                    role = rootInterface.getRole(str(rid))
-                    if role and role.roleLabel:
-                        userRoleLabels.add(role.roleLabel)
+    featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
+    for fa in featureAccesses:
+        if str(fa.featureInstanceId) == instanceId and fa.enabled:
+            # FeatureAccess (Pydantic) has no `roleIds` field; the join lives in
+            # FeatureAccessRole and must be looked up via the interface helper.
+            roleIds = rootInterface.getRoleIdsForFeatureAccess(str(fa.id))
+            for rid in roleIds:
+                role = rootInterface.getRole(str(rid))
+                if role and role.roleLabel:
+                    userRoleLabels.add(role.roleLabel)

     from modules.shared.i18nRegistry import resolveText

+    lang = (language or "de").strip() or "de"
+
     filteredActions = []
     for action in QUICK_ACTIONS:
         required = set(action.get("requiredRoles", []))
@@ -161,8 +171,8 @@ def getQuickActions(
         if context.isPlatformAdmin or required.intersection(userRoleLabels):
             resolved = {
                 "id": action["id"],
-                "label": resolveText(action.get("label", {})),
-                "description": resolveText(action.get("description", {})),
+                "label": resolveText(action.get("label", {}), lang=lang),
+                "description": resolveText(action.get("description", {}), lang=lang),
                 "icon": action.get("icon", ""),
                 "color": action.get("color", ""),
                 "category": action.get("category", ""),

@@ -173,14 +183,14 @@ def getQuickActions(
             if resolved["actionType"] == "agentPrompt" and "config" in resolved:
                 cfg = dict(resolved["config"])
                 if "uploadHint" in cfg:
-                    cfg["uploadHint"] = resolveText(cfg["uploadHint"])
+                    cfg["uploadHint"] = resolveText(cfg["uploadHint"], lang=lang)
                 resolved["config"] = cfg
             filteredActions.append(resolved)

     filteredActions.sort(key=lambda a: a["sortOrder"])

     resolvedCategories = [
-        {"id": c["id"], "label": resolveText(c.get("label", {})), "sortOrder": c.get("sortOrder", 99)}
+        {"id": c["id"], "label": resolveText(c.get("label", {}), lang=lang), "sortOrder": c.get("sortOrder", 99)}
         for c in QUICK_ACTION_CATEGORIES
     ]
@@ -199,6 +209,14 @@ _TRUSTEE_ENTITY_MODELS = {
     "TrusteeContract": TrusteeContract,
     "TrusteeDocument": TrusteeDocument,
     "TrusteePosition": TrusteePosition,
+    # Read-only sync tables (TrusteeData*) and accounting bookkeeping
+    "TrusteeDataAccount": TrusteeDataAccount,
+    "TrusteeDataJournalEntry": TrusteeDataJournalEntry,
+    "TrusteeDataJournalLine": TrusteeDataJournalLine,
+    "TrusteeDataContact": TrusteeDataContact,
+    "TrusteeDataAccountBalance": TrusteeDataAccountBalance,
+    "TrusteeAccountingConfig": TrusteeAccountingConfig,
+    "TrusteeAccountingSync": TrusteeAccountingSync,
 }
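The registry above is what lets one generic endpoint serve many tables: the URL carries a table name and the handler looks the Pydantic model up instead of hard-coding a route per table. A self-contained illustration of that dispatch pattern (toy classes, not the real models):

class _Journal: ...
class _Contact: ...

ENTITY_MODELS = {"JournalLine": _Journal, "Contact": _Contact}

def resolveModel(tableName: str):
    model = ENTITY_MODELS.get(tableName)
    if model is None:
        raise KeyError(f"Unknown entity: {tableName}")
    return model

assert resolveModel("Contact") is _Contact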
@@ -1643,7 +1661,47 @@ def get_position_sync_status(

 # ===== Accounting Data Import =====

-@router.post("/{instanceId}/accounting/import-data")
+TRUSTEE_ACCOUNTING_SYNC_JOB_TYPE = "trusteeAccountingSync"
+
+
+async def _trusteeAccountingSyncJobHandler(job: Dict[str, Any], progressCb) -> Dict[str, Any]:
+    """BackgroundJob handler: imports accounting data from the external system.
+
+    Reads inputs from `job["payload"]` (dateFrom, dateTo, userId) and runs
+    `AccountingDataSync.importData(...)` in the worker's event loop without
+    blocking the original HTTP request that submitted the job.
+    """
+    from modules.security.rootAccess import getRootUser
+    from .accounting.accountingDataSync import AccountingDataSync
+
+    instanceId = job["featureInstanceId"]
+    mandateId = job["mandateId"]
+    payload = job.get("payload") or {}
+    rootUser = getRootUser()
+
+    progressCb(5, "Initialisiere Import...")
+    interface = getInterface(rootUser, mandateId=mandateId, featureInstanceId=instanceId)
+    sync = AccountingDataSync(interface)
+    progressCb(10, "Verbinde mit Buchhaltungssystem...")
+    result = await sync.importData(
+        featureInstanceId=instanceId,
+        mandateId=mandateId,
+        dateFrom=payload.get("dateFrom"),
+        dateTo=payload.get("dateTo"),
+        progressCb=progressCb,
+    )
+    progressCb(100, "Import abgeschlossen.")
+    return result
+
+
+try:
+    from modules.serviceCenter.services.serviceBackgroundJobs import registerJobHandler
+    registerJobHandler(TRUSTEE_ACCOUNTING_SYNC_JOB_TYPE, _trusteeAccountingSyncJobHandler)
+except Exception as _regErr:
+    logger.warning("Failed to register trusteeAccountingSync job handler: %s", _regErr)
+
+
+@router.post("/{instanceId}/accounting/import-data", status_code=status.HTTP_202_ACCEPTED)
 @limiter.limit("3/minute")
 async def import_accounting_data(
     request: Request,
@@ -1651,20 +1709,26 @@ async def import_accounting_data(
     data: Dict[str, Any] = Body(default={}),
     context: RequestContext = Depends(getRequestContext)
 ) -> Dict[str, Any]:
-    """Import accounting data (chart, journal entries, contacts) from the external system into TrusteeData* tables."""
+    """Submit a background job to import accounting data.
+
+    Returns immediately with `{ jobId }`; clients poll `GET /api/jobs/{jobId}`
+    until status is SUCCESS / ERROR.
+    """
+    from modules.serviceCenter.services.serviceBackgroundJobs import startJob
+
     mandateId = _validateInstanceAccess(instanceId, context)
-    interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
-    from .accounting.accountingDataSync import AccountingDataSync
-    sync = AccountingDataSync(interface)
-    dateFrom = data.get("dateFrom")
-    dateTo = data.get("dateTo")
-    result = await sync.importData(
-        featureInstanceId=instanceId,
-        mandateId=mandateId,
-        dateFrom=dateFrom,
-        dateTo=dateTo,
-    )
-    return result
+    payload = {
+        "dateFrom": data.get("dateFrom"),
+        "dateTo": data.get("dateTo"),
+    }
+    jobId = await startJob(
+        TRUSTEE_ACCOUNTING_SYNC_JOB_TYPE,
+        payload,
+        mandateId=mandateId,
+        featureInstanceId=instanceId,
+        triggeredBy=context.user.id if context.user else None,
+    )
+    return {"jobId": jobId, "status": "pending"}


 @router.get("/{instanceId}/accounting/import-status")
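With the endpoint now returning 202 and a job id, the long-running import no longer blocks the request. A hedged client-side sketch of the submit-then-poll flow; the host name is a placeholder and the GET /api/jobs/{jobId} response shape is taken from the docstring above, not verified against the job service:

import time
import requests  # assumption: any HTTP client works here

BASE = "https://example.invalid/api"  # placeholder host

def runImport(instanceId: str, session: requests.Session) -> dict:
    resp = session.post(
        f"{BASE}/trustee/{instanceId}/accounting/import-data",
        json={"dateFrom": "2024-01-01", "dateTo": "2024-12-31"},
    )
    resp.raise_for_status()
    jobId = resp.json()["jobId"]
    while True:
        job = session.get(f"{BASE}/jobs/{jobId}").json()
        if job.get("status") in ("SUCCESS", "ERROR"):
            return job
        time.sleep(2)  # poll until the background worker finishes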
@@ -1695,6 +1759,9 @@ def get_import_status(
     counts["lastSyncAt"] = cfg.get("lastSyncAt")
     counts["lastSyncStatus"] = cfg.get("lastSyncStatus")
     counts["lastSyncErrorMessage"] = cfg.get("lastSyncErrorMessage")
+    counts["lastSyncDateFrom"] = cfg.get("lastSyncDateFrom")
+    counts["lastSyncDateTo"] = cfg.get("lastSyncDateTo")
+    counts["lastSyncCounts"] = cfg.get("lastSyncCounts")
     return counts
@@ -1836,6 +1903,20 @@ def _validateInstanceAdmin(instanceId: str, context: RequestContext) -> str:
     return mandateId


+def _serializeRoleForApi(role) -> Dict[str, Any]:
+    """Dump a Role and resolve the multilingual ``description`` to a plain string.
+
+    The Role.description field is a ``TextMultilingual`` (``{xx, de, en, ...}``).
+    The frontend expects a plain string, so we resolve via the request language
+    here (same pattern as ``getQuickActions``). Without this the React tree
+    crashes with "Objects are not valid as a React child".
+    """
+    from modules.shared.i18nRegistry import resolveText
+    payload = role.model_dump()
+    payload["description"] = resolveText(payload.get("description"))
+    return payload
+
+
 @router.get("/{instanceId}/instance-roles", response_model=PaginatedResponse)
 @limiter.limit("30/minute")
 def get_instance_roles(
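The docstring above explains why roles must go through the helper: the description is a multilingual object and React cannot render a dict. A standalone illustration of the resolution step (the TextMultilingual shape is inferred from the docstring; resolvePlain is a stand-in, not the real resolveText):

description = {"xx": "Treuhand-Benutzer", "de": "Treuhand-Benutzer", "en": "Trustee user"}

def resolvePlain(text, lang="de"):
    # stand-in for modules.shared.i18nRegistry.resolveText
    if isinstance(text, dict):
        return text.get(lang) or text.get("xx") or next(iter(text.values()), "")
    return text

assert resolvePlain(description, "en") == "Trustee user"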
@@ -1855,7 +1936,7 @@ def get_instance_roles(
     roles = rootInterface.getRolesByFeatureCode("trustee", featureInstanceId=instanceId)

     return PaginatedResponse(
-        items=[r.model_dump() for r in roles],
+        items=[_serializeRoleForApi(r) for r in roles],
         pagination=None
     )
@@ -1881,7 +1962,7 @@ def get_instance_role(
     if str(role.featureInstanceId) != instanceId:
         raise HTTPException(status_code=404, detail=f"Role {roleId} not found in this instance")

-    return role.model_dump()
+    return _serializeRoleForApi(role)


 @router.get("/{instanceId}/instance-roles/{roleId}/rules", response_model=PaginatedResponse)
@ -2049,3 +2130,277 @@ def delete_instance_role_rule(
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error deleting AccessRule: {e}")
|
logger.error(f"Error deleting AccessRule: {e}")
|
||||||
raise HTTPException(status_code=400, detail=f"Failed to delete rule: {str(e)}")
|
raise HTTPException(status_code=400, detail=f"Failed to delete rule: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Generic Read-Only Data Tables (consolidated TrusteeDataTablesView)
|
||||||
|
# =============================================================================
|
||||||
|
#
|
||||||
|
# These endpoints expose the seven additional Trustee tables that previously
|
||||||
|
# only had aggregate or specialised views. They are read-only:
|
||||||
|
# - TrusteeData* tables are populated by the accounting sync; manual edits
|
||||||
|
# would be overwritten on the next sync.
|
||||||
|
# - TrusteeAccountingConfig / TrusteeAccountingSync are operational records
|
||||||
|
# maintained by the connector layer.
|
||||||
|
#
|
||||||
|
# All seven endpoints share one helper (`_paginatedReadEndpoint`) that
|
||||||
|
# replicates the established pattern from `get_documents` / `get_positions`
|
||||||
|
# (Unified Filter API: mode=filterValues / mode=ids).
|
||||||
|
|
||||||
|
|
||||||
|
def _paginatedReadEndpoint(
|
||||||
|
*,
|
||||||
|
instanceId: str,
|
||||||
|
context: RequestContext,
|
||||||
|
modelClass,
|
||||||
|
pagination: Optional[str],
|
||||||
|
mode: Optional[str],
|
||||||
|
column: Optional[str],
|
||||||
|
):
|
||||||
|
"""Generic paginated, RBAC-aware GET handler for a Trustee data model.
|
||||||
|
|
||||||
|
Mirrors the pattern used by `get_documents` / `get_positions`:
|
||||||
|
- mode=filterValues: distinct column values for filter UI
|
||||||
|
- mode=ids: full id list for "select all matching"
|
||||||
|
- default: paginated result via `getRecordsetPaginatedWithRBAC`
|
||||||
|
"""
|
||||||
|
from modules.interfaces.interfaceRbac import (
|
||||||
|
getRecordsetPaginatedWithRBAC,
|
||||||
|
getDistinctColumnValuesWithRBAC,
|
||||||
|
)
|
||||||
|
from modules.routes.routeHelpers import (
|
||||||
|
handleFilterValuesInMemory,
|
||||||
|
handleIdsInMemory,
|
||||||
|
parseCrossFilterPagination,
|
||||||
|
)
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||||
|
|
||||||
|
if mode == "filterValues":
|
||||||
|
if not column:
|
||||||
|
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||||
|
try:
|
||||||
|
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||||
|
values = getDistinctColumnValuesWithRBAC(
|
||||||
|
connector=interface.db,
|
||||||
|
modelClass=modelClass,
|
||||||
|
column=column,
|
||||||
|
currentUser=interface.currentUser,
|
||||||
|
pagination=crossFilterPagination,
|
||||||
|
recordFilter=None,
|
||||||
|
mandateId=interface.mandateId,
|
||||||
|
featureInstanceId=interface.featureInstanceId,
|
||||||
|
featureCode=interface.FEATURE_CODE,
|
||||||
|
)
|
||||||
|
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||||
|
except Exception:
|
||||||
|
result = getRecordsetPaginatedWithRBAC(
|
||||||
|
connector=interface.db,
|
||||||
|
modelClass=modelClass,
|
||||||
|
currentUser=interface.currentUser,
|
||||||
|
pagination=None,
|
||||||
|
recordFilter=None,
|
||||||
|
mandateId=interface.mandateId,
|
||||||
|
featureInstanceId=interface.featureInstanceId,
|
||||||
|
featureCode=interface.FEATURE_CODE,
|
||||||
|
)
|
||||||
|
items = result.items if hasattr(result, "items") else result
|
||||||
|
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||||
|
return handleFilterValuesInMemory(items, column, pagination)
|
||||||
|
|
||||||
|
if mode == "ids":
|
||||||
|
result = getRecordsetPaginatedWithRBAC(
|
||||||
|
connector=interface.db,
|
||||||
|
modelClass=modelClass,
|
||||||
|
currentUser=interface.currentUser,
|
||||||
|
pagination=None,
|
||||||
|
recordFilter=None,
|
||||||
|
mandateId=interface.mandateId,
|
||||||
|
featureInstanceId=interface.featureInstanceId,
|
||||||
|
featureCode=interface.FEATURE_CODE,
|
||||||
|
)
|
||||||
|
items = result.items if hasattr(result, "items") else result
|
||||||
|
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||||
|
return handleIdsInMemory(items, pagination)
|
||||||
|
|
||||||
|
paginationParams = _parsePagination(pagination)
|
||||||
|
result = getRecordsetPaginatedWithRBAC(
|
||||||
|
connector=interface.db,
|
||||||
|
modelClass=modelClass,
|
||||||
|
currentUser=interface.currentUser,
|
||||||
|
pagination=paginationParams,
|
||||||
|
recordFilter=None,
|
||||||
|
mandateId=interface.mandateId,
|
||||||
|
featureInstanceId=interface.featureInstanceId,
|
||||||
|
featureCode=interface.FEATURE_CODE,
|
||||||
|
)
|
||||||
|
|
||||||
|
if paginationParams and hasattr(result, "items"):
|
||||||
|
return PaginatedResponse(
|
||||||
|
items=result.items,
|
||||||
|
pagination=PaginationMetadata(
|
||||||
|
currentPage=paginationParams.page or 1,
|
||||||
|
pageSize=paginationParams.pageSize or 20,
|
||||||
|
totalItems=result.totalItems,
|
||||||
|
totalPages=result.totalPages,
|
||||||
|
sort=paginationParams.sort if paginationParams else [],
|
||||||
|
filters=paginationParams.filters if paginationParams else None,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
items = result.items if hasattr(result, "items") else result
|
||||||
|
return PaginatedResponse(items=items, pagination=None)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/data/accounts", response_model=PaginatedResponse[TrusteeDataAccount])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_data_accounts(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of synced chart-of-accounts entries (TrusteeDataAccount)."""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeDataAccount,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/data/journal-entries", response_model=PaginatedResponse[TrusteeDataJournalEntry])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_data_journal_entries(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of synced journal entries (TrusteeDataJournalEntry)."""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeDataJournalEntry,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/data/journal-lines", response_model=PaginatedResponse[TrusteeDataJournalLine])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_data_journal_lines(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of synced journal lines (TrusteeDataJournalLine)."""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeDataJournalLine,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/data/contacts", response_model=PaginatedResponse[TrusteeDataContact])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_data_contacts(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of synced contacts (TrusteeDataContact)."""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeDataContact,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/data/account-balances", response_model=PaginatedResponse[TrusteeDataAccountBalance])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_data_account_balances(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of synced account balances (TrusteeDataAccountBalance)."""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeDataAccountBalance,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/accounting/configs", response_model=PaginatedResponse[TrusteeAccountingConfig])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_accounting_configs(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of accounting connector configurations (TrusteeAccountingConfig).
|
||||||
|
|
||||||
|
Note: secret config fields are stored masked in the underlying record;
|
||||||
|
UI consumers must rely on the dedicated `/accounting/config` endpoint
|
||||||
|
for secret-aware editing.
|
||||||
|
"""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeAccountingConfig,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/accounting/syncs", response_model=PaginatedResponse[TrusteeAccountingSync])
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
def get_accounting_syncs(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||||
|
pagination: Optional[str] = Query(None),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Read-only list of accounting sync records (TrusteeAccountingSync)."""
|
||||||
|
return _paginatedReadEndpoint(
|
||||||
|
instanceId=instanceId,
|
||||||
|
context=context,
|
||||||
|
modelClass=TrusteeAccountingSync,
|
||||||
|
pagination=pagination,
|
||||||
|
mode=mode,
|
||||||
|
column=column,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
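All seven read-only endpoints above accept the same three query parameters (`pagination`, `mode`, `column`). As a rough illustration of how a client would exercise this Unified Filter API, the sketch below queries the accounts table once for distinct filter values and once for a normal page. The base URL, instance ID, token and the JSON shape of the `pagination` string are placeholders and assumptions, not values taken from this diff; the exact pagination format is whatever `_parsePagination` expects.

```python
# Illustrative client calls against the new read-only Trustee endpoints.
# BASE_URL, INSTANCE_ID, the token and the pagination string format are
# hypothetical placeholders, not part of the change above.
import requests

BASE_URL = "https://gateway.example.com/api/trustee"   # assumed mount point
INSTANCE_ID = "fi-1234"                                # hypothetical instance
headers = {"Authorization": "Bearer <token>"}

# 1) Distinct values of one column, for building a filter dropdown.
filter_values = requests.get(
    f"{BASE_URL}/{INSTANCE_ID}/data/accounts",
    params={"mode": "filterValues", "column": "accountNumber"},
    headers=headers,
).json()

# 2) A regular paginated page (pagination passed as a serialized string).
page = requests.get(
    f"{BASE_URL}/{INSTANCE_ID}/data/accounts",
    params={"pagination": '{"page": 1, "pageSize": 20}'},  # assumed format
    headers=headers,
).json()
print(len(page["items"]), "accounts on page 1")
```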
@@ -9,31 +9,33 @@ Unified AI Workspace feature.
 import logging
 from typing import Dict, List, Any

+from modules.shared.i18nRegistry import t
+
 logger = logging.getLogger(__name__)

 FEATURE_CODE = "workspace"
-FEATURE_LABEL = "AI Workspace"
+FEATURE_LABEL = t("AI Workspace", context="UI")
 FEATURE_ICON = "mdi-brain"

 UI_OBJECTS = [
     {
         "objectKey": "ui.feature.workspace.dashboard",
-        "label": "Dashboard",
+        "label": t("Dashboard", context="UI"),
         "meta": {"area": "dashboard"}
     },
     {
         "objectKey": "ui.feature.workspace.editor",
-        "label": "Editor",
+        "label": t("Editor", context="UI"),
         "meta": {"area": "editor"}
     },
     {
         "objectKey": "ui.feature.workspace.settings",
-        "label": "Einstellungen",
+        "label": t("Einstellungen", context="UI"),
         "meta": {"area": "settings"}
     },
     {
         "objectKey": "ui.feature.workspace.rag-insights",
-        "label": "Wissens-Insights",
+        "label": t("Wissens-Insights", context="UI"),
         "meta": {"area": "rag-insights"},
     },
 ]

@@ -41,37 +43,37 @@ UI_OBJECTS = [
 RESOURCE_OBJECTS = [
     {
         "objectKey": "resource.feature.workspace.start",
-        "label": "Agent starten",
+        "label": t("Agent starten", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/start/stream", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.workspace.stop",
-        "label": "Agent stoppen",
+        "label": t("Agent stoppen", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/{workflowId}/stop", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.workspace.files",
-        "label": "Dateien verwalten",
+        "label": t("Dateien verwalten", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/files", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.workspace.folders",
-        "label": "Ordner verwalten",
+        "label": t("Ordner verwalten", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/folders", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.workspace.datasources",
-        "label": "Datenquellen",
+        "label": t("Datenquellen", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/datasources", "method": "GET"}
     },
     {
         "objectKey": "resource.feature.workspace.voice",
-        "label": "Spracheingabe/-ausgabe",
+        "label": t("Spracheingabe/-ausgabe", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/voice/*", "method": "POST"}
     },
     {
         "objectKey": "resource.feature.workspace.edits",
-        "label": "Datei-Aenderungen pruefen",
+        "label": t("Datei-Aenderungen pruefen", context="UI"),
         "meta": {"endpoint": "/api/workspace/{instanceId}/edit/*", "method": "POST"}
     },
 ]
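The change above only wraps the hard-coded label strings in `t(...)` so they are registered with the i18n registry at import time; the strings themselves stay the lookup keys. A minimal sketch of the resolution step that consumers such as the RBAC catalog are expected to perform is shown below. The multilingual dict shape `{xx, de, en, ...}` is taken from the docstrings elsewhere in this diff; the fallback chain in the helper is an assumption for illustration only and is not the real `resolveText` implementation.

```python
# Illustrative only: how a t()-registered label could be resolved per request.
# The real project uses resolveText(); the fallback order here is assumed.
def resolve_label_for_language(label, lang: str) -> str:
    if isinstance(label, str):      # plain string labels pass through unchanged
        return label
    if isinstance(label, dict):     # TextMultilingual-like {xx, de, en, ...}
        return label.get(lang) or label.get("en") or label.get("xx") or ""
    return str(label)

ui_object = {
    "objectKey": "ui.feature.workspace.rag-insights",
    "label": {"xx": "Wissens-Insights", "de": "Wissens-Insights", "en": "Knowledge Insights"},
}
print(resolve_label_for_language(ui_object["label"], "en"))  # Knowledge Insights
```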
@@ -603,6 +603,17 @@ async def streamWorkspaceStart(

     chatInterface.createMessage(userMessageData)

+    # Persist the attached data sources on the workflow so the chip-bar can
+    # be restored when the user re-opens this chat (per-chat persistence).
+    # Sources that no longer resolve are filtered out client-side on load.
+    try:
+        chatInterface.updateWorkflow(workflowId, {
+            "attachedDataSourceIds": list(userInput.dataSourceIds or []),
+            "attachedFeatureDataSourceIds": list(userInput.featureDataSourceIds or []),
+        })
+    except Exception as persistErr:
+        logger.warning(f"Could not persist chat attachments for {workflowId}: {persistErr}")
+
     agentTask = asyncio.ensure_future(
         _runWorkspaceAgent(
             workflowId=workflowId,

@@ -1112,7 +1123,12 @@ async def getWorkspaceMessages(
     workflowId: str = Path(...),
     context: RequestContext = Depends(getRequestContext),
 ):
-    """Get all messages for a workspace workflow/conversation."""
+    """Get all messages for a workspace workflow/conversation.
+
+    Also returns the IDs of data sources that were attached the last time the
+    user sent a message in this chat, so the WorkspaceInput can rehydrate its
+    chip-bar (per-chat attachment persistence).
+    """
     _mandateId, _ = _validateInstanceAccess(instanceId, context)
     chatInterface = _getChatInterface(context, featureInstanceId=instanceId, mandateId=_mandateId)
     messages = chatInterface.getMessages(workflowId) or []

@@ -1124,7 +1140,62 @@ async def getWorkspaceMessages(
             str(m.get("id") or ""),
         )
     )
-    return JSONResponse({"messages": items})
+    attachedDsIds: List[str] = []
+    attachedFdsIds: List[str] = []
+    try:
+        wf = chatInterface.getWorkflow(workflowId)
+        if wf:
+            attachedDsIds = list(getattr(wf, "attachedDataSourceIds", None) or [])
+            attachedFdsIds = list(getattr(wf, "attachedFeatureDataSourceIds", None) or [])
+    except Exception as e:
+        logger.debug(f"getWorkspaceMessages: cannot read attachments for {workflowId}: {e}")
+    return JSONResponse({
+        "messages": items,
+        "attachedDataSourceIds": attachedDsIds,
+        "attachedFeatureDataSourceIds": attachedFdsIds,
+    })
+
+
+class UpdateChatAttachmentsRequest(BaseModel):
+    """Body for PATCH /workflows/{workflowId}/attachments.
+
+    Replaces the persisted attachment lists for the chat. Sent when the user
+    detaches a source via the WorkspaceInput chip-bar so the change survives
+    a chat reload without waiting for the next sendMessage round-trip.
+    """
+    dataSourceIds: Optional[List[str]] = Field(default=None)
+    featureDataSourceIds: Optional[List[str]] = Field(default=None)
+
+
+@router.patch("/{instanceId}/workflows/{workflowId}/attachments")
+@limiter.limit("300/minute")
+async def patchWorkspaceWorkflowAttachments(
+    request: Request,
+    instanceId: str = Path(...),
+    workflowId: str = Path(...),
+    body: UpdateChatAttachmentsRequest = Body(...),
+    context: RequestContext = Depends(getRequestContext),
+):
+    """Persist the chip-bar attachment IDs for a chat (per-chat sources)."""
+    _mandateId, _ = _validateInstanceAccess(instanceId, context)
+    chatInterface = _getChatInterface(context, featureInstanceId=instanceId, mandateId=_mandateId)
+    workflow = chatInterface.getWorkflow(workflowId)
+    if not workflow:
+        raise HTTPException(status_code=404, detail=f"Workflow {workflowId} not found")
+    updateData: Dict[str, Any] = {}
+    if body.dataSourceIds is not None:
+        updateData["attachedDataSourceIds"] = list(body.dataSourceIds)
+    if body.featureDataSourceIds is not None:
+        updateData["attachedFeatureDataSourceIds"] = list(body.featureDataSourceIds)
+    if updateData:
+        chatInterface.updateWorkflow(workflowId, updateData)
+    return JSONResponse({
+        "workflowId": workflowId,
+        "attachedDataSourceIds": updateData.get("attachedDataSourceIds",
+            list(getattr(workflow, "attachedDataSourceIds", None) or [])),
+        "attachedFeatureDataSourceIds": updateData.get("attachedFeatureDataSourceIds",
+            list(getattr(workflow, "attachedFeatureDataSourceIds", None) or [])),
+    })
+
+
 # ---------------------------------------------------------------------------

@@ -1461,13 +1532,31 @@ async def listFeatureConnectionTables(
     except Exception:
         accessible = catalog.getDataObjects(inst.featureCode)

-    tables = []
+    accessibleKeys = {obj.get("objectKey", "") for obj in accessible}
+    referencedGroups = set()
     for obj in accessible:
+        meta = obj.get("meta", {})
+        if meta.get("wildcard") or meta.get("isGroup"):
+            continue
+        if meta.get("group"):
+            referencedGroups.add(meta["group"])
+
+    tables = []
+    for obj in catalog.getDataObjects(inst.featureCode):
         meta = obj.get("meta", {})
         if meta.get("wildcard"):
             continue
+        objectKey = obj.get("objectKey", "")
+        if meta.get("isGroup"):
+            # Groups are metadata-only; include if at least one child is accessible
+            # (regardless of whether the group itself was RBAC-granted).
+            if objectKey not in referencedGroups:
+                continue
+        else:
+            if objectKey not in accessibleKeys:
+                continue
         node = {
-            "objectKey": obj.get("objectKey", ""),
+            "objectKey": objectKey,
             "tableName": meta.get("table", ""),
             "label": resolveText(obj.get("label", "")),
             "fields": meta.get("fields", []),

@@ -1475,6 +1564,8 @@ async def listFeatureConnectionTables(
             "parentTable": meta.get("parentTable") or None,
             "parentKey": meta.get("parentKey") or None,
             "displayFields": meta.get("displayFields", []),
+            "isGroup": bool(meta.get("isGroup", False)),
+            "group": meta.get("group") or None,
         }
         tables.append(node)

@@ -1488,9 +1579,15 @@ async def listParentObjects(
     instanceId: str = Path(...),
     fiId: str = Path(..., description="Feature instance ID"),
     tableName: str = Path(..., description="Parent table name from DATA_OBJECTS"),
+    parentKey: Optional[str] = Query(None, description="Optional FK column name to filter by ancestor record (nested parent rendering)"),
+    parentValue: Optional[str] = Query(None, description="Optional FK value matching parentKey to filter children of a specific ancestor record"),
     context: RequestContext = Depends(getRequestContext),
 ):
-    """List records from a parent table so the user can pick a specific record to scope data."""
+    """List records from a parent table so the user can pick a specific record to scope data.
+
+    When parentKey + parentValue are provided, results are additionally filtered by that FK,
+    enabling nested record hierarchies (e.g. Sessions OF Context X).
+    """
     wsMandateId, _ = _validateInstanceAccess(instanceId, context)
     from modules.interfaces.interfaceDbApp import getRootInterface
     from modules.security.rbacCatalog import getCatalogService

@@ -1561,6 +1658,22 @@ async def listParentObjects(
         if hasUserId:
             sql += ' AND "userId" = %s'
             params.append(str(context.user.id))
+
+        if parentKey and parentValue:
+            cur.execute(
+                "SELECT 1 FROM information_schema.columns "
+                "WHERE table_schema = 'public' AND LOWER(table_name) = LOWER(%s) "
+                "AND column_name = %s",
+                [tableName, parentKey],
+            )
+            if cur.rowcount > 0:
+                sql += f' AND "{parentKey}" = %s'
+                params.append(parentValue)
+            else:
+                logger.warning(
+                    f"listParentObjects({tableName}): ignoring parentKey '{parentKey}' (column does not exist)"
+                )
+
         sql += ' ORDER BY "id" DESC LIMIT 100'
         cur.execute(sql, params)
         rows = []
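The new `PATCH .../workflows/{workflowId}/attachments` route above replaces the persisted chip-bar lists outright, and omitting a field leaves that list untouched (its model default is `None`). A hedged client-side sketch follows; the `/api/workspace` prefix is inferred from the RESOURCE_OBJECTS endpoints earlier in this diff, and the host, IDs and token are placeholders.

```python
# Hypothetical client call: keep two regular data sources attached to this
# chat and clear the feature data sources. Host, IDs and token are placeholders.
import requests

resp = requests.patch(
    "https://gateway.example.com/api/workspace/fi-1234/workflows/wf-5678/attachments",
    json={
        "dataSourceIds": ["ds-a", "ds-b"],   # replaces the persisted list
        "featureDataSourceIds": [],          # explicit empty list clears it
        # omitting a key entirely leaves that list unchanged (None in the model)
    },
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
print(resp.json()["attachedDataSourceIds"])  # -> ["ds-a", "ds-b"]
```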
@@ -1906,6 +1906,7 @@ def _createStoreResourceRules(db: DatabaseConnector) -> None:
         "resource.store.workspace",
         "resource.store.commcoach",
         "resource.store.trustee",
+        "resource.store.graphicalEditor",
     ]

     storeRules = []

@@ -19,6 +19,7 @@ from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached
 from modules.shared.configuration import APP_CONFIG
 from modules.shared.dbRegistry import registerDatabase
 from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
+from modules.shared.i18nRegistry import resolveText
 from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
 from modules.security.rbac import RbacClass
 from modules.datamodels.datamodelUam import (

@@ -1639,7 +1640,7 @@ class AppObjects:
             if not featureDef.get("autoCreateInstance", False):
                 continue
             featureCode = featureDef.get("code", featureName)
-            featureLabel = featureDef.get("label", {}).get("en", featureName)
+            featureLabel = resolveText(featureDef.get("label", featureName))
             instance = featureInterface.createFeatureInstance(
                 featureCode=featureCode,
                 mandateId=mandateId,
@@ -1839,10 +1839,14 @@ class BillingObjects:
         userId: Optional[str] = None,
         startTs: Optional[float] = None,
         endTs: Optional[float] = None,
-        period: str = "month",
+        bucketSize: str = "month",
     ) -> Dict[str, Any]:
         """
         Pure SQL aggregation for statistics. No row-level loading.
+
+        `bucketSize` controls only the time-series aggregation granularity
+        (`'day' | 'month' | 'year'`); the date range is set via `startTs`/`endTs`.
+
         Returns: totalCost, transactionCount, costByProvider, costByModel,
                  costByFeature, costByAccountId, timeSeries
         """

@@ -1909,10 +1913,17 @@ class BillingObjects:
         ]

         # 6) Time series via DATE_TRUNC on epoch timestamp
-        if period == "day":
-            truncExpr = "DATE_TRUNC('day', TO_TIMESTAMP(\"sysCreatedAt\"))"
-        else:
-            truncExpr = "DATE_TRUNC('month', TO_TIMESTAMP(\"sysCreatedAt\"))"
+        _bucketSpec = {
+            "day": ("day", "%Y-%m-%d"),
+            "month": ("month", "%Y-%m"),
+            "year": ("year", "%Y"),
+        }.get(bucketSize)
+        if _bucketSpec is None:
+            raise ValueError(
+                f"Invalid bucketSize: {bucketSize!r} (expected day|month|year)"
+            )
+        _truncUnit, _labelFormat = _bucketSpec
+        truncExpr = f"DATE_TRUNC('{_truncUnit}', TO_TIMESTAMP(\"sysCreatedAt\"))"

         cur.execute(
             f'SELECT {truncExpr} AS bucket, SUM("amount") AS total, COUNT(*) AS cnt '

@@ -1923,10 +1934,7 @@ class BillingObjects:
         timeSeries = []
         for r in cur.fetchall():
             bucket = r["bucket"]
-            if period == "day":
-                label = bucket.strftime("%Y-%m-%d") if bucket else "unknown"
-            else:
-                label = bucket.strftime("%Y-%m") if bucket else "unknown"
+            label = bucket.strftime(_labelFormat) if bucket else "unknown"
             timeSeries.append({
                 "date": label,
                 "cost": round(float(r["total"]), 4),
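The `_bucketSpec` table above pairs each bucket size with a `DATE_TRUNC` unit and a `strftime` label format. A small self-contained check of what the three label formats produce for one timestamp, with plain Python and no database involved:

```python
from datetime import datetime, timezone

bucket_spec = {
    "day": ("day", "%Y-%m-%d"),
    "month": ("month", "%Y-%m"),
    "year": ("year", "%Y"),
}

ts = datetime(2024, 5, 17, 14, 30, tzinfo=timezone.utc)  # example sysCreatedAt
for bucket_size, (trunc_unit, label_format) in bucket_spec.items():
    # DATE_TRUNC happens in SQL; this only shows the label-formatting step.
    print(bucket_size, "->", ts.strftime(label_format))
# day   -> 2024-05-17
# month -> 2024-05
# year  -> 2024
```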
@@ -219,6 +219,22 @@ class ChatObjects:
         # Everything else is an object
         return True

+    def _unwrapOptional(self, fieldType):
+        """Unwrap ``Optional[X]`` / ``Union[X, None]`` to ``X``.
+
+        The generic JSONB detection in ``_separateObjectFields`` checks
+        ``__origin__`` against ``(dict, list)``. For ``Optional[List[str]]``
+        the origin is ``Union``, so JSONB fields declared as ``Optional[...]``
+        would silently fall through to ``objectFields`` and be dropped on
+        write. Unwrapping the Optional first keeps the existing detection
+        intact while supporting nullable JSONB columns.
+        """
+        if getattr(fieldType, '__origin__', None) is Union:
+            nonNone = [a for a in getattr(fieldType, '__args__', ()) if a is not type(None)]
+            if len(nonNone) == 1:
+                return nonNone[0]
+        return fieldType
+
     def _separateObjectFields(self, model_class, data: Dict[str, Any]) -> tuple[Dict[str, Any], Dict[str, Any]]:
         """Separate simple fields from object fields based on Pydantic model structure."""
         simpleFields = {}

@@ -232,7 +248,7 @@ class ChatObjects:
             if fieldName in modelFields:
                 fieldInfo = modelFields[fieldName]
                 # Pydantic v2 only
-                fieldType = fieldInfo.annotation
+                fieldType = self._unwrapOptional(fieldInfo.annotation)

                 # Always route relational/object fields to object_fields for separate handling
                 # These fields are stored in separate normalized tables, not as JSONB

@@ -734,7 +750,9 @@ class ChatObjects:
                 lastActivity=_toFloat(workflow.get("lastActivity")),
                 startedAt=_toFloat(workflow.get("startedAt")),
                 logs=logs,
-                messages=messages
+                messages=messages,
+                attachedDataSourceIds=workflow.get("attachedDataSourceIds") or [],
+                attachedFeatureDataSourceIds=workflow.get("attachedFeatureDataSourceIds") or [],
             )
         except Exception as e:
             logger.error(f"getWorkflow: data validation failed for {workflowId}: {e}")

@@ -891,7 +909,13 @@ class ChatObjects:
             lastActivity=updated.get("lastActivity", workflow.lastActivity),
             startedAt=updated.get("startedAt", workflow.startedAt),
             logs=logs,
-            messages=messages
+            messages=messages,
+            attachedDataSourceIds=updated.get("attachedDataSourceIds")
+                if updated.get("attachedDataSourceIds") is not None
+                else (getattr(workflow, "attachedDataSourceIds", None) or []),
+            attachedFeatureDataSourceIds=updated.get("attachedFeatureDataSourceIds")
+                if updated.get("attachedFeatureDataSourceIds") is not None
+                else (getattr(workflow, "attachedFeatureDataSourceIds", None) or []),
         )

     def deleteWorkflow(self, workflowId: str) -> bool:
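The `_unwrapOptional` helper added above inspects `__origin__` and `__args__` directly. The same unwrapping can be reproduced standalone with the `typing` helpers; this sketch only illustrates the behaviour the docstring describes (an `Optional` JSONB field keeping its `list`/`dict` origin after unwrapping), it is not the class method itself.

```python
from typing import List, Optional, Union, get_args, get_origin

def unwrap_optional(field_type):
    """Return X for Optional[X] / Union[X, None]; anything else unchanged."""
    if get_origin(field_type) is Union:
        non_none = [a for a in get_args(field_type) if a is not type(None)]
        if len(non_none) == 1:
            return non_none[0]
    return field_type

print(get_origin(Optional[List[str]]))                    # typing.Union -> would be missed
print(get_origin(unwrap_optional(Optional[List[str]])))   # <class 'list'> -> detected as JSONB
print(unwrap_optional(int))                                # <class 'int'> (unchanged)
```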
@@ -104,6 +104,49 @@ class FeatureInterface:
             logger.error(f"Error creating feature {code}: {e}")
             raise ValueError(f"Failed to create feature: {e}")

+    def upsertFeature(self, code: str, label: Any, icon: str = "mdi-puzzle") -> str:
+        """Insert or update a Feature row for ``code``.
+
+        Idempotent counterpart to :meth:`createFeature` used by the boot-time
+        sync (see ``modules.system.registry.syncCatalogFeaturesToDb``) so the
+        ``Feature`` DB-table stays consistent with the in-memory feature
+        registry built from the code modules. Without this sync the
+        ``FeatureInstance.featureCode`` FK would be dangling for every
+        feature whose definition lives only in code (the user-reported
+        false-positive orphans).
+
+        Args:
+            code: Unique feature code (e.g. ``"trustee"``).
+            label: Either a string (the source label, will be wrapped as
+                ``{"xx": label}``), a dict ``{"xx": ..., "de": ..., ...}``
+                or an existing TextMultilingual instance.
+            icon: Icon identifier.
+
+        Returns:
+            One of ``"created"``, ``"updated"``, ``"unchanged"``.
+        """
+        try:
+            normalizedLabel = coerce_text_multilingual(label) if not isinstance(label, dict) else label
+            existing = self.getFeature(code)
+            if existing is None:
+                self.createFeature(code, normalizedLabel.model_dump() if hasattr(normalizedLabel, "model_dump") else normalizedLabel, icon)
+                return "created"
+
+            existingLabel = existing.label.model_dump() if hasattr(existing.label, "model_dump") else existing.label
+            desiredLabel = normalizedLabel.model_dump() if hasattr(normalizedLabel, "model_dump") else normalizedLabel
+            updateData: Dict[str, Any] = {}
+            if existingLabel != desiredLabel:
+                updateData["label"] = desiredLabel
+            if (existing.icon or "") != (icon or ""):
+                updateData["icon"] = icon or ""
+            if not updateData:
+                return "unchanged"
+            self.db.recordModify(Feature, code, updateData)
+            return "updated"
+        except Exception as e:
+            logger.error(f"Error upserting feature {code}: {e}")
+            raise ValueError(f"Failed to upsert feature: {e}")
+
     # ============================================
     # Feature Instance Methods
     # ============================================

@@ -201,9 +244,21 @@ class FeatureInterface:
         if copyTemplateRoles:
             self._copyTemplateRoles(featureCode, mandateId, instanceId)

-        # Copy template workflows (if feature defines TEMPLATE_WORKFLOWS)
-        self._copyTemplateWorkflows(featureCode, mandateId, instanceId)
+        # Copy template workflows (if feature defines TEMPLATE_WORKFLOWS).
+        # WICHTIG: Workflow-Bootstrap darf die Instanz-Erstellung NICHT killen
+        # (Instanz + Rollen sind primaer; Workflows kann Admin via Sync nachladen).
+        # Fehler werden aber laut geloggt, damit sie nicht unbemerkt bleiben.
+        try:
+            self._copyTemplateWorkflows(featureCode, mandateId, instanceId)
+        except Exception as wfErr:
+            logger.error(
+                f"createFeatureInstance: workflow bootstrap FAILED for feature "
+                f"'{featureCode}' instance {instanceId} — instance was created but "
+                f"workflows are missing. Use POST /api/features/instances/{instanceId}"
+                f"/sync-workflows to recover. Reason: {wfErr}",
+                exc_info=True,
+            )

         cleanedRecord = dict(createdInstance)
         return FeatureInstance(**cleanedRecord)


@@ -227,31 +282,57 @@ class FeatureInterface:

         Returns:
             Number of workflows copied
+
+        Raises:
+            RuntimeError: If templates exist but cannot be copied.
+                Caller decides whether to swallow or re-raise.
         """
         import json
-        import importlib
+        from modules.system.registry import loadFeatureMainModules
+        mainModules = loadFeatureMainModules()
+        featureModule = mainModules.get(featureCode)
+        if not featureModule:
+            logger.debug(
+                f"_copyTemplateWorkflows: no main module loaded for feature '{featureCode}' — nothing to copy"
+            )
+            return 0
+        getTemplateWorkflows = getattr(featureModule, "getTemplateWorkflows", None)
+        if not getTemplateWorkflows:
+            logger.debug(
+                f"_copyTemplateWorkflows: feature '{featureCode}' has no getTemplateWorkflows() — nothing to copy"
+            )
+            return 0
+
         try:
-            from modules.system.registry import loadFeatureMainModules
-            mainModules = loadFeatureMainModules()
-            featureModule = mainModules.get(featureCode)
-            if not featureModule:
-                return 0
-            getTemplateWorkflows = getattr(featureModule, "getTemplateWorkflows", None)
-            if not getTemplateWorkflows:
-                return 0
-
-            templateWorkflows = getTemplateWorkflows()
-            if not templateWorkflows:
-                return 0
-
-            from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import getGraphicalEditorInterface
-            from modules.interfaces.interfaceDbApp import getRootInterface
-            rootUser = getRootInterface().currentUser
-            geInterface = getGraphicalEditorInterface(rootUser, mandateId, instanceId)
-
-            copied = 0
-            for template in templateWorkflows:
+            templateWorkflows = getTemplateWorkflows() or []
+        except Exception as e:
+            logger.error(
+                f"_copyTemplateWorkflows: getTemplateWorkflows() raised for feature '{featureCode}': {e}",
+                exc_info=True,
+            )
+            raise RuntimeError(
+                f"Feature '{featureCode}' getTemplateWorkflows() failed: {e}"
+            )
+
+        if not templateWorkflows:
+            return 0
+
+        logger.info(
+            f"_copyTemplateWorkflows: copying {len(templateWorkflows)} template workflow(s) "
+            f"for feature '{featureCode}' to instance {instanceId} (mandate={mandateId})"
+        )
+
+        from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import getGraphicalEditorInterface
+        from modules.security.rootAccess import getRootUser
+        rootUser = getRootUser()
+        geInterface = getGraphicalEditorInterface(rootUser, mandateId, instanceId)
+
+        copied = 0
+        failed = 0
+        for template in templateWorkflows:
+            templateId = template.get("id", "<no-id>")
+            try:
                 graphJson = json.dumps(template.get("graph", {}))
                 graphJson = graphJson.replace("{{featureInstanceId}}", instanceId)
                 graph = json.loads(graphJson)

@@ -263,22 +344,30 @@ class FeatureInterface:
                     "graph": graph,
                     "tags": template.get("tags", [f"feature:{featureCode}"]),
                     "isTemplate": False,
-                    "templateSourceId": template["id"],
+                    "templateSourceId": templateId,
                     "templateScope": "instance",
                     "active": True,
                 })
                 copied += 1
+            except Exception as e:
+                failed += 1
+                logger.error(
+                    f"_copyTemplateWorkflows: failed to create workflow '{templateId}' for "
+                    f"feature '{featureCode}' instance {instanceId}: {e}",
+                    exc_info=True,
+                )

-            if copied > 0:
-                logger.info(f"Feature '{featureCode}': Copied {copied} template workflows to instance {instanceId}")
-            return copied
-
-        except ImportError:
-            logger.debug(f"No feature module found for '{featureCode}' — skipping workflow bootstrap")
-            return 0
-        except Exception as e:
-            logger.warning(f"Error copying template workflows for '{featureCode}' instance {instanceId}: {e}")
-            return 0
+        if copied:
+            logger.info(
+                f"_copyTemplateWorkflows: copied {copied}/{len(templateWorkflows)} workflow(s) "
+                f"for feature '{featureCode}' instance {instanceId} (failed={failed})"
+            )
+        if failed:
+            raise RuntimeError(
+                f"_copyTemplateWorkflows: {failed}/{len(templateWorkflows)} workflow(s) failed "
+                f"for feature '{featureCode}' instance {instanceId}"
+            )
+        return copied

     def _copyTemplateRoles(self, featureCode: str, mandateId: str, instanceId: str) -> int:
         """
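`upsertFeature` reports one of three outcomes instead of raising on duplicates, which is what makes it usable from an idempotent boot-time sync. A hedged sketch of how a sync loop could consume that contract: the registry shape iterated here is assumed; only the return values "created"/"updated"/"unchanged" come from the method above.

```python
# Assumed registry shape: {code: {"label": {...}, "icon": "..."}}. Only the
# upsertFeature() return contract ("created" | "updated" | "unchanged") is
# taken from the diff above; everything else is illustrative.
def sync_features_to_db(feature_interface, feature_registry: dict) -> dict:
    counts = {"created": 0, "updated": 0, "unchanged": 0}
    for code, definition in feature_registry.items():
        outcome = feature_interface.upsertFeature(
            code,
            definition.get("label", code),
            icon=definition.get("icon", "mdi-puzzle"),
        )
        counts[outcome] += 1
    return counts

# e.g. counts == {"created": 1, "updated": 2, "unchanged": 9}
```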
@@ -14,9 +14,11 @@ from modules.auth import limiter
 from modules.auth.authentication import requireSysAdmin
 from modules.datamodels.datamodelUam import User
 from modules.system.databaseHealth import (
+    OrphanCleanupRefused,
     _cleanAllOrphans,
     _cleanOrphans,
     _getTableStats,
+    _listOrphans,
     _scanOrphans,
 )


@@ -34,6 +36,19 @@ class OrphanCleanRequest(BaseModel):
     db: str = Field(..., description="Source database name (e.g. poweron_app)")
     table: str = Field(..., description="Source table (Pydantic model class name)")
     column: str = Field(..., description="FK column on the source table")
+    force: bool = Field(
+        False,
+        description="Override safety guards (empty target / >50%% of source). Use with care.",
+    )
+
+
+class OrphanCleanAllRequest(BaseModel):
+    """Body for cleaning all detected orphans."""
+
+    force: bool = Field(
+        False,
+        description="Override safety guards on every relationship. Use with extreme care.",
+    )


 @router.get("/stats")

@@ -60,6 +75,39 @@ def getDatabaseOrphans(
     return {"orphans": rows}


+@router.get("/orphans/list")
+@limiter.limit("30/minute")
+def getDatabaseOrphansList(
+    request: Request,
+    db: str,
+    table: str,
+    column: str,
+    limit: int = 1000,
+    currentUser: User = Depends(requireSysAdmin),
+) -> Dict[str, Any]:
+    """Return up to ``limit`` actual orphan source-rows for one FK relationship.
+
+    Used by the SysAdmin UI's per-row download button: a human can review the
+    orphan list (full source row + the unresolved FK value) before triggering
+    the destructive clean operation.
+    """
+    try:
+        records = _listOrphans(db=db, table=table, column=column, limit=limit)
+    except ValueError as e:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=str(e),
+        ) from e
+    return {
+        "db": db,
+        "table": table,
+        "column": column,
+        "count": len(records),
+        "limit": limit,
+        "records": records,
+    }
+
+
 @router.post("/orphans/clean")
 @limiter.limit("10/minute")
 def postDatabaseOrphansClean(

@@ -69,19 +117,33 @@ def postDatabaseOrphansClean(
 ) -> Dict[str, Any]:
     """Delete orphaned rows for a single FK relationship."""
     try:
-        deleted = _cleanOrphans(body.db, body.table, body.column)
+        deleted = _cleanOrphans(body.db, body.table, body.column, force=body.force)
+    except OrphanCleanupRefused as e:
+        logger.warning(
+            "SysAdmin orphan clean REFUSED: user=%s db=%s table=%s column=%s reason=%s",
+            currentUser.username,
+            body.db,
+            body.table,
+            body.column,
+            e,
+        )
+        raise HTTPException(
+            status_code=status.HTTP_409_CONFLICT,
+            detail={"refused": True, "reason": str(e)},
+        ) from e
     except ValueError as e:
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=str(e),
         ) from e
     logger.info(
-        "SysAdmin orphan clean: user=%s db=%s table=%s column=%s deleted=%s",
+        "SysAdmin orphan clean: user=%s db=%s table=%s column=%s deleted=%s force=%s",
         currentUser.username,
         body.db,
         body.table,
         body.column,
         deleted,
+        body.force,
     )
     return {"deleted": deleted}


@@ -90,13 +152,26 @@ def postDatabaseOrphansClean(
 @limiter.limit("2/minute")
 def postDatabaseOrphansCleanAll(
     request: Request,
+    body: Optional[OrphanCleanAllRequest] = None,
     currentUser: User = Depends(requireSysAdmin),
 ) -> Dict[str, Any]:
-    """Run orphan cleanup for every relationship that currently has orphans."""
-    results: List[dict] = _cleanAllOrphans()
+    """Run orphan cleanup for every relationship that currently has orphans.
+
+    Returns per-relationship results. Each entry contains either `deleted` (success),
+    `skipped` (safety guard triggered, no force), or `error` (other failure).
+    """
+    force = bool(body.force) if body is not None else False
+    results: List[dict] = _cleanAllOrphans(force=force)
+    skipped = sum(1 for r in results if "skipped" in r)
+    errored = sum(1 for r in results if "error" in r)
+    deletedTotal = sum(int(r.get("deleted", 0)) for r in results)
     logger.info(
-        "SysAdmin orphan clean-all: user=%s batches=%s",
+        "SysAdmin orphan clean-all: user=%s batches=%s deleted=%s skipped=%s errored=%s force=%s",
         currentUser.username,
         len(results),
+        deletedTotal,
+        skipped,
+        errored,
+        force,
     )
-    return {"results": results}
+    return {"results": results, "skipped": skipped, "errored": errored, "deleted": deletedTotal}
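The new `/orphans/list` endpoint plus the `force` flag give the SysAdmin UI a review-then-delete flow: list the affected rows first, and only escalate to `force=true` after a 409 refusal has been inspected. A hedged sketch of that flow from a script follows; the host, path prefix and credentials are placeholders, and only the request/response fields (`records`, `deleted`, the 409 body with `refused`/`reason`) come from the routes above.

```python
# Placeholder host/prefix; the db/table/column values reuse the examples from
# the field descriptions and docstrings in this diff.
import requests

BASE = "https://gateway.example.com/api/sysadmin/database"   # assumed prefix
auth = {"Authorization": "Bearer <sysadmin-token>"}
rel = {"db": "poweron_app", "table": "FeatureInstance", "column": "featureCode"}

# 1) Review the actual orphan rows first.
listing = requests.get(f"{BASE}/orphans/list", params={**rel, "limit": 100}, headers=auth).json()
print(listing["count"], "orphan rows")

# 2) Try a normal clean; a 409 means a safety guard refused the deletion.
resp = requests.post(f"{BASE}/orphans/clean", json=rel, headers=auth)
if resp.status_code == 409:
    print("refused:", resp.json()["detail"]["reason"])
    # 3) Only after manual review: override the guard explicitly.
    resp = requests.post(f"{BASE}/orphans/clean", json={**rel, "force": True}, headers=auth)
resp.raise_for_status()
print("deleted:", resp.json()["deleted"])
```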
@@ -171,9 +171,16 @@ def get_my_feature_instances(
         if mandate and not getattr(mandate, "enabled", True):
             continue
         if mandate:
+            mandateName = mandate.name if hasattr(mandate, 'name') else mandateId
+            mandateLabel = (
+                mandate.label
+                if hasattr(mandate, 'label') and mandate.label
+                else mandateName
+            )
             mandatesMap[mandateId] = {
                 "id": mandateId,
-                "name": mandate.name if hasattr(mandate, 'name') else mandateId,
+                "name": mandateName,
+                "label": mandateLabel,
                 "code": mandate.code if hasattr(mandate, 'code') else None,
                 "features": []
             }

@@ -181,6 +188,7 @@ def get_my_feature_instances(
             mandatesMap[mandateId] = {
                 "id": mandateId,
                 "name": mandateId,
+                "label": mandateId,
                 "code": None,
                 "features": []
             }

@@ -210,6 +218,7 @@ def get_my_feature_instances(
             "featureCode": instance.featureCode,
             "mandateId": mandateId,
             "mandateName": mandatesMap[mandateId]["name"],
+            "mandateLabel": mandatesMap[mandateId]["label"],
             "instanceLabel": instance.label,
             "userRoles": userRoles,
             "permissions": permissions


@@ -305,8 +305,10 @@ async def getAuditLog(
 async def getAuditStats(
     request: Request,
     context: RequestContext = Depends(getRequestContext),
-    timeRange: int = Query(30, ge=1, le=365, description="Days to aggregate"),
-    groupBy: str = Query("model", description="Grouping: model, user, feature, day"),
+    dateFrom: str = Query(..., description="ISO YYYY-MM-DD (inclusive)"),
+    dateTo: str = Query(..., description="ISO YYYY-MM-DD (inclusive)"),
+    groupBy: str = Query("model", pattern="^(model|user|feature|day)$",
+                         description="Grouping: model, user, feature, day"),
 ):
     _requireAuditAccess(context)
     mandateId = str(context.mandateId) if context.mandateId else ""

@@ -314,7 +316,12 @@ async def getAuditStats(
         raise HTTPException(status_code=400, detail=routeApiMsg("Mandanten-ID erforderlich"))

     from modules.shared.aiAuditLogger import aiAuditLogger
-    return aiAuditLogger.getAiAuditStats(mandateId, timeRangeDays=timeRange, groupBy=groupBy)
+    from modules.shared.dateRange import isoDateRangeToLocalEpoch
+
+    fromTs, toTs = isoDateRangeToLocalEpoch(dateFrom, dateTo)
+    return aiAuditLogger.getAiAuditStats(
+        mandateId, fromTs=fromTs, toTs=toTs, groupBy=groupBy,
+    )


 # ── Tab D: Neutralization Mappings ──
@ -258,8 +258,10 @@ class AccountSummary(BaseModel):
|
||||||
|
|
||||||
|
|
||||||
class UsageReportResponse(BaseModel):
|
class UsageReportResponse(BaseModel):
|
||||||
"""Usage report for a period."""
|
"""Usage report for an explicit date range."""
|
||||||
period: str
|
dateFrom: str
|
||||||
|
dateTo: str
|
||||||
|
bucketSize: str
|
||||||
totalCost: float
|
totalCost: float
|
||||||
transactionCount: int
|
transactionCount: int
|
||||||
costByProvider: Dict[str, float]
|
costByProvider: Dict[str, float]
|
||||||
|
|
@@ -523,80 +525,69 @@ def getTransactions(
         raise HTTPException(status_code=500, detail=str(e))


-@router.get("/statistics/{period}", response_model=UsageReportResponse)
+@router.get("/statistics", response_model=UsageReportResponse)
 @limiter.limit("30/minute")
 def getStatistics(
     request: Request,
-    period: str = Path(..., description="Period: 'day', 'month', or 'year'"),
-    year: int = Query(..., description="Year"),
-    month: Optional[int] = Query(None, description="Month (1-12, required for 'day' period)"),
+    dateFrom: str = Query(..., description="ISO YYYY-MM-DD (inclusive)"),
+    dateTo: str = Query(..., description="ISO YYYY-MM-DD (inclusive)"),
+    bucketSize: str = Query(..., pattern="^(day|month|year)$",
+                            description="Time-bucket granularity: day, month, or year"),
     ctx: RequestContext = Depends(getRequestContext)
 ):
     """
-    Get usage statistics for a period.
+    Get usage statistics for an explicit date range.

+    `dateFrom`/`dateTo` are inclusive local-day boundaries.
+    `bucketSize` controls the time-series aggregation granularity and is
+    independent of the chosen range.
     """
+    from modules.shared.dateRange import parseIsoDateRange
+
     try:
-        # Validate period
-        if period not in ["day", "month", "year"]:
-            raise HTTPException(status_code=400, detail=routeApiMsg("Invalid period. Use 'day', 'month', or 'year'"))
-
-        if period == "day" and not month:
-            raise HTTPException(status_code=400, detail=routeApiMsg("Month is required for 'day' period"))
+        startDate, toDateInclusive = parseIsoDateRange(dateFrom, dateTo)
+        # `calculateStatisticsFromTransactions` expects a half-open
+        # [startDate, endDate) interval, so widen the upper bound by one day.
+        from datetime import timedelta as _td
+        endDate = toDateInclusive + _td(days=1)

         billingInterface = getBillingInterface(ctx.user, ctx.mandateId)
         settings = billingInterface.getSettings(ctx.mandateId)

+        emptyResponse = UsageReportResponse(
+            dateFrom=dateFrom,
+            dateTo=dateTo,
+            bucketSize=bucketSize,
+            totalCost=0.0,
+            transactionCount=0,
+            costByProvider={},
+            costByFeature={},
+        )
         if not settings:
-            return UsageReportResponse(
-                period=period,
-                totalCost=0.0,
-                transactionCount=0,
-                costByProvider={},
-                costByFeature={}
-            )
+            return emptyResponse

         # Transactions are always on user accounts (audit trail)
         account = billingInterface.getUserAccount(ctx.mandateId, ctx.user.id)

         if not account:
-            return UsageReportResponse(
-                period=period,
-                totalCost=0.0,
-                transactionCount=0,
-                costByProvider={},
-                costByFeature={}
-            )
-
-        # Calculate date range
-        if period == "day":
-            startDate = date(year, month, 1)
-            if month == 12:
-                endDate = date(year + 1, 1, 1)
-            else:
-                endDate = date(year, month + 1, 1)
-        elif period == "month":
-            startDate = date(year, 1, 1)
-            endDate = date(year + 1, 1, 1)
-        else:  # year
-            startDate = date(year, 1, 1)
-            endDate = date(year + 1, 1, 1)
-
-        # Get statistics from transactions
+            return emptyResponse
+
         stats = billingInterface.calculateStatisticsFromTransactions(
             account["id"],
             startDate,
-            endDate
+            endDate,
         )

         return UsageReportResponse(
-            period=period,
+            dateFrom=dateFrom,
+            dateTo=dateTo,
+            bucketSize=bucketSize,
             totalCost=stats.get("totalCostCHF", 0.0),
             transactionCount=stats.get("transactionCount", 0),
             costByProvider=stats.get("costByProvider", {}),
             costByModel=stats.get("costByModel", {}),
-            costByFeature=stats.get("costByFeature", {})
+            costByFeature=stats.get("costByFeature", {}),
         )

     except HTTPException:
         raise
     except Exception as e:
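A hedged example of calling the reworked statistics route from a client: the gateway base URL, route prefix and token are placeholders, while the query parameters and the X-Mandate-Id header name come from this diff.

# Hypothetical client call; base URL, route prefix and token are placeholders.
import requests

resp = requests.get(
    "https://gateway.example.com/billing/statistics",
    params={"dateFrom": "2025-01-01", "dateTo": "2025-03-31", "bucketSize": "month"},
    headers={"Authorization": "Bearer <token>", "X-Mandate-Id": "<mandateId>"},
    timeout=30,
)
resp.raise_for_status()
report = resp.json()
print(report["totalCost"], report["transactionCount"], report["bucketSize"])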
@@ -778,6 +769,21 @@ def addCredit(
         raise HTTPException(status_code=500, detail=str(e))


+@router.get("/checkout/amounts", response_model=List[float])
+@limiter.limit("60/minute")
+def getCheckoutAmounts(
+    request: Request,
+    ctx: RequestContext = Depends(getRequestContext),
+):
+    """
+    Return the server-side allow-list of CHF top-up amounts for Stripe Checkout.
+    The frontend must populate its dropdown from this list — values not in
+    the list are rejected by `create_checkout_session` (server-side validation).
+    """
+    from modules.serviceCenter.services.serviceBilling.stripeCheckout import ALLOWED_AMOUNTS_CHF
+    return [float(a) for a in ALLOWED_AMOUNTS_CHF]
+
+
 @router.post("/checkout/create/{targetMandateId}", response_model=CheckoutCreateResponse)
 @limiter.limit("10/minute")
 def createCheckoutSession(
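The endpoint only exposes ALLOWED_AMOUNTS_CHF; the corresponding server-side check inside `create_checkout_session` is not part of this hunk. A minimal sketch of what that validation could look like; the constant values and error wording are assumptions, only the constant's name and the reject-on-mismatch behaviour come from the docstring above:

# Hypothetical validation sketch; the real ALLOWED_AMOUNTS_CHF values and the
# real create_checkout_session signature live in stripeCheckout.py.
from fastapi import HTTPException

ALLOWED_AMOUNTS_CHF = (20.0, 50.0, 100.0, 200.0, 500.0)  # illustrative values only

def validateTopUpAmount(amount_chf: float) -> float:
    """Reject any amount that is not on the server-side allow-list."""
    if float(amount_chf) not in ALLOWED_AMOUNTS_CHF:
        raise HTTPException(status_code=400, detail="Amount not in allowed top-up list")
    return float(amount_chf)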
@@ -800,12 +806,37 @@ def createCheckoutSession(
     if not _isAdminOfMandate(ctx, targetMandateId):
         raise HTTPException(status_code=403, detail=routeApiMsg("Mandate admin role required to load mandate credit"))

+    from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
+    appInterface = getAppInterface(ctx.user, mandateId=targetMandateId)
+    mandateRecord = appInterface.getMandate(targetMandateId)
+    if mandateRecord is not None:
+        mandateLabel = getattr(mandateRecord, "label", None) or getattr(mandateRecord, "name", None) or targetMandateId
+        invoiceAddress = {
+            "companyName": getattr(mandateRecord, "invoiceCompanyName", None),
+            "contactName": getattr(mandateRecord, "invoiceContactName", None),
+            "email": getattr(mandateRecord, "invoiceEmail", None),
+            "line1": getattr(mandateRecord, "invoiceLine1", None),
+            "line2": getattr(mandateRecord, "invoiceLine2", None),
+            "postalCode": getattr(mandateRecord, "invoicePostalCode", None),
+            "city": getattr(mandateRecord, "invoiceCity", None),
+            "state": getattr(mandateRecord, "invoiceState", None),
+            "country": getattr(mandateRecord, "invoiceCountry", None) or "CH",
+            "vatNumber": getattr(mandateRecord, "invoiceVatNumber", None),
+        }
+    else:
+        mandateLabel = targetMandateId
+        invoiceAddress = None
+
     from modules.serviceCenter.services.serviceBilling.stripeCheckout import create_checkout_session
     redirect_url = create_checkout_session(
         mandate_id=targetMandateId,
         user_id=checkoutRequest.userId,
         amount_chf=checkoutRequest.amount,
-        return_url=checkoutRequest.returnUrl
+        return_url=checkoutRequest.returnUrl,
+        mandate_label=mandateLabel,
+        invoice_address=invoiceAddress,
+        settings=settings,
+        billing_interface=billingInterface,
     )
     return CheckoutCreateResponse(redirectUrl=redirect_url)
@@ -935,11 +966,34 @@ async def stripeWebhook(
         return {"received": True}

     session_dict = session.to_dict_recursive() if hasattr(session, "to_dict_recursive") else dict(session)
-    result = _creditStripeSessionIfNeeded(billingInterface, session_dict, eventId=event_id)
-    logger.info(
-        f"Stripe webhook processed session {result.sessionId}: "
-        f"credited={result.credited}, alreadyCredited={result.alreadyCredited}"
-    )
+    try:
+        result = _creditStripeSessionIfNeeded(billingInterface, session_dict, eventId=event_id)
+        logger.info(
+            f"Stripe webhook processed session {result.sessionId}: "
+            f"credited={result.credited}, alreadyCredited={result.alreadyCredited}"
+        )
+    except HTTPException as he:
+        logger.error(
+            "Stripe webhook %s for session %s failed: status=%s detail=%s metadata=%s amount_total=%s",
+            event_id,
+            session_dict.get("id"),
+            he.status_code,
+            he.detail,
+            session_dict.get("metadata"),
+            session_dict.get("amount_total"),
+        )
+        if 400 <= he.status_code < 500 and event_id:
+            if not billingInterface.getStripeWebhookEventByEventId(event_id):
+                try:
+                    billingInterface.createStripeWebhookEvent(event_id)
+                    logger.warning(
+                        "Marked Stripe event %s as processed (permanent 4xx) to stop retries",
+                        event_id,
+                    )
+                except Exception as markEx:
+                    logger.error("Failed to mark Stripe event %s as processed: %s", event_id, markEx)
+            return {"received": True}
+        raise
     return {"received": True}
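The new 4xx branch uses the webhook-event table as an idempotency marker so Stripe stops retrying permanently failing events. A condensed sketch of that pattern in isolation, using the same billingInterface methods as the hunk above; the storage backend behind those methods is assumed:

# Condensed idempotency sketch; billingInterface method names mirror those used
# in the hunk above, their storage backend is an assumption.
def markPermanentFailureProcessed(billingInterface, eventId: str, logger) -> bool:
    """Record a permanently failing Stripe event so Stripe stops retrying it."""
    if not eventId:
        return False
    if billingInterface.getStripeWebhookEventByEventId(eventId):
        return True  # already recorded, nothing to do
    try:
        billingInterface.createStripeWebhookEvent(eventId)
        logger.warning("Marked Stripe event %s as processed (permanent 4xx)", eventId)
        return True
    except Exception as exc:
        logger.error("Failed to mark Stripe event %s as processed: %s", eventId, exc)
        return False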
@@ -1036,8 +1090,22 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:

     operative = subInterface.getOperativeForMandate(mandateId)
     hasActivePredecessor = operative is not None and operative["id"] != subscriptionRecordId
+    predecessorIsTrial = (
+        hasActivePredecessor
+        and operative.get("status") == SubscriptionStatusEnum.TRIALING.value
+    )

-    if hasActivePredecessor:
+    if hasActivePredecessor and predecessorIsTrial:
+        try:
+            subInterface.forceExpire(operative["id"])
+            logger.info(
+                "Trial subscription %s expired immediately for mandate %s due to paid upgrade %s",
+                operative["id"], mandateId, subscriptionRecordId,
+            )
+        except Exception as e:
+            logger.error("Failed to expire trial predecessor %s: %s", operative["id"], e)
+        toStatus = SubscriptionStatusEnum.ACTIVE
+    elif hasActivePredecessor:
         toStatus = SubscriptionStatusEnum.SCHEDULED
         if operative.get("recurring", True):
             operativeStripeId = operative.get("stripeSubscriptionId")
@@ -1583,9 +1651,10 @@ class ViewStatisticsResponse(BaseModel):
 @limiter.limit("30/minute")
 def getUserViewStatistics(
     request: Request,
-    period: str = Query(default="month", description="Period: 'day' or 'month'"),
-    year: int = Query(default=None, description="Year"),
-    month: Optional[int] = Query(None, description="Month (1-12, required for period='day')"),
+    dateFrom: str = Query(..., description="ISO YYYY-MM-DD (inclusive)"),
+    dateTo: str = Query(..., description="ISO YYYY-MM-DD (inclusive)"),
+    bucketSize: str = Query(..., pattern="^(day|month|year)$",
+                            description="Time-bucket granularity: day, month, or year"),
     scope: str = Query(default="all", description="Scope: 'personal' (own costs only), 'mandate' (filter by mandateId), 'all' (RBAC-filtered)"),
     mandateId: Optional[str] = Query(None, description="Mandate ID filter (used with scope='mandate')"),
     onlyMine: Optional[bool] = Query(None, description="Additional filter: restrict to current user's transactions within the selected scope"),
@@ -1593,24 +1662,23 @@ def getUserViewStatistics(
 ) -> ViewStatisticsResponse:
     """
     Get aggregated usage statistics across all user's mandates.

     Scope:
     - personal: only the current user's own transactions (ignores admin role)
     - mandate: transactions for a specific mandate (requires mandateId parameter)
     - all: RBAC-filtered (SysAdmin sees everything, admin sees mandate, user sees own)

     onlyMine: additional filter that restricts results to the current user's
     transactions while keeping the scope-based mandate selection.

-    - period='month': returns monthly time series for the given year
-    - period='day': returns daily time series for the given month/year
-    """
-    try:
-        if year is None:
-            year = datetime.now().year
-
-        if period == "day" and not month:
-            month = datetime.now().month
+    `dateFrom`/`dateTo` are inclusive local-day boundaries. `bucketSize`
+    controls the time-series aggregation granularity and is independent of
+    the chosen range.
+    """
+    from modules.shared.dateRange import isoDateRangeToLocalEpoch
+
+    try:
+        startTs, endTs = isoDateRangeToLocalEpoch(dateFrom, dateTo)

         billingInterface = getBillingInterface(ctx.user, ctx.mandateId)
@@ -1629,28 +1697,19 @@ def getUserViewStatistics(

         personalUserId = str(ctx.user.id) if (scope == "personal" or onlyMine) else None

-        if period == "day":
-            startDate = date(year, month, 1)
-            endDate = date(year + 1, 1, 1) if month == 12 else date(year, month + 1, 1)
-        else:
-            startDate = date(year, 1, 1)
-            endDate = date(year + 1, 1, 1)
-
-        startTs = datetime.combine(startDate, datetime.min.time()).timestamp()
-        endTs = datetime.combine(endDate, datetime.min.time()).timestamp()
-
         agg = billingInterface.getTransactionStatisticsAggregated(
             mandateIds=loadMandateIds,
             scope=scope,
             userId=personalUserId,
             startTs=startTs,
             endTs=endTs,
-            period=period,
+            bucketSize=bucketSize,
         )

         logger.info(
             f"View statistics (SQL-aggregated): totalCost={agg['totalCost']}, "
-            f"count={agg['transactionCount']}, period={period}, year={year}, month={month}"
+            f"count={agg['transactionCount']}, dateFrom={dateFrom}, dateTo={dateTo}, "
+            f"bucketSize={bucketSize}"
         )

         allAccounts = agg.get("_allAccounts", [])
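`bucketSize` replaces the old period/year/month trio as the time-series granularity, but how the aggregation maps transaction timestamps to buckets is not shown in this diff. One possible bucket-key mapping, purely as an illustration of the idea and not the actual SQL aggregation:

# Illustrative only: one way a timestamp could be mapped to a time bucket for
# bucketSize in {"day", "month", "year"}; the real aggregation may differ.
from datetime import datetime

_BUCKET_FORMATS = {"day": "%Y-%m-%d", "month": "%Y-%m", "year": "%Y"}

def bucketKey(ts: float, bucketSize: str) -> str:
    """Return the bucket label for a transaction timestamp."""
    return datetime.fromtimestamp(ts).strftime(_BUCKET_FORMATS[bucketSize])

print(bucketKey(1735689600.0, "month"))  # e.g. "2025-01" depending on local timezone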
@@ -23,6 +23,55 @@ routeApiMsg = apiRouteContext("routeDataFiles")
 logger = logging.getLogger(__name__)


+def _resolveFileWithScope(currentUser: User, context: RequestContext, fileId: str):
+    """Returns (managementInterface, fileItem) with RBAC scoped to the file's own mandate/instance.
+
+    Files generated by workflows (e.g. AI report outputs) carry their own
+    mandateId/featureInstanceId. Direct download links via <a href> cannot send
+    custom scope headers, so we resolve the scope from the FileItem itself and
+    re-check RBAC in that scope.
+
+    Returns (None, None) if the file does not exist or the user lacks access
+    in the file's actual scope.
+    """
+    requestMandateId = str(context.mandateId) if context.mandateId else None
+    requestInstanceId = str(context.featureInstanceId) if context.featureInstanceId else None
+
+    mgmt = interfaceDbManagement.getInterface(
+        currentUser,
+        mandateId=requestMandateId,
+        featureInstanceId=requestInstanceId,
+    )
+    fileItem = mgmt.getFile(fileId)
+    if fileItem:
+        return mgmt, fileItem
+
+    metas = mgmt.db.getRecordset(FileItem, recordFilter={"id": fileId})
+    if not metas:
+        return None, None
+
+    meta = metas[0]
+    fileMandateId = meta.get("mandateId") or None
+    fileInstanceId = meta.get("featureInstanceId") or None
+
+    if not fileMandateId and not fileInstanceId:
+        return None, None
+
+    if fileMandateId == requestMandateId and fileInstanceId == requestInstanceId:
+        return None, None
+
+    scopedMgmt = interfaceDbManagement.getInterface(
+        currentUser,
+        mandateId=fileMandateId,
+        featureInstanceId=fileInstanceId,
+    )
+    fileItem = scopedMgmt.getFile(fileId)
+    if not fileItem:
+        return None, None
+
+    return scopedMgmt, fileItem
+
+
 async def _autoIndexFile(fileId: str, fileName: str, mimeType: str, user):
     """Background task: pre-scan + extraction + knowledge indexing.
     Step 1: Structure Pre-Scan (AI-free) -> FileContentIndex (persisted)
@@ -975,20 +1024,18 @@ def updateFileNeutralize(
 def get_file(
     request: Request,
     fileId: str = Path(..., description="ID of the file"),
-    currentUser: User = Depends(getCurrentUser)
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext)
 ) -> FileItem:
-    """Get a file"""
+    """Get a file. Resolves the file's mandate/instance scope automatically."""
     try:
-        managementInterface = interfaceDbManagement.getInterface(currentUser)
-
-        # Get file via LucyDOM interface from the database
-        fileData = managementInterface.getFile(fileId)
+        _mgmt, fileData = _resolveFileWithScope(currentUser, context, fileId)
         if not fileData:
             raise HTTPException(
                 status_code=status.HTTP_404_NOT_FOUND,
                 detail=f"File with ID {fileId} not found"
             )

         return fileData

     except interfaceDbManagement.FileNotFoundError as e:
@@ -1107,23 +1154,17 @@ def download_file(
     currentUser: User = Depends(getCurrentUser),
     context: RequestContext = Depends(getRequestContext)
 ) -> Response:
-    """Download a file. Uses mandate/instance context when present (e.g. from feature pages)."""
+    """Download a file. Resolves the file's mandate/instance scope automatically,
+    so direct <a href> links work even when X-Mandate-Id / X-Instance-Id headers
+    are not sent by the browser."""
     try:
-        managementInterface = interfaceDbManagement.getInterface(
-            currentUser,
-            mandateId=str(context.mandateId) if context.mandateId else None,
-            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
-        )
-
-        # Get file data
-        fileData = managementInterface.getFile(fileId)
+        managementInterface, fileData = _resolveFileWithScope(currentUser, context, fileId)
         if not fileData:
             raise HTTPException(
                 status_code=status.HTTP_404_NOT_FOUND,
                 detail=f"File with ID {fileId} not found"
             )

-        # Get file content
         fileContent = managementInterface.getFileData(fileId)
         if not fileContent:
             raise HTTPException(
@@ -1160,15 +1201,15 @@ def preview_file(
     currentUser: User = Depends(getCurrentUser),
     context: RequestContext = Depends(getRequestContext)
 ) -> FilePreview:
-    """Preview a file's content. Uses mandate/instance context when present."""
+    """Preview a file's content. Resolves the file's mandate/instance scope automatically."""
     try:
-        managementInterface = interfaceDbManagement.getInterface(
-            currentUser,
-            mandateId=str(context.mandateId) if context.mandateId else None,
-            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
-        )
-
-        # Get file preview using the correct method
+        managementInterface, fileMeta = _resolveFileWithScope(currentUser, context, fileId)
+        if not fileMeta:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND,
+                detail=f"File with ID {fileId} not found"
+            )
+
         preview = managementInterface.getFileContent(fileId)
         if not preview:
             raise HTTPException(
Some files were not shown because too many files have changed in this diff.