Merge pull request #128 from valueonag/int

Int
This commit is contained in:
Patrick Motsch 2026-04-17 21:53:53 +02:00 committed by GitHub
commit 1cc5510888
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
115 changed files with 4544 additions and 451 deletions

15
app.py
View file

@ -360,6 +360,18 @@ async def lifespan(app: FastAPI):
eventManager.set_event_loop(main_loop) eventManager.set_event_loop(main_loop)
from modules.workflows.scheduler.mainScheduler import setMainLoop as setSchedulerMainLoop from modules.workflows.scheduler.mainScheduler import setMainLoop as setSchedulerMainLoop
setSchedulerMainLoop(main_loop) setSchedulerMainLoop(main_loop)
# Suppress noisy ConnectionResetError from ProactorEventLoop on Windows
# when clients (browsers) close connections abruptly. This is a known
# asyncio issue on Windows: https://bugs.python.org/issue39010
def _suppressClientDisconnect(loop, ctx):
exc = ctx.get("exception")
if isinstance(exc, ConnectionResetError):
return
if isinstance(exc, ConnectionAbortedError):
return
loop.default_exception_handler(ctx)
main_loop.set_exception_handler(_suppressClientDisconnect)
except RuntimeError: except RuntimeError:
pass pass
eventManager.start() eventManager.start()
@ -603,6 +615,9 @@ app.include_router(userAccessOverviewRouter)
from modules.routes.routeAdminDemoConfig import router as demoConfigRouter from modules.routes.routeAdminDemoConfig import router as demoConfigRouter
app.include_router(demoConfigRouter) app.include_router(demoConfigRouter)
from modules.routes.routeAdminDatabaseHealth import router as adminDatabaseHealthRouter
app.include_router(adminDatabaseHealthRouter)
from modules.routes.routeGdpr import router as gdprRouter from modules.routes.routeGdpr import router as gdprRouter
app.include_router(gdprRouter) app.include_router(gdprRouter)

View file

@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):
userId: str = Field( userId: str = Field(
description="ID of the user who triggered the AI call", description="ID of the user who triggered the AI call",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
username: Optional[str] = Field( username: Optional[str] = Field(
default=None, default=None,
@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="Mandate context of the call", description="Mandate context of the call",
json_schema_extra={"label": "Mandanten-ID"}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Feature instance context", description="Feature instance context",
json_schema_extra={"label": "Feature-Instanz-ID"}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
featureCode: Optional[str] = Field( featureCode: Optional[str] = Field(
default=None, default=None,
description="Feature code (e.g. workspace, trustee)", description="Feature code (e.g. workspace, trustee)",
json_schema_extra={"label": "Feature"}, json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
) )
instanceLabel: Optional[str] = Field( instanceLabel: Optional[str] = Field(
default=None, default=None,

View file

@ -106,7 +106,13 @@ class AuditLogEntry(BaseModel):
# Actor identification # Actor identification
userId: str = Field( userId: str = Field(
description="ID of the user who performed the action (or 'system' for system events)", description="ID of the user who performed the action (or 'system' for system events)",
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
username: Optional[str] = Field( username: Optional[str] = Field(
@ -119,13 +125,25 @@ class AuditLogEntry(BaseModel):
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="Mandate context (if applicable)", description="Mandate context (if applicable)",
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False} json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Feature instance context (if applicable)", description="Feature instance context (if applicable)",
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False} json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
# Event classification # Event classification

View file

@ -2,16 +2,28 @@
# All rights reserved. # All rights reserved.
"""Base Pydantic model with system-managed fields (DB + API + UI metadata).""" """Base Pydantic model with system-managed fields (DB + API + UI metadata)."""
from typing import Optional from typing import Dict, Optional, Type
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from modules.shared.i18nRegistry import i18nModel from modules.shared.i18nRegistry import i18nModel
_MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
"""Look up a PowerOnModel subclass by its table name (= class name)."""
return _MODEL_REGISTRY.get(tableName)
@i18nModel("Basisdatensatz") @i18nModel("Basisdatensatz")
class PowerOnModel(BaseModel): class PowerOnModel(BaseModel):
"""Basis-Datenmodell mit System-Audit-Feldern fuer alle DB-Tabellen.""" """Basis-Datenmodell mit System-Audit-Feldern fuer alle DB-Tabellen."""
def __init_subclass__(cls, **kwargs):
    # Auto-register every PowerOnModel subclass in the module-level
    # _MODEL_REGISTRY under its class name, so table names can later be
    # resolved back to model classes via _getModelByTableName.
    # NOTE(review): classes with the same name defined twice would
    # silently overwrite each other's registry entry.
    super().__init_subclass__(**kwargs)
    _MODEL_REGISTRY[cls.__name__] = cls
sysCreatedAt: Optional[float] = Field( sysCreatedAt: Optional[float] = Field(
default=None, default=None,
description="Record creation timestamp (UTC, set by system)", description="Record creation timestamp (UTC, set by system)",

View file

@ -46,11 +46,15 @@ class BillingAccount(PowerOnModel):
description="Primary key", description="Primary key",
json_schema_extra={"label": "ID"}, json_schema_extra={"label": "ID"},
) )
mandateId: str = Field(..., description="Foreign key to Mandate", json_schema_extra={"label": "Mandanten-ID"}) mandateId: str = Field(
...,
description="Foreign key to Mandate",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
)
userId: Optional[str] = Field( userId: Optional[str] = Field(
None, None,
description="Foreign key to User (None = mandate pool account, set = user audit account)", description="Foreign key to User (None = mandate pool account, set = user audit account)",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"}) balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
warningThreshold: float = Field( warningThreshold: float = Field(
@ -74,7 +78,11 @@ class BillingTransaction(PowerOnModel):
description="Primary key", description="Primary key",
json_schema_extra={"label": "ID"}, json_schema_extra={"label": "ID"},
) )
accountId: str = Field(..., description="Foreign key to BillingAccount", json_schema_extra={"label": "Konto-ID"}) accountId: str = Field(
...,
description="Foreign key to BillingAccount",
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
)
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"}) transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"}) amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
description: str = Field(..., description="Transaction description", json_schema_extra={"label": "Beschreibung"}) description: str = Field(..., description="Transaction description", json_schema_extra={"label": "Beschreibung"})
@ -84,12 +92,28 @@ class BillingTransaction(PowerOnModel):
referenceId: Optional[str] = Field(None, description="Reference ID", json_schema_extra={"label": "Referenz-ID"}) referenceId: Optional[str] = Field(None, description="Reference ID", json_schema_extra={"label": "Referenz-ID"})
# Context for workflow transactions # Context for workflow transactions
workflowId: Optional[str] = Field(None, description="Workflow ID (for WORKFLOW transactions)", json_schema_extra={"label": "Workflow-ID"}) workflowId: Optional[str] = Field(
featureInstanceId: Optional[str] = Field(None, description="Feature instance ID", json_schema_extra={"label": "Feature-Instanz-ID"}) None,
featureCode: Optional[str] = Field(None, description="Feature code (e.g., automation)", json_schema_extra={"label": "Feature-Code"}) description="Workflow ID (for WORKFLOW transactions; may be Chat or Graphical Editor)",
json_schema_extra={"label": "Workflow-ID"},
)
featureInstanceId: Optional[str] = Field(
None,
description="Feature instance ID",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
)
featureCode: Optional[str] = Field(
None,
description="Feature code (e.g., automation)",
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
)
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"}) aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"}) aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
createdByUserId: Optional[str] = Field(None, description="User who created/caused this transaction", json_schema_extra={"label": "Erstellt von Benutzer"}) createdByUserId: Optional[str] = Field(
None,
description="User who created/caused this transaction",
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
)
# AI call metadata (for per-call analytics) # AI call metadata (for per-call analytics)
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Verarbeitungszeit (s)"}) processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Verarbeitungszeit (s)"})
@ -106,11 +130,15 @@ class BillingSettings(BaseModel):
description="Primary key", description="Primary key",
json_schema_extra={"label": "ID"}, json_schema_extra={"label": "ID"},
) )
mandateId: str = Field(..., description="Foreign key to Mandate (UNIQUE)", json_schema_extra={"label": "Mandanten-ID"}) mandateId: str = Field(
...,
description="Foreign key to Mandate (UNIQUE)",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
)
warningThresholdPercent: float = Field( warningThresholdPercent: float = Field(
default=10.0, default=10.0,
description="Warning threshold as percentage", description="Benachrichtigung wenn das AI-Guthaben unter diesen Prozentsatz des Gesamtbudgets fällt",
json_schema_extra={"label": "Warnschwelle (%)"}, json_schema_extra={"label": "Warnschwelle (%)"},
) )
@ -179,7 +207,11 @@ class UsageStatistics(BaseModel):
description="Primary key", description="Primary key",
json_schema_extra={"label": "ID"}, json_schema_extra={"label": "ID"},
) )
accountId: str = Field(..., description="Foreign key to BillingAccount", json_schema_extra={"label": "Konto-ID"}) accountId: str = Field(
...,
description="Foreign key to BillingAccount",
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
)
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"}) periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"}) periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"})

View file

@ -14,7 +14,10 @@ import uuid
class ChatLog(PowerOnModel): class ChatLog(PowerOnModel):
"""Log entries for chat workflows. User-owned, no mandate context.""" """Log entries for chat workflows. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
workflowId: str = Field(description="Foreign key to workflow", json_schema_extra={"label": "Workflow-ID"}) workflowId: str = Field(
description="Foreign key to workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"}) message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"}) type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
timestamp: float = Field(default_factory=getUtcTimestamp, timestamp: float = Field(default_factory=getUtcTimestamp,
@ -32,8 +35,14 @@ class ChatLog(PowerOnModel):
class ChatDocument(PowerOnModel): class ChatDocument(PowerOnModel):
"""Documents attached to chat messages. User-owned, no mandate context.""" """Documents attached to chat messages. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
messageId: str = Field(description="Foreign key to message", json_schema_extra={"label": "Nachrichten-ID"}) messageId: str = Field(
fileId: str = Field(description="Foreign key to file", json_schema_extra={"label": "Datei-ID"}) description="Foreign key to message",
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
)
fileId: str = Field(
description="Foreign key to file",
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
)
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"}) fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"}) fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
mimeType: str = Field(description="MIME type of the file", json_schema_extra={"label": "MIME-Typ"}) mimeType: str = Field(description="MIME type of the file", json_schema_extra={"label": "MIME-Typ"})
@ -70,8 +79,15 @@ class ChatContentExtracted(BaseModel):
class ChatMessage(PowerOnModel): class ChatMessage(PowerOnModel):
"""Messages in chat workflows. User-owned, no mandate context.""" """Messages in chat workflows. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
workflowId: str = Field(description="Foreign key to workflow", json_schema_extra={"label": "Workflow-ID"}) workflowId: str = Field(
parentMessageId: Optional[str] = Field(None, description="Parent message ID for threading", json_schema_extra={"label": "Übergeordnete Nachrichten-ID"}) description="Foreign key to workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
parentMessageId: Optional[str] = Field(
None,
description="Parent message ID for threading",
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
)
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"}) documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"}) documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
message: Optional[str] = Field(None, description="Message content", json_schema_extra={"label": "Nachricht"}) message: Optional[str] = Field(None, description="Message content", json_schema_extra={"label": "Nachricht"})
@ -101,7 +117,17 @@ class WorkflowModeEnum(str, Enum):
class ChatWorkflow(PowerOnModel): class ChatWorkflow(PowerOnModel):
"""Chat workflow container. User-owned, no mandate context.""" """Chat workflow container. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
featureInstanceId: Optional[str] = Field(None, description="Feature instance ID for multi-tenancy isolation", json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}) featureInstanceId: Optional[str] = Field(
None,
description="Feature instance ID for multi-tenancy isolation",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
status: str = Field(default="running", description="Current status of the workflow", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [ status: str = Field(default="running", description="Current status of the workflow", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
{"value": "running", "label": "Running"}, {"value": "running", "label": "Running"},
{"value": "completed", "label": "Completed"}, {"value": "completed", "label": "Completed"},
@ -169,7 +195,11 @@ class UserInputRequest(BaseModel):
prompt: str = Field(description="Prompt for the user", json_schema_extra={"label": "Eingabeaufforderung"}) prompt: str = Field(description="Prompt for the user", json_schema_extra={"label": "Eingabeaufforderung"})
listFileId: List[str] = Field(default_factory=list, description="List of file IDs", json_schema_extra={"label": "Datei-IDs"}) listFileId: List[str] = Field(default_factory=list, description="List of file IDs", json_schema_extra={"label": "Datei-IDs"})
userLanguage: str = Field(default="en", description="User's preferred language", json_schema_extra={"label": "Benutzersprache"}) userLanguage: str = Field(default="en", description="User's preferred language", json_schema_extra={"label": "Benutzersprache"})
workflowId: Optional[str] = Field(None, description="Optional ID of the workflow to continue", json_schema_extra={"label": "Workflow-ID"}) workflowId: Optional[str] = Field(
None,
description="Optional ID of the workflow to continue",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"}) allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
@i18nModel("Aktions-Dokument") @i18nModel("Aktions-Dokument")
@ -307,7 +337,11 @@ class ChatTaskResult(BaseModel):
@i18nModel("Aufgabe") @i18nModel("Aufgabe")
class TaskItem(BaseModel): class TaskItem(BaseModel):
id: str = Field(..., description="Task ID", json_schema_extra={"label": "Aufgaben-ID"}) id: str = Field(..., description="Task ID", json_schema_extra={"label": "Aufgaben-ID"})
workflowId: str = Field(..., description="Workflow ID", json_schema_extra={"label": "Workflow-ID"}) workflowId: str = Field(
...,
description="Workflow ID",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"}) userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"}) status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"}) error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})

View file

@ -32,7 +32,10 @@ class ContentContextRef(BaseModel):
class ContentObject(BaseModel): class ContentObject(BaseModel):
"""Scalar content object extracted from a file. No AI involved.""" """Scalar content object extracted from a file. No AI involved."""
id: str = Field(default_factory=lambda: str(uuid.uuid4())) id: str = Field(default_factory=lambda: str(uuid.uuid4()))
fileId: str = Field(description="FK to the physical file") fileId: str = Field(
description="FK to the physical file",
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}},
)
contentType: str = Field(description="text, image, videostream, audiostream, other") contentType: str = Field(description="text, image, videostream, audiostream, other")
data: str = Field(default="", description="Content data (text, base64, URL)") data: str = Field(default="", description="Content data (text, base64, URL)")
contextRef: ContentContextRef = Field(default_factory=ContentContextRef) contextRef: ContentContextRef = Field(default_factory=ContentContextRef)

View file

@ -23,7 +23,7 @@ class DataSource(PowerOnModel):
) )
connectionId: str = Field( connectionId: str = Field(
description="FK to UserConnection", description="FK to UserConnection",
json_schema_extra={"label": "Verbindungs-ID"}, json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
) )
sourceType: str = Field( sourceType: str = Field(
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)", description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
@ -45,17 +45,17 @@ class DataSource(PowerOnModel):
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Scoped to feature instance", description="Scoped to feature instance",
json_schema_extra={"label": "Feature-Instanz"}, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID"}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
userId: str = Field( userId: str = Field(
default="", default="",
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
autoSync: bool = Field( autoSync: bool = Field(
default=False, default=False,

View file

@ -18,6 +18,7 @@ class ContentExtracted(BaseModel):
id: str = Field(description="Extraction id or source document id") id: str = Field(description="Extraction id or source document id")
parts: List[ContentPart] = Field(default_factory=list, description="List of extracted parts") parts: List[ContentPart] = Field(default_factory=list, description="List of extracted parts")
summary: Optional[Dict[str, Any]] = Field(default=None, description="Optional extraction summary") summary: Optional[Dict[str, Any]] = Field(default=None, description="Optional extraction summary")
udm: Optional[Any] = Field(default=None, description="Optional UdmDocument (when outputFormat is udm or both)")
class ChunkResult(BaseModel): class ChunkResult(BaseModel):
@ -76,6 +77,19 @@ class ExtractionOptions(BaseModel):
prompt: str = Field(default="", description="Extraction prompt for AI processing") prompt: str = Field(default="", description="Extraction prompt for AI processing")
processDocumentsIndividually: bool = Field(default=True, description="Process each document separately") processDocumentsIndividually: bool = Field(default=True, description="Process each document separately")
# Selects the shape of the extraction result: a flat list of parts, a
# hierarchical UDM document, or both ("parts" stays populated either way).
outputFormat: Literal["parts", "udm", "both"] = Field(
    default="parts",
    description="Return flat parts only, UDM tree only, or both (parts always populated; udm when udm or both)",
)
# Controls how much payload is inlined: everything, structure-only
# skeleton, or just references pointing at the source files.
outputDetail: Literal["full", "structure", "references"] = Field(
    default="full",
    description="Extraction detail: full inline data, skeleton without raw payloads, or file references only",
)
# When True, archive members are listed with metadata only instead of
# being recursively extracted.
lazyContainer: bool = Field(
    default=False,
    description="For archives: emit file entries with metadata only (no nested extraction)",
)
# Image processing parameters # Image processing parameters
imageMaxPixels: int = Field(default=1024 * 1024, ge=1, description="Maximum pixels for image processing") imageMaxPixels: int = Field(default=1024 * 1024, ge=1, description="Maximum pixels for image processing")
imageQuality: int = Field(default=85, ge=1, le=100, description="Image quality (1-100)") imageQuality: int = Field(default=85, ge=1, le=100, description="Image quality (1-100)")

View file

@ -6,7 +6,7 @@ A FeatureDataSource links a FeatureInstance table (DATA_OBJECT) to a workspace
so the agent can query structured feature data (e.g. TrusteePosition rows). so the agent can query structured feature data (e.g. TrusteePosition rows).
""" """
from typing import Dict, Optional from typing import Dict, List, Optional
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.i18nRegistry import i18nModel from modules.shared.i18nRegistry import i18nModel
@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="FK to FeatureInstance", description="FK to FeatureInstance",
json_schema_extra={"label": "Feature-Instanz"}, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
featureCode: str = Field( featureCode: str = Field(
description="Feature code (e.g. trustee, commcoach)", description="Feature code (e.g. trustee, commcoach)",
json_schema_extra={"label": "Feature"}, json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
) )
tableName: str = Field( tableName: str = Field(
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)", description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
default="", default="",
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandant"}, json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
userId: str = Field( userId: str = Field(
default="", default="",
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer"}, json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
workspaceInstanceId: str = Field( workspaceInstanceId: str = Field(
description="Workspace instance where this source is used", description="Workspace feature instance where this source is used",
json_schema_extra={"label": "Workspace"}, json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
scope: str = Field( scope: str = Field(
default="personal", default="personal",
@ -70,6 +70,11 @@ class FeatureDataSource(PowerOnModel):
description="Whether this data source should be neutralized before AI processing", description="Whether this data source should be neutralized before AI processing",
json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}, json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
) )
neutralizeFields: Optional[List[str]] = Field(
default=None,
description="Column names whose values are replaced with placeholders before AI processing",
json_schema_extra={"label": "Zu neutralisierende Felder", "frontend_type": "multiselect", "frontend_readonly": False, "frontend_required": False},
)
recordFilter: Optional[Dict[str, str]] = Field( recordFilter: Optional[Dict[str, str]] = Field(
default=None, default=None,
description="Record-level filter applied when querying this table, e.g. {'sessionId': 'abc-123'}", description="Record-level filter applied when querying this table, e.g. {'sessionId': 'abc-123'}",

View file

@ -38,11 +38,23 @@ class FeatureInstance(PowerOnModel):
) )
featureCode: str = Field( featureCode: str = Field(
description="FK -> Feature.code", description="FK -> Feature.code",
json_schema_extra={"label": "Feature", "frontend_type": "select", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Feature",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"},
},
) )
mandateId: str = Field( mandateId: str = Field(
description="FK -> Mandate.id (CASCADE DELETE)", description="FK -> Mandate.id (CASCADE DELETE)",
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
label: str = Field( label: str = Field(
default="", default="",

View file

@ -24,15 +24,59 @@ class FileFolder(PowerOnModel):
parentId: Optional[str] = Field( parentId: Optional[str] = Field(
default=None, default=None,
description="Parent folder ID (null = root)", description="Parent folder ID (null = root)",
json_schema_extra={"label": "Uebergeordneter Ordner", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False}, json_schema_extra={
"label": "Uebergeordneter Ordner",
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
},
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="Mandate context", description="Mandate context",
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}, json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Feature instance context", description="Feature instance context",
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}, json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
scope: str = Field(
default="personal",
description="Data visibility scope: personal, featureInstance, mandate, global. Inherited by files in this folder.",
json_schema_extra={
"label": "Sichtbarkeit",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": False,
"frontend_options": [
{"value": "personal", "label": "Persönlich"},
{"value": "featureInstance", "label": "Feature-Instanz"},
{"value": "mandate", "label": "Mandant"},
{"value": "global", "label": "Global"},
],
},
)
neutralize: bool = Field(
default=False,
description="Whether files in this folder should be neutralized before AI processing. Inherited by new/moved files.",
json_schema_extra={
"label": "Neutralisieren",
"frontend_type": "checkbox",
"frontend_readonly": False,
"frontend_required": False,
},
) )

View file

@ -33,6 +33,7 @@ class FileItem(PowerOnModel):
"frontend_fk_source": "/api/mandates/", "frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label", "frontend_fk_display_field": "label",
"fk_model": "Mandate", "fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
@ -46,6 +47,7 @@ class FileItem(PowerOnModel):
"frontend_fk_source": "/api/features/instances", "frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label", "frontend_fk_display_field": "label",
"fk_model": "FeatureInstance", "fk_model": "FeatureInstance",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
mimeType: str = Field( mimeType: str = Field(
@ -68,7 +70,13 @@ class FileItem(PowerOnModel):
folderId: Optional[str] = Field( folderId: Optional[str] = Field(
default=None, default=None,
description="ID of the parent folder", description="ID of the parent folder",
json_schema_extra={"label": "Ordner-ID", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False}, json_schema_extra={
"label": "Ordner-ID",
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
},
) )
description: Optional[str] = Field( description: Optional[str] = Field(
default=None, default=None,

View file

@ -32,12 +32,24 @@ class Invitation(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
description="FK → Mandate.id - Target mandate for the invitation", description="FK → Mandate.id - Target mandate for the invitation",
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Optional FK → FeatureInstance.id - Direct access to specific feature", description="Optional FK → FeatureInstance.id - Direct access to specific feature",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False} json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
roleIds: List[str] = Field( roleIds: List[str] = Field(
default_factory=list, default_factory=list,
@ -63,7 +75,13 @@ class Invitation(PowerOnModel):
usedBy: Optional[str] = Field( usedBy: Optional[str] = Field(
default=None, default=None,
description="User ID of the person who used the invitation", description="User ID of the person who used the invitation",
json_schema_extra={"label": "Verwendet von", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False} json_schema_extra={
"label": "Verwendet von",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
usedAt: Optional[float] = Field( usedAt: Optional[float] = Field(
default=None, default=None,

View file

@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID"}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
mandateId: str = Field( mandateId: str = Field(
default="", default="",
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID"}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
fileName: str = Field( fileName: str = Field(
description="Original file name", description="Original file name",
@ -116,16 +116,16 @@ class ContentChunk(PowerOnModel):
) )
fileId: str = Field( fileId: str = Field(
description="FK to the source file", description="FK to the source file",
json_schema_extra={"label": "Datei-ID"}, json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
) )
userId: str = Field( userId: str = Field(
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID"}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
contentType: str = Field( contentType: str = Field(
description="Content type: text, image, videostream, audiostream, other", description="Content type: text, image, videostream, audiostream, other",
@ -214,16 +214,16 @@ class WorkflowMemory(PowerOnModel):
) )
workflowId: str = Field( workflowId: str = Field(
description="FK to the workflow", description="FK to the workflow",
json_schema_extra={"label": "Workflow-ID"}, json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
) )
userId: str = Field( userId: str = Field(
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID"}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
key: str = Field( key: str = Field(
description="Key identifier (e.g. 'entity:companyName')", description="Key identifier (e.g. 'entity:companyName')",

View file

@ -34,6 +34,7 @@ class UserMandate(PowerOnModel):
"frontend_fk_source": "/api/users/", "frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username", "frontend_fk_display_field": "username",
"fk_model": "User", "fk_model": "User",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
mandateId: str = Field( mandateId: str = Field(
@ -46,6 +47,7 @@ class UserMandate(PowerOnModel):
"frontend_fk_source": "/api/mandates/", "frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label", "frontend_fk_display_field": "label",
"fk_model": "Mandate", "fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
enabled: bool = Field( enabled: bool = Field(
@ -68,11 +70,27 @@ class FeatureAccess(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="FK → User.id (CASCADE DELETE)", description="FK → User.id (CASCADE DELETE)",
json_schema_extra={"label": "Benutzer", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/users/", "frontend_fk_display_field": "username"} json_schema_extra={
"label": "Benutzer",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="FK → FeatureInstance.id (CASCADE DELETE)", description="FK → FeatureInstance.id (CASCADE DELETE)",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"} json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
enabled: bool = Field( enabled: bool = Field(
default=True, default=True,
@ -94,11 +112,25 @@ class UserMandateRole(PowerOnModel):
) )
userMandateId: str = Field( userMandateId: str = Field(
description="FK → UserMandate.id (CASCADE DELETE)", description="FK → UserMandate.id (CASCADE DELETE)",
json_schema_extra={"label": "Benutzer-Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Benutzer-Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "UserMandate"},
},
) )
roleId: str = Field( roleId: str = Field(
description="FK → Role.id (CASCADE DELETE)", description="FK → Role.id (CASCADE DELETE)",
json_schema_extra={"label": "Rolle", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/rbac/roles", "frontend_fk_display_field": "roleLabel"} json_schema_extra={
"label": "Rolle",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
) )
@ -115,9 +147,23 @@ class FeatureAccessRole(PowerOnModel):
) )
featureAccessId: str = Field( featureAccessId: str = Field(
description="FK → FeatureAccess.id (CASCADE DELETE)", description="FK → FeatureAccess.id (CASCADE DELETE)",
json_schema_extra={"label": "Feature-Zugang", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Feature-Zugang",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureAccess"},
},
) )
roleId: str = Field( roleId: str = Field(
description="FK → Role.id (CASCADE DELETE)", description="FK → Role.id (CASCADE DELETE)",
json_schema_extra={"label": "Rolle", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/rbac/roles", "frontend_fk_display_field": "roleLabel"} json_schema_extra={
"label": "Rolle",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
) )

View file

@ -64,6 +64,7 @@ class MessagingSubscription(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -73,6 +74,7 @@ class MessagingSubscription(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
description: Optional[str] = Field( description: Optional[str] = Field(
@ -129,6 +131,7 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -138,6 +141,7 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
subscriptionId: str = Field( subscriptionId: str = Field(
@ -156,6 +160,7 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Benutzer-ID", "label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
channel: MessagingChannel = Field( channel: MessagingChannel = Field(
@ -244,6 +249,7 @@ class MessagingDelivery(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Benutzer-ID", "label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
channel: MessagingChannel = Field( channel: MessagingChannel = Field(

View file

@ -60,7 +60,13 @@ class UserNotification(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="Target user ID for this notification", description="Target user ID for this notification",
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True} json_schema_extra={
"label": "Benutzer",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
type: NotificationType = Field( type: NotificationType = Field(

View file

@ -57,12 +57,30 @@ class Role(PowerOnModel):
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="FK → Mandate.id (CASCADE DELETE). Null = Global/Template role.", description="FK → Mandate.id (CASCADE DELETE). Null = Global/Template role.",
json_schema_extra={"label": "Mandant", "frontend_type": "select", "frontend_readonly": True, "frontend_visible": True, "frontend_required": False, "frontend_fk_source": "/api/mandates/", "frontend_fk_display_field": "label"} json_schema_extra={
"label": "Mandant",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_visible": True,
"frontend_required": False,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="FK → FeatureInstance.id (CASCADE DELETE). Null = Mandate-level or Global role.", description="FK → FeatureInstance.id (CASCADE DELETE). Null = Mandate-level or Global role.",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "select", "frontend_readonly": True, "frontend_visible": True, "frontend_required": False, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"} json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_visible": True,
"frontend_required": False,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
featureCode: Optional[str] = Field( featureCode: Optional[str] = Field(
default=None, default=None,
@ -92,7 +110,15 @@ class AccessRule(PowerOnModel):
) )
roleId: str = Field( roleId: str = Field(
description="FK → Role.id (CASCADE DELETE!)", description="FK → Role.id (CASCADE DELETE!)",
json_schema_extra={"label": "Rolle", "frontend_type": "select", "frontend_readonly": True, "frontend_required": True, "frontend_fk_source": "/api/rbac/roles", "frontend_fk_display_field": "roleLabel"} json_schema_extra={
"label": "Rolle",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
) )
context: AccessRuleContext = Field( context: AccessRuleContext = Field(
description="Context type: DATA (database), UI (interface), RESOURCE (system resources). IMMUTABLE!", description="Context type: DATA (database), UI (interface), RESOURCE (system resources). IMMUTABLE!",

View file

@ -47,7 +47,7 @@ class Token(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
..., ...,
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
authority: AuthAuthority = Field( authority: AuthAuthority = Field(
..., ...,
@ -56,7 +56,7 @@ class Token(PowerOnModel):
connectionId: Optional[str] = Field( connectionId: Optional[str] = Field(
None, None,
description="ID of the connection this token belongs to", description="ID of the connection this token belongs to",
json_schema_extra={"label": "Verbindungs-ID"}, json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
) )
tokenPurpose: Optional[TokenPurpose] = Field( tokenPurpose: Optional[TokenPurpose] = Field(
default=None, default=None,
@ -92,7 +92,7 @@ class Token(PowerOnModel):
revokedBy: Optional[str] = Field( revokedBy: Optional[str] = Field(
None, None,
description="User ID who revoked the token (admin/self)", description="User ID who revoked the token (admin/self)",
json_schema_extra={"label": "Widerrufen von"}, json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
reason: Optional[str] = Field( reason: Optional[str] = Field(
None, None,
@ -134,7 +134,13 @@ class AuthEvent(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="ID of the user this event belongs to", description="ID of the user this event belongs to",
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
eventType: str = Field( eventType: str = Field(
description="Type of authentication event (e.g., 'login', 'logout', 'token_refresh')", description="Type of authentication event (e.g., 'login', 'logout', 'token_refresh')",

View file

@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
..., ...,
description="Foreign key to Mandate", description="Foreign key to Mandate",
json_schema_extra={"label": "Mandanten-ID"}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
planKey: str = Field( planKey: str = Field(
..., ...,

View file

@ -114,7 +114,13 @@ class UserConnection(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="ID of the user this connection belongs to", description="ID of the user this connection belongs to",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Benutzer-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
authority: AuthAuthority = Field( authority: AuthAuthority = Field(
description="Authentication authority", description="Authentication authority",
@ -191,7 +197,6 @@ class UserConnection(PowerOnModel):
json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"}, json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"},
) )
@computed_field
@computed_field @computed_field
@property @property
def connectionReference(self) -> str: def connectionReference(self) -> str:
@ -369,11 +374,14 @@ class UserVoicePreferences(PowerOnModel):
description="Primary key", description="Primary key",
json_schema_extra={"label": "ID"}, json_schema_extra={"label": "ID"},
) )
userId: str = Field(description="User ID", json_schema_extra={"label": "Benutzer-ID"}) userId: str = Field(
description="User ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
)
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="Mandate scope (None = global for user)", description="Mandate scope (None = global for user)",
json_schema_extra={"label": "Mandanten-ID"}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
sttLanguage: str = Field( sttLanguage: str = Field(
default="de-DE", default="de-DE",

View file

@ -0,0 +1,316 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Unified Document Model (UDM) — hierarchical document tree and ContentPart bridge."""
from __future__ import annotations
import uuid
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
from pydantic import BaseModel, Field
from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
class UdmMetadata(BaseModel):
    """Descriptive metadata attached to any UDM node (archive, document, structural node, or block)."""
    title: Optional[str] = None
    author: Optional[str] = None
    # Timestamps are kept as opaque strings — presumably ISO-8601, not validated here; TODO confirm.
    createdAt: Optional[str] = None
    modifiedAt: Optional[str] = None
    sourcePath: str = ""
    tags: List[str] = Field(default_factory=list)
    # Free-form extension bag for extractor-specific keys (see custom= in _contentPartToBlock).
    custom: Dict[str, Any] = Field(default_factory=dict)
class UdmBoundingBox(BaseModel):
    """Axis-aligned rectangle locating a content block on its page or slide."""
    x: float = 0.0
    y: float = 0.0
    width: float = 0.0
    height: float = 0.0
    # Measurement unit for all four coordinates; defaults to points.
    unit: Literal["px", "pt", "mm"] = "pt"
class UdmPosition(BaseModel):
    """Ordering and optional spatial placement of a block within its structural node."""
    # Sequential position of the block among its siblings.
    index: int = 0
    # 1-based page number (converters add 1 to the extractor's 0-based index).
    page: Optional[int] = None
    row: Optional[int] = None
    col: Optional[int] = None
    bbox: Optional[UdmBoundingBox] = None
class UdmContentBlock(BaseModel):
    """Leaf node of the UDM tree: one piece of extracted content."""
    id: str
    contentType: Literal["text", "image", "table", "code", "media", "link", "formula"]
    # Inline payload; cleared for structure-only or reference-only output details.
    raw: str = ""
    # External file reference, promoted from attributes/metadata when raw is stripped.
    fileRef: Optional[str] = None
    mimeType: Optional[str] = None
    # NOTE(review): presumably the programming language for code blocks — never set in this module; confirm.
    language: Optional[str] = None
    attributes: Dict[str, Any] = Field(default_factory=dict)
    position: UdmPosition = Field(default_factory=lambda: UdmPosition(index=0))
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
class UdmStructuralNode(BaseModel):
    """Intermediate grouping level — one page, section, slide, or sheet — holding content blocks."""
    id: str
    role: Literal["page", "section", "slide", "sheet"]
    # Ordering of this node among its siblings in the document.
    index: int
    # Human-readable name, e.g. "page_1", "slide_2", or a sheet name.
    label: Optional[str] = None
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
    children: List[UdmContentBlock] = Field(default_factory=list)
class UdmDocument(BaseModel):
    """Root of one document's UDM tree; children are structural nodes."""
    id: str
    role: Literal["document"] = "document"
    sourceType: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = "unknown"
    sourcePath: str = ""
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
    children: List[UdmStructuralNode] = Field(default_factory=list)
class UdmArchive(BaseModel):
    """Container for archives; children may be nested archives or documents."""
    id: str
    role: Literal["archive"] = "archive"
    sourceType: Literal["zip", "tar", "gz", "unknown"] = "unknown"
    sourcePath: str = ""
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
    children: List[Union[UdmArchive, UdmDocument]] = Field(default_factory=list)
def _newId() -> str:
return str(uuid.uuid4())
def _mapTypeGroupToContentType(typeGroup: str) -> Literal["text", "image", "table", "code", "media", "link", "formula"]:
if typeGroup == "image":
return "image"
if typeGroup == "table":
return "table"
if typeGroup in ("code",):
return "code"
if typeGroup in ("binary", "audiostream", "videostream"):
return "media"
if typeGroup in ("structure", "text", "container"):
return "text"
return "text"
def _contentPartToBlock(part: ContentPart, blockIndex: int) -> UdmContentBlock:
    """Convert a single flat ContentPart into a UDM content block at position *blockIndex*."""
    meta = part.metadata or {}
    ctx = meta.get("contextRef") or {}
    if not isinstance(ctx, dict):
        ctx = {}  # defensive: contextRef may hold a non-dict value
    # Page/slide hints may live directly on the metadata or inside contextRef.
    rawPage = ctx.get("pageIndex") if meta.get("pageIndex") is None else meta.get("pageIndex")
    rawSlide = ctx.get("slideIndex") if meta.get("slide_number") is None else meta.get("slide_number")
    # The extractor stores pages 0-based; UdmPosition.page is 1-based.
    oneBasedPage = (int(rawPage) + 1) if isinstance(rawPage, int) else None
    position = UdmPosition(index=blockIndex, page=oneBasedPage)
    attributes: Dict[str, Any] = {
        "typeGroup": part.typeGroup,
        "label": part.label,
        "parentId": part.parentId,
    }
    if ctx:
        attributes["contextRef"] = ctx
    if isinstance(rawSlide, int):
        attributes["slideIndex"] = rawSlide
    # Keep all metadata except contextRef (already stored in attributes).
    customMeta = {key: value for key, value in meta.items() if key != "contextRef"}
    return UdmContentBlock(
        id=part.id,
        contentType=_mapTypeGroupToContentType(part.typeGroup),
        raw=part.data or "",
        mimeType=part.mimeType or None,
        attributes=attributes,
        position=position,
        metadata=UdmMetadata(
            sourcePath=meta.get("containerPath", "") or "",
            custom=customMeta,
        ),
    )
def _groupKeyForPart(part: ContentPart) -> Tuple[str, int, str]:
"""Return (role, structural_index, label) for grouping parts into structural nodes."""
meta = part.metadata or {}
ctx = meta.get("contextRef") or {}
if not isinstance(ctx, dict):
ctx = {}
if "pageIndex" in meta or "pageIndex" in ctx:
pi = meta.get("pageIndex", ctx.get("pageIndex", 0))
try:
idx = int(pi)
except (TypeError, ValueError):
idx = 0
return ("page", idx, f"page_{idx + 1}")
if meta.get("slide_number") is not None:
try:
idx = int(meta["slide_number"]) - 1
except (TypeError, ValueError):
idx = 0
return ("slide", max(0, idx), f"slide_{idx + 1}")
if ctx.get("slideIndex") is not None:
try:
idx = int(ctx.get("slideIndex", 0))
except (TypeError, ValueError):
idx = 0
return ("slide", max(0, idx), f"slide_{idx + 1}")
if meta.get("sheet") or ctx.get("sheetName"):
name = str(meta.get("sheet") or ctx.get("sheetName") or "sheet")
return ("sheet", abs(hash(name)) % (10**9), name)
if ctx.get("sectionId") or meta.get("sectionId"):
sid = str(ctx.get("sectionId") or meta.get("sectionId") or "section")
return ("section", abs(hash(sid)) % (10**9), sid)
if part.typeGroup == "container":
return ("section", 0, "root")
return ("section", 0, "body")
# Source types accepted by UdmDocument.sourceType; anything else is coerced to "unknown".
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
def _contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
    """Convert a flat ContentPart list into a UdmDocument using structural heuristics.

    Root-level container parts are dropped (they carry no content of their own),
    the remaining parts are grouped by _groupKeyForPart into pages/slides/sheets/
    sections, and each group becomes one UdmStructuralNode with ordered children.

    Fix vs. previous revision: removed the dead local ``rootIds`` — it was
    populated but never read (``skipIds`` alone drives the filtering).
    """
    parts = list(extracted.parts or [])
    st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
        sourceType if sourceType in _VALID_DOC_SOURCES else "unknown"  # type: ignore[assignment]
    )
    doc = UdmDocument(
        id=extracted.id or _newId(),
        sourceType=st,
        sourcePath=sourcePath,
        metadata=UdmMetadata(sourcePath=sourcePath),
    )
    if not parts:
        return doc
    # Root-level containers are pure grouping nodes — exclude them from content.
    skipIds = {p.id for p in parts if p.typeGroup == "container" and p.parentId is None}
    contentParts = [p for p in parts if p.id not in skipIds and p.typeGroup != "container"]
    if not contentParts:
        # Fall back to the remaining (nested container) parts if nothing else exists.
        contentParts = [p for p in parts if p.id not in skipIds]
    if not contentParts:
        return doc
    groups: Dict[Tuple[str, int, str], List[ContentPart]] = {}
    for p in contentParts:
        groups.setdefault(_groupKeyForPart(p), []).append(p)
    # Deterministic node order: role name, then structural index, then label.
    sortedKeys = sorted(groups.keys(), key=lambda k: (k[0], k[1], k[2]))
    for gi, key in enumerate(sortedKeys):
        role, structIdx, label = key
        node = UdmStructuralNode(
            id=_newId(),
            role=role if role in ("page", "section", "slide", "sheet") else "section",
            # Sections are renumbered sequentially; pages/slides/sheets keep their own index.
            index=gi if role == "section" else structIdx,
            label=label,
            metadata=UdmMetadata(sourcePath=sourcePath),
        )
        for bi, part in enumerate(groups[key]):
            node.children.append(_contentPartToBlock(part, bi))
        doc.children.append(node)
    return doc
def _udmToContentParts(document: UdmDocument) -> ContentExtracted:
    """Flatten UdmDocument back to ContentExtracted for backward compatibility."""
    rootId = _newId()
    # A synthetic container part anchors every flattened block under one root.
    root = ContentPart(
        id=rootId,
        parentId=None,
        label=document.sourceType or "document",
        typeGroup="container",
        mimeType="application/octet-stream",
        data="",
        metadata={"udmRoot": True, "sourcePath": document.sourcePath},
    )
    flattened: List[ContentPart] = [root]
    for structNode in document.children:
        for block in structNode.children:
            blockMeta = dict(block.metadata.custom) if block.metadata else {}
            # Record where in the tree this block lived, without overwriting extractor keys.
            blockMeta.setdefault("structuralRole", structNode.role)
            blockMeta.setdefault("structuralIndex", structNode.index)
            flattened.append(
                ContentPart(
                    id=block.id,
                    parentId=rootId,
                    label=block.attributes.get("label", structNode.label or ""),
                    typeGroup=str(block.attributes.get("typeGroup", "text")),
                    mimeType=block.mimeType or "text/plain",
                    data=block.raw,
                    metadata=blockMeta,
                )
            )
    return ContentExtracted(id=document.id, parts=flattened)
def _stripUdmRaw(udm: UdmDocument) -> UdmDocument:
    """Return a deep copy with all content block `raw` cleared (structure-only preview)."""
    preview = udm.model_copy(deep=True)
    for block in (b for node in preview.children for b in node.children):
        block.raw = ""
    return preview
def _stripUdmForReferences(udm: UdmDocument) -> UdmDocument:
    """Deep-copy *udm*, drop all inline payloads, and promote a `fileRef`
    found in block attributes (or custom metadata) onto the block itself.

    Blocks that already carry an explicit `fileRef` are left untouched.
    """
    stripped = udm.model_copy(deep=True)
    for node in stripped.children:
        for block in node.children:
            block.raw = ""
            if block.fileRef:
                continue  # explicit reference already present
            candidate = block.attributes.get("fileRef")
            if not candidate and block.metadata and block.metadata.custom:
                candidate = block.metadata.custom.get("fileRef")
            if isinstance(candidate, str) and candidate:
                block.fileRef = candidate
    return stripped
def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
    """Apply the requested output-detail level to a UDM document.

    "structure" removes all inline content, "references" keeps only file
    references; any other value returns the document unchanged.
    """
    handlers = {
        "structure": _stripUdmRaw,
        "references": _stripUdmForReferences,
    }
    handler = handlers.get(detail)
    return handler(udm) if handler else udm
def _mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
m = (mimeType or "").lower()
fn = (fileName or "").lower()
if m == "application/pdf" or fn.endswith(".pdf"):
return "pdf"
if "wordprocessingml" in m or fn.endswith(".docx"):
return "docx"
if "presentationml" in m or fn.endswith((".pptx", ".ppt")):
return "pptx"
if "spreadsheetml" in m or fn.endswith((".xlsx", ".xlsm")):
return "xlsx"
if m == "text/html" or fn.endswith((".html", ".htm")):
return "html"
if m == "application/octet-stream" or not m:
return "binary"
return "unknown"

View file

@ -22,7 +22,13 @@ class Prompt(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
default="", default="",
description="ID of the mandate this prompt belongs to", description="ID of the mandate this prompt belongs to",
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}, json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
isSystem: bool = Field( isSystem: bool = Field(
default=False, default=False,

View file

@ -21,8 +21,12 @@ from modules.datamodels.datamodelUam import AccessLevel
from modules.datamodels.datamodelChat import UserInputRequest from modules.datamodels.datamodelChat import UserInputRequest
from modules.datamodels.datamodelBase import PowerOnModel from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
chatbotDatabase = "poweron_chatbot"
registerDatabase(chatbotDatabase)
# ============================================================================= # =============================================================================
# Chatbot-specific Pydantic models for poweron_chatbot (per-instance isolation) # Chatbot-specific Pydantic models for poweron_chatbot (per-instance isolation)
# ============================================================================= # =============================================================================
@ -392,7 +396,7 @@ class ChatObjects:
try: try:
# Get configuration values with defaults # Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_chatbot" dbDatabase = chatbotDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -11,6 +11,7 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getIsoTimestamp from modules.shared.timeUtils import getIsoTimestamp
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import resolveText, t from modules.shared.i18nRegistry import resolveText, t
@ -26,6 +27,9 @@ from .datamodelCommcoach import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
commcoachDatabase = "poweron_commcoach"
registerDatabase(commcoachDatabase)
_interfaces = {} _interfaces = {}
@ -51,7 +55,7 @@ class CommcoachObjects:
self.userId = str(currentUser.id) if currentUser else "system" self.userId = str(currentUser.id) if currentUser else "system"
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_commcoach" dbDatabase = commcoachDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -150,7 +150,7 @@ async def checkAndAwardBadges(interface, userId: str, mandateId: str, instanceId
except Exception: except Exception:
allContexts = [] allContexts = []
completedTasks = interface.getCompletedTaskCount(userId) if hasattr(interface, 'getCompletedTaskCount') else 0 completedTasks = interface.getCompletedTaskCount(userId, instanceId) if hasattr(interface, 'getCompletedTaskCount') else 0
if completedTasks >= 10: if completedTasks >= 10:
badgesToCheck.append(("task_completer", True)) badgesToCheck.append(("task_completer", True))

View file

@ -101,6 +101,51 @@ BUILTIN_PERSONAS: List[Dict[str, Any]] = [
"gender": "m", "gender": "m",
"category": "builtin", "category": "builtin",
}, },
# --- Immobilien / Liegenschaftsverwaltung (PWG-Kontext) ---
{
"key": "tenant_payment_arrears_m",
"label": "Mieter mit Zahlungsrückstand",
"description": "René Bachmann, Mieter einer 3.5-Zimmer-Wohnung. Seit drei Monaten im Mietrückstand, hat zwei Mahnungen "
"erhalten und ist genervt vom Druck. Fühlt sich ungerecht behandelt, verweist auf persönliche Schwierigkeiten "
"(Jobverlust, Scheidung). Reagiert defensiv und gereizt auf Forderungen. Braucht empathisches Gegenüber, "
"das gleichzeitig klar die Zahlungspflicht kommuniziert. Kann sich auf eine Ratenzahlung einlassen, "
"wenn er sich respektiert fühlt und einen konkreten Plan sieht.",
"gender": "m",
"category": "builtin",
},
{
"key": "tenant_utility_costs_f",
"label": "Mieterin mit Nebenkostenfragen",
"description": "Fatima El-Amin, Mieterin seit vier Jahren. Hat die jährliche Nebenkostenabrechnung erhalten und versteht "
"mehrere Positionen nicht (Hauswartung, Allgemeinstrom, Verwaltungskosten). Emotional aufgebracht, weil die "
"Nachzahlung unerwartet hoch ist. Vermutet Fehler oder unfaire Verteilung. Spricht schnell und unterbricht. "
"Braucht geduldige, verständliche Erklärungen ohne Fachjargon. Beruhigt sich, wenn man Positionen einzeln "
"durchgeht und auf die Rechtsgrundlage (Mietvertrag, Nebenkosten-Verordnung) verweist.",
"gender": "f",
"category": "builtin",
},
{
"key": "new_tenant_move_in_m",
"label": "Neuer Mieter (Einzug)",
"description": "Luca Steiner, zieht nächste Woche in seine erste eigene Wohnung ein. Aufgeregt aber unsicher — hat viele "
"Fragen zu Wohnungsübergabe, Schlüsselabholung, Hausordnung, Kautionseinzahlung und Anmeldung bei Werken "
"(Strom, Internet). Höflich und kooperativ, braucht aber klare, schrittweise Informationen. Fragt mehrfach "
"nach, wenn etwas unklar ist. Reagiert sehr positiv auf eine willkommene, strukturierte Begleitung.",
"gender": "m",
"category": "builtin",
},
{
"key": "difficult_neighbor_noise_m",
"label": "Nachbar mit Lärmbeschwerde",
"description": "Kurt Zürcher, langjähriger Mieter im Erdgeschoss. Beschwert sich massiv über Lärm aus der Wohnung darüber "
"(Musik abends, Kindergetrampel, Waschmaschine nach 22 Uhr). Hat bereits ein Lärmprotokoll geführt und "
"droht mit Mietminderung und Anwalt. Spricht laut, ist aufgebracht und fühlt sich von der Verwaltung "
"nicht ernst genommen. Erwartet sofortige Massnahmen. Kann deeskaliert werden, wenn man sein Anliegen "
"ernst nimmt, konkrete nächste Schritte aufzeigt (Gespräch mit Nachbar, schriftliche Verwarnung) und "
"auf die Hausordnung sowie seine Rechte und Pflichten verweist.",
"gender": "m",
"category": "builtin",
},
] ]

View file

@ -71,6 +71,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_fk_source": "/api/mandates/", "frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label", "frontend_fk_display_field": "label",
"fk_model": "Mandate", "fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -83,6 +84,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_fk_source": "/api/features/instances", "frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label", "frontend_fk_display_field": "label",
"fk_model": "FeatureInstance", "fk_model": "FeatureInstance",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
label: str = Field( label: str = Field(
@ -107,7 +109,13 @@ class AutoWorkflow(PowerOnModel):
templateSourceId: Optional[str] = Field( templateSourceId: Optional[str] = Field(
default=None, default=None,
description="ID of the template this workflow was created from", description="ID of the template this workflow was created from",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Vorlagen-Quelle"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Vorlagen-Quelle",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
) )
templateScope: Optional[str] = Field( templateScope: Optional[str] = Field(
default=None, default=None,
@ -122,7 +130,13 @@ class AutoWorkflow(PowerOnModel):
currentVersionId: Optional[str] = Field( currentVersionId: Optional[str] = Field(
default=None, default=None,
description="ID of the currently published AutoVersion", description="ID of the currently published AutoVersion",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Aktuelle Version"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Aktuelle Version",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
},
) )
active: bool = Field( active: bool = Field(
default=True, default=True,
@ -165,7 +179,13 @@ class AutoVersion(PowerOnModel):
) )
workflowId: str = Field( workflowId: str = Field(
description="FK -> AutoWorkflow", description="FK -> AutoWorkflow",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Workflow-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
) )
versionNumber: int = Field( versionNumber: int = Field(
default=1, default=1,
@ -195,7 +215,13 @@ class AutoVersion(PowerOnModel):
publishedBy: Optional[str] = Field( publishedBy: Optional[str] = Field(
default=None, default=None,
description="User ID who published this version", description="User ID who published this version",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht von"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Veröffentlicht von",
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
@ -212,7 +238,13 @@ class AutoRun(PowerOnModel):
) )
workflowId: str = Field( workflowId: str = Field(
description="Workflow ID", description="Workflow ID",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Workflow-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
) )
label: Optional[str] = Field( label: Optional[str] = Field(
default=None, default=None,
@ -230,17 +262,30 @@ class AutoRun(PowerOnModel):
"frontend_fk_source": "/api/mandates/", "frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label", "frontend_fk_display_field": "label",
"fk_model": "Mandate", "fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
ownerId: Optional[str] = Field( ownerId: Optional[str] = Field(
default=None, default=None,
description="User ID who triggered this run", description="User ID who triggered this run",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Auslöser"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Auslöser",
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
versionId: Optional[str] = Field( versionId: Optional[str] = Field(
default=None, default=None,
description="AutoVersion ID used for this run", description="AutoVersion ID used for this run",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Versions-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Versions-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
},
) )
status: str = Field( status: str = Field(
default=AutoRunStatus.RUNNING.value, default=AutoRunStatus.RUNNING.value,
@ -307,7 +352,13 @@ class AutoStepLog(PowerOnModel):
) )
runId: str = Field( runId: str = Field(
description="FK -> AutoRun", description="FK -> AutoRun",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Lauf-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Lauf-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
},
) )
nodeId: str = Field( nodeId: str = Field(
description="Node ID in the graph", description="Node ID in the graph",
@ -377,11 +428,23 @@ class AutoTask(PowerOnModel):
) )
runId: str = Field( runId: str = Field(
description="FK -> AutoRun", description="FK -> AutoRun",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Lauf-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Lauf-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
},
) )
workflowId: str = Field( workflowId: str = Field(
description="Workflow ID", description="Workflow ID",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Workflow-ID"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
) )
nodeId: str = Field( nodeId: str = Field(
description="Node ID in the graph", description="Node ID in the graph",
@ -399,7 +462,13 @@ class AutoTask(PowerOnModel):
assigneeId: Optional[str] = Field( assigneeId: Optional[str] = Field(
default=None, default=None,
description="User ID assigned to complete the task", description="User ID assigned to complete the task",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False, "label": "Zugewiesen an"}, json_schema_extra={
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"label": "Zugewiesen an",
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
status: str = Field( status: str = Field(
default=AutoTaskStatus.PENDING.value, default=AutoTaskStatus.PENDING.value,

View file

@ -38,10 +38,12 @@ from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
from modules.features.graphicalEditor.entryPoints import normalize_invocations_list from modules.features.graphicalEditor.entryPoints import normalize_invocations_list
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
_GREENFIELD_DB = "poweron_graphicaleditor" graphicalEditorDatabase = "poweron_graphicaleditor"
registerDatabase(graphicalEditorDatabase)
_CALLBACK_WORKFLOW_CHANGED = "graphicalEditor.workflow.changed" _CALLBACK_WORKFLOW_CHANGED = "graphicalEditor.workflow.changed"
@ -68,7 +70,7 @@ def getAllWorkflowsForScheduling() -> List[Dict[str, Any]]:
Used by the scheduler to register cron jobs. Does not filter by mandate/instance. Used by the scheduler to register cron jobs. Does not filter by mandate/instance.
""" """
dbHost = APP_CONFIG.get("DB_HOST", "localhost") dbHost = APP_CONFIG.get("DB_HOST", "localhost")
dbDatabase = _GREENFIELD_DB dbDatabase = graphicalEditorDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -155,7 +157,7 @@ class GraphicalEditorObjects:
def _init_db(self): def _init_db(self):
"""Initialize database connection to poweron_graphicaleditor (Greenfield).""" """Initialize database connection to poweron_graphicaleditor (Greenfield)."""
dbHost = APP_CONFIG.get("DB_HOST", "localhost") dbHost = APP_CONFIG.get("DB_HOST", "localhost")
dbDatabase = _GREENFIELD_DB dbDatabase = graphicalEditorDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -174,12 +176,11 @@ class GraphicalEditorObjects:
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
def getWorkflows(self, active: Optional[bool] = None) -> List[Dict[str, Any]]: def getWorkflows(self, active: Optional[bool] = None) -> List[Dict[str, Any]]:
"""Get all workflows for this mandate and feature instance.""" """Get all workflows for this mandate (cross-instance)."""
if not self.db._ensureTableExists(Automation2Workflow): if not self.db._ensureTableExists(Automation2Workflow):
return [] return []
rf: Dict[str, Any] = { rf: Dict[str, Any] = {
"mandateId": self.mandateId, "mandateId": self.mandateId,
"featureInstanceId": self.featureInstanceId,
} }
if active is not None: if active is not None:
rf["active"] = active rf["active"] = active
@ -193,7 +194,7 @@ class GraphicalEditorObjects:
return rows return rows
def getWorkflow(self, workflowId: str) -> Optional[Dict[str, Any]]: def getWorkflow(self, workflowId: str) -> Optional[Dict[str, Any]]:
"""Get a single workflow by ID.""" """Get a single workflow by ID (mandate-scoped, cross-instance)."""
if not self.db._ensureTableExists(Automation2Workflow): if not self.db._ensureTableExists(Automation2Workflow):
return None return None
records = self.db.getRecordset( records = self.db.getRecordset(
@ -201,7 +202,6 @@ class GraphicalEditorObjects:
recordFilter={ recordFilter={
"id": workflowId, "id": workflowId,
"mandateId": self.mandateId, "mandateId": self.mandateId,
"featureInstanceId": self.featureInstanceId,
}, },
) )
if not records: if not records:

View file

@ -11,6 +11,7 @@ from .clickup import CLICKUP_NODES
from .file import FILE_NODES from .file import FILE_NODES
from .trustee import TRUSTEE_NODES from .trustee import TRUSTEE_NODES
from .data import DATA_NODES from .data import DATA_NODES
from .context import CONTEXT_NODES
STATIC_NODE_TYPES = ( STATIC_NODE_TYPES = (
TRIGGER_NODES TRIGGER_NODES
@ -23,4 +24,5 @@ STATIC_NODE_TYPES = (
+ FILE_NODES + FILE_NODES
+ TRUSTEE_NODES + TRUSTEE_NODES
+ DATA_NODES + DATA_NODES
+ CONTEXT_NODES
) )

View file

@ -26,7 +26,7 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}}, "outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-robot", "color": "#9C27B0"}, "meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "process", "_action": "process",
}, },
@ -43,7 +43,7 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}}, "outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-magnify", "color": "#9C27B0"}, "meta": {"icon": "mdi-magnify", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "webResearch", "_action": "webResearch",
}, },
@ -61,7 +61,7 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}}, "outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0"}, "meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "summarizeDocument", "_action": "summarizeDocument",
}, },
@ -79,7 +79,7 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}}, "outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-translate", "color": "#9C27B0"}, "meta": {"icon": "mdi-translate", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "translateDocument", "_action": "translateDocument",
}, },
@ -97,7 +97,7 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-convert", "color": "#9C27B0"}, "meta": {"icon": "mdi-file-convert", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "convertDocument", "_action": "convertDocument",
}, },
@ -114,7 +114,7 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-plus", "color": "#9C27B0"}, "meta": {"icon": "mdi-file-plus", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "generateDocument", "_action": "generateDocument",
}, },
@ -134,8 +134,28 @@ AI_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}}, "outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-code-tags", "color": "#9C27B0"}, "meta": {"icon": "mdi-code-tags", "color": "#9C27B0", "usesAi": True},
"_method": "ai", "_method": "ai",
"_action": "generateCode", "_action": "generateCode",
}, },
{
"id": "ai.consolidate",
"category": "ai",
"label": t("KI-Konsolidierung"),
"description": t("Gesammelte Ergebnisse mit KI zusammenfassen, klassifizieren oder semantisch zusammenführen"),
"parameters": [
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["summarize", "classify", "semanticMerge"]},
"description": t("Konsolidierungsmodus"), "default": "summarize"},
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
"description": t("Optionaler Prompt für die Konsolidierung"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
"outputPorts": {0: {"schema": "ConsolidateResult"}},
"meta": {"icon": "mdi-table-merge-cells", "color": "#9C27B0", "usesAi": True},
"_method": "ai",
"_action": "consolidate",
},
] ]

View file

@ -33,7 +33,7 @@ CLICKUP_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskList"}}, "outputPorts": {0: {"schema": "TaskList"}},
"meta": {"icon": "mdi-magnify", "color": "#7B68EE"}, "meta": {"icon": "mdi-magnify", "color": "#7B68EE", "usesAi": False},
"_method": "clickup", "_method": "clickup",
"_action": "searchTasks", "_action": "searchTasks",
}, },
@ -57,7 +57,7 @@ CLICKUP_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskList"}}, "outputPorts": {0: {"schema": "TaskList"}},
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE"}, "meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE", "usesAi": False},
"_method": "clickup", "_method": "clickup",
"_action": "listTasks", "_action": "listTasks",
}, },
@ -78,7 +78,7 @@ CLICKUP_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskResult"}}, "outputPorts": {0: {"schema": "TaskResult"}},
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE"}, "meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE", "usesAi": False},
"_method": "clickup", "_method": "clickup",
"_action": "getTask", "_action": "getTask",
}, },
@ -123,7 +123,7 @@ CLICKUP_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskResult"}}, "outputPorts": {0: {"schema": "TaskResult"}},
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE"}, "meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE", "usesAi": False},
"_method": "clickup", "_method": "clickup",
"_action": "createTask", "_action": "createTask",
}, },
@ -148,7 +148,7 @@ CLICKUP_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["TaskResult", "Transit"]}}, "inputPorts": {0: {"accepts": ["TaskResult", "Transit"]}},
"outputPorts": {0: {"schema": "TaskResult"}}, "outputPorts": {0: {"schema": "TaskResult"}},
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE"}, "meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE", "usesAi": False},
"_method": "clickup", "_method": "clickup",
"_action": "updateTask", "_action": "updateTask",
}, },
@ -171,7 +171,7 @@ CLICKUP_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-attachment", "color": "#7B68EE"}, "meta": {"icon": "mdi-attachment", "color": "#7B68EE", "usesAi": False},
"_method": "clickup", "_method": "clickup",
"_action": "uploadAttachment", "_action": "uploadAttachment",
}, },

View file

@ -0,0 +1,30 @@
# Copyright (c) 2025 Patrick Motsch
# Context node definitions — structural extraction without AI.
from modules.shared.i18nRegistry import t
CONTEXT_NODES = [
{
"id": "context.extractContent",
"category": "context",
"label": t("Inhalt extrahieren"),
"description": t("Dokumentstruktur extrahieren ohne KI (Seiten, Abschnitte, Bilder, Tabellen)"),
"parameters": [
{"name": "outputDetail", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["full", "structure", "references"]},
"description": t("Detailgrad: full = alles, structure = Skelett, references = Dateireferenzen"),
"default": "full"},
{"name": "includeImages", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Bilder extrahieren"), "default": True},
{"name": "includeTables", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Tabellen extrahieren"), "default": True},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "UdmDocument"}},
"meta": {"icon": "mdi-file-tree-outline", "color": "#00897B", "usesAi": False},
"_method": "context",
"_action": "extractContent",
},
]

View file

@ -19,7 +19,7 @@ DATA_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AggregateResult"}}, "outputPorts": {0: {"schema": "AggregateResult"}},
"executor": "data", "executor": "data",
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B"}, "meta": {"icon": "mdi-playlist-plus", "color": "#607D8B", "usesAi": False},
}, },
{ {
"id": "data.transform", "id": "data.transform",
@ -35,7 +35,7 @@ DATA_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult", "dynamic": True, "deriveFrom": "mappings"}}, "outputPorts": {0: {"schema": "ActionResult", "dynamic": True, "deriveFrom": "mappings"}},
"executor": "data", "executor": "data",
"meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B"}, "meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B", "usesAi": False},
}, },
{ {
"id": "data.filter", "id": "data.filter",
@ -45,12 +45,34 @@ DATA_NODES = [
"parameters": [ "parameters": [
{"name": "condition", "type": "string", "required": True, "frontendType": "filterExpression", {"name": "condition", "type": "string", "required": True, "frontendType": "filterExpression",
"description": t("Filterbedingung")}, "description": t("Filterbedingung")},
{"name": "udmContentType", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["", "text", "image", "table", "code", "media", "link", "formula"]},
"description": t("UDM-ContentType-Filter (optional, leer = kein UDM-Filter)"), "default": ""},
], ],
"inputs": 1, "inputs": 1,
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "FileList", "TaskList", "EmailList", "DocumentList"]}}, "inputPorts": {0: {"accepts": ["AggregateResult", "FileList", "TaskList", "EmailList", "DocumentList", "UdmDocument", "UdmNodeList"]}},
"outputPorts": {0: {"schema": "Transit"}}, "outputPorts": {0: {"schema": "Transit"}},
"executor": "data", "executor": "data",
"meta": {"icon": "mdi-filter-outline", "color": "#607D8B"}, "meta": {"icon": "mdi-filter-outline", "color": "#607D8B", "usesAi": False},
},
{
"id": "data.consolidate",
"category": "data",
"label": t("Konsolidieren"),
"description": t("Gesammelte Ergebnisse deterministisch zusammenführen (Tabelle, CSV, Merge)"),
"parameters": [
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["table", "concat", "merge", "csvJoin"]},
"description": t("Konsolidierungsmodus"), "default": "table"},
{"name": "separator", "type": "string", "required": False, "frontendType": "text",
"description": t("Trennzeichen (für concat/csvJoin)"), "default": "\n"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
"outputPorts": {0: {"schema": "ConsolidateResult"}},
"executor": "data",
"meta": {"icon": "mdi-table-merge-cells", "color": "#607D8B", "usesAi": False},
}, },
] ]

View file

@ -29,7 +29,7 @@ EMAIL_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "EmailList"}}, "outputPorts": {0: {"schema": "EmailList"}},
"meta": {"icon": "mdi-email-check", "color": "#1976D2"}, "meta": {"icon": "mdi-email-check", "color": "#1976D2", "usesAi": False},
"_method": "outlook", "_method": "outlook",
"_action": "readEmails", "_action": "readEmails",
}, },
@ -64,7 +64,7 @@ EMAIL_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "EmailList"}}, "outputPorts": {0: {"schema": "EmailList"}},
"meta": {"icon": "mdi-email-search", "color": "#1976D2"}, "meta": {"icon": "mdi-email-search", "color": "#1976D2", "usesAi": False},
"_method": "outlook", "_method": "outlook",
"_action": "searchEmails", "_action": "searchEmails",
}, },
@ -87,7 +87,7 @@ EMAIL_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit"]}}, "inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-email-edit", "color": "#1976D2"}, "meta": {"icon": "mdi-email-edit", "color": "#1976D2", "usesAi": False},
"_method": "outlook", "_method": "outlook",
"_action": "composeAndDraftEmailWithContext", "_action": "composeAndDraftEmailWithContext",
}, },

View file

@ -30,7 +30,7 @@ FILE_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit"]}}, "inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3"}, "meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3", "usesAi": False},
"_method": "file", "_method": "file",
"_action": "create", "_action": "create",
}, },

View file

@ -24,7 +24,7 @@ FLOW_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}}, "outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}},
"executor": "flow", "executor": "flow",
"meta": {"icon": "mdi-source-branch", "color": "#FF9800"}, "meta": {"icon": "mdi-source-branch", "color": "#FF9800", "usesAi": False},
}, },
{ {
"id": "flow.switch", "id": "flow.switch",
@ -52,13 +52,13 @@ FLOW_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "Transit"}}, "outputPorts": {0: {"schema": "Transit"}},
"executor": "flow", "executor": "flow",
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800"}, "meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800", "usesAi": False},
}, },
{ {
"id": "flow.loop", "id": "flow.loop",
"category": "flow", "category": "flow",
"label": t("Schleife / Für Jedes"), "label": t("Schleife / Für Jedes"),
"description": t("Über Array-Elemente iterieren"), "description": t("Über Array-Elemente oder UDM-Strukturebenen iterieren"),
"parameters": [ "parameters": [
{ {
"name": "items", "name": "items",
@ -67,19 +67,37 @@ FLOW_NODES = [
"frontendType": "text", "frontendType": "text",
"description": t("Pfad zum Array"), "description": t("Pfad zum Array"),
}, },
{
"name": "level",
"type": "string",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["auto", "documents", "structuralNodes", "contentBlocks"]},
"description": t("UDM-Iterationsebene"),
"default": "auto",
},
{
"name": "concurrency",
"type": "number",
"required": False,
"frontendType": "number",
"frontendOptions": {"min": 1, "max": 20},
"description": t("Parallele Iterationen (1 = sequentiell)"),
"default": 1,
},
], ],
"inputs": 1, "inputs": 1,
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit", "UdmDocument"]}},
"outputPorts": {0: {"schema": "LoopItem"}}, "outputPorts": {0: {"schema": "LoopItem"}},
"executor": "flow", "executor": "flow",
"meta": {"icon": "mdi-repeat", "color": "#FF9800"}, "meta": {"icon": "mdi-repeat", "color": "#FF9800", "usesAi": False},
}, },
{ {
"id": "flow.merge", "id": "flow.merge",
"category": "flow", "category": "flow",
"label": t("Zusammenführen"), "label": t("Zusammenführen"),
"description": t("Mehrere Zweige zusammenführen"), "description": t("Mehrere Zweige zusammenführen (2-5 Eingänge)"),
"parameters": [ "parameters": [
{ {
"name": "mode", "name": "mode",
@ -90,12 +108,21 @@ FLOW_NODES = [
"description": t("Zusammenführungsmodus"), "description": t("Zusammenführungsmodus"),
"default": "first", "default": "first",
}, },
{
"name": "inputCount",
"type": "number",
"required": False,
"frontendType": "number",
"frontendOptions": {"min": 2, "max": 5},
"description": t("Anzahl Eingänge"),
"default": 2,
},
], ],
"inputs": 2, "inputs": 2,
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}, 1: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}, 1: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "MergeResult"}}, "outputPorts": {0: {"schema": "MergeResult"}},
"executor": "flow", "executor": "flow",
"meta": {"icon": "mdi-call-merge", "color": "#FF9800"}, "meta": {"icon": "mdi-call-merge", "color": "#FF9800", "usesAi": False},
}, },
] ]

View file

@ -24,7 +24,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "fields"}}, "outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "fields"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0"}, "meta": {"icon": "mdi-form-textbox", "color": "#9C27B0", "usesAi": False},
}, },
{ {
"id": "input.approval", "id": "input.approval",
@ -45,7 +45,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}}, "outputPorts": {0: {"schema": "BoolResult"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-check-decagram", "color": "#4CAF50"}, "meta": {"icon": "mdi-check-decagram", "color": "#4CAF50", "usesAi": False},
}, },
{ {
"id": "input.upload", "id": "input.upload",
@ -68,7 +68,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-upload", "color": "#2196F3"}, "meta": {"icon": "mdi-upload", "color": "#2196F3", "usesAi": False},
}, },
{ {
"id": "input.comment", "id": "input.comment",
@ -86,7 +86,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TextResult"}}, "outputPorts": {0: {"schema": "TextResult"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-comment-text", "color": "#FF9800"}, "meta": {"icon": "mdi-comment-text", "color": "#FF9800", "usesAi": False},
}, },
{ {
"id": "input.review", "id": "input.review",
@ -105,7 +105,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}}, "outputPorts": {0: {"schema": "BoolResult"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-magnify-scan", "color": "#673AB7"}, "meta": {"icon": "mdi-magnify-scan", "color": "#673AB7", "usesAi": False},
}, },
{ {
"id": "input.selection", "id": "input.selection",
@ -123,7 +123,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TextResult"}}, "outputPorts": {0: {"schema": "TextResult"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-format-list-checks", "color": "#009688"}, "meta": {"icon": "mdi-format-list-checks", "color": "#009688", "usesAi": False},
}, },
{ {
"id": "input.confirmation", "id": "input.confirmation",
@ -143,6 +143,6 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}}, "outputPorts": {0: {"schema": "BoolResult"}},
"executor": "input", "executor": "input",
"meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A"}, "meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A", "usesAi": False},
}, },
] ]

View file

@ -23,7 +23,7 @@ SHAREPOINT_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FileList"}}, "outputPorts": {0: {"schema": "FileList"}},
"meta": {"icon": "mdi-file-search", "color": "#0078D4"}, "meta": {"icon": "mdi-file-search", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint", "_method": "sharepoint",
"_action": "findDocumentPath", "_action": "findDocumentPath",
}, },
@ -43,7 +43,7 @@ SHAREPOINT_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}}, "inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-document", "color": "#0078D4"}, "meta": {"icon": "mdi-file-document", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint", "_method": "sharepoint",
"_action": "readDocuments", "_action": "readDocuments",
}, },
@ -63,7 +63,7 @@ SHAREPOINT_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-upload", "color": "#0078D4"}, "meta": {"icon": "mdi-upload", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint", "_method": "sharepoint",
"_action": "uploadFile", "_action": "uploadFile",
}, },
@ -83,7 +83,7 @@ SHAREPOINT_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FileList"}}, "outputPorts": {0: {"schema": "FileList"}},
"meta": {"icon": "mdi-folder-open", "color": "#0078D4"}, "meta": {"icon": "mdi-folder-open", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint", "_method": "sharepoint",
"_action": "listDocuments", "_action": "listDocuments",
}, },
@ -103,7 +103,7 @@ SHAREPOINT_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}}, "inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-download", "color": "#0078D4"}, "meta": {"icon": "mdi-download", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint", "_method": "sharepoint",
"_action": "downloadFileByPath", "_action": "downloadFileByPath",
}, },
@ -126,7 +126,7 @@ SHAREPOINT_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-content-copy", "color": "#0078D4"}, "meta": {"icon": "mdi-content-copy", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint", "_method": "sharepoint",
"_action": "copyFile", "_action": "copyFile",
}, },

View file

@ -15,7 +15,7 @@ TRIGGER_NODES = [
"inputPorts": {}, "inputPorts": {},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"executor": "trigger", "executor": "trigger",
"meta": {"icon": "mdi-play", "color": "#4CAF50"}, "meta": {"icon": "mdi-play", "color": "#4CAF50", "usesAi": False},
}, },
{ {
"id": "trigger.form", "id": "trigger.form",
@ -36,7 +36,7 @@ TRIGGER_NODES = [
"inputPorts": {}, "inputPorts": {},
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "formFields"}}, "outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "formFields"}},
"executor": "trigger", "executor": "trigger",
"meta": {"icon": "mdi-form-select", "color": "#9C27B0"}, "meta": {"icon": "mdi-form-select", "color": "#9C27B0", "usesAi": False},
}, },
{ {
"id": "trigger.schedule", "id": "trigger.schedule",
@ -57,6 +57,6 @@ TRIGGER_NODES = [
"inputPorts": {}, "inputPorts": {},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"executor": "trigger", "executor": "trigger",
"meta": {"icon": "mdi-clock", "color": "#2196F3"}, "meta": {"icon": "mdi-clock", "color": "#2196F3", "usesAi": False},
}, },
] ]

View file

@ -23,7 +23,7 @@ TRUSTEE_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-database-refresh", "color": "#4CAF50"}, "meta": {"icon": "mdi-database-refresh", "color": "#4CAF50", "usesAi": False},
"_method": "trustee", "_method": "trustee",
"_action": "refreshAccountingData", "_action": "refreshAccountingData",
}, },
@ -47,7 +47,7 @@ TRUSTEE_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}}, "outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50"}, "meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50", "usesAi": True},
"_method": "trustee", "_method": "trustee",
"_action": "extractFromFiles", "_action": "extractFromFiles",
}, },
@ -66,7 +66,7 @@ TRUSTEE_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}}, "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50"}, "meta": {"icon": "mdi-file-document-check", "color": "#4CAF50", "usesAi": False},
"_method": "trustee", "_method": "trustee",
"_action": "processDocuments", "_action": "processDocuments",
}, },
@ -85,7 +85,7 @@ TRUSTEE_NODES = [
"outputs": 1, "outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}}, "inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}}, "outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-calculator", "color": "#4CAF50"}, "meta": {"icon": "mdi-calculator", "color": "#4CAF50", "usesAi": False},
"_method": "trustee", "_method": "trustee",
"_action": "syncToAccounting", "_action": "syncToAccounting",
}, },

View file

@ -88,6 +88,7 @@ def getNodeTypesForApi(
{"id": "input", "label": "Eingabe/Mensch"}, {"id": "input", "label": "Eingabe/Mensch"},
{"id": "flow", "label": "Ablauf"}, {"id": "flow", "label": "Ablauf"},
{"id": "data", "label": "Daten"}, {"id": "data", "label": "Daten"},
{"id": "context", "label": "Kontext"},
{"id": "ai", "label": "KI"}, {"id": "ai", "label": "KI"},
{"id": "file", "label": "Datei"}, {"id": "file", "label": "Datei"},
{"id": "email", "label": "E-Mail"}, {"id": "email", "label": "E-Mail"},

View file

@ -152,6 +152,21 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
description="Ergebnisdaten"), description="Ergebnisdaten"),
]), ]),
"Transit": PortSchema(name="Transit", fields=[]), "Transit": PortSchema(name="Transit", fields=[]),
"UdmDocument": PortSchema(name="UdmDocument", fields=[
PortField(name="id", type="str", description="Dokument-ID"),
PortField(name="sourceType", type="str", description="Quellformat (pdf, docx, …)"),
PortField(name="sourcePath", type="str", description="Quellpfad"),
PortField(name="children", type="List[Any]", description="StructuralNodes"),
]),
"UdmNodeList": PortSchema(name="UdmNodeList", fields=[
PortField(name="nodes", type="List[Any]", description="UDM StructuralNodes oder ContentBlocks"),
PortField(name="count", type="int", description="Anzahl"),
]),
"ConsolidateResult": PortSchema(name="ConsolidateResult", fields=[
PortField(name="result", type="Any", description="Konsolidiertes Ergebnis"),
PortField(name="mode", type="str", description="Konsolidierungsmodus"),
PortField(name="count", type="int", description="Anzahl verarbeiteter Elemente"),
]),
} }
@ -412,6 +427,36 @@ def _extractMergeResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
} }
def _extractUdmDocument(upstream: Dict[str, Any]) -> Dict[str, Any]:
"""Extract UdmDocument fields from upstream output."""
if upstream.get("children") is not None and upstream.get("sourceType"):
return upstream
udm = upstream.get("udm")
if isinstance(udm, dict) and udm.get("children") is not None:
return udm
return {}
def _extractUdmNodeList(upstream: Dict[str, Any]) -> Dict[str, Any]:
"""Extract UdmNodeList fields from upstream output."""
nodes = upstream.get("nodes")
if isinstance(nodes, list):
return {"nodes": nodes, "count": len(nodes)}
children = upstream.get("children")
if isinstance(children, list):
return {"nodes": children, "count": len(children)}
return {}
def _extractConsolidateResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
"""Extract ConsolidateResult fields from upstream output."""
result = {}
for key in ("result", "mode", "count"):
if key in upstream:
result[key] = upstream[key]
return result
INPUT_EXTRACTORS: Dict[str, Callable] = { INPUT_EXTRACTORS: Dict[str, Callable] = {
"EmailDraft": _extractEmailDraft, "EmailDraft": _extractEmailDraft,
"DocumentList": _extractDocuments, "DocumentList": _extractDocuments,
@ -425,6 +470,9 @@ INPUT_EXTRACTORS: Dict[str, Callable] = {
"TaskResult": _extractTaskResult, "TaskResult": _extractTaskResult,
"AggregateResult": _extractAggregateResult, "AggregateResult": _extractAggregateResult,
"MergeResult": _extractMergeResult, "MergeResult": _extractMergeResult,
"UdmDocument": _extractUdmDocument,
"UdmNodeList": _extractUdmNodeList,
"ConsolidateResult": _extractConsolidateResult,
} }

View file

@ -27,15 +27,33 @@ class DataNeutraliserConfig(PowerOnModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="ID of the mandate this configuration belongs to", description="ID of the mandate this configuration belongs to",
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="ID of the feature instance this configuration belongs to", description="ID of the feature instance this configuration belongs to",
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
userId: str = Field( userId: str = Field(
description="ID of the user who created this configuration", description="ID of the user who created this configuration",
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
enabled: bool = Field( enabled: bool = Field(
default=True, default=True,
@ -84,15 +102,33 @@ class DataNeutralizerAttributes(BaseModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="ID of the mandate this attribute belongs to", description="ID of the mandate this attribute belongs to",
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="ID of the feature instance this attribute belongs to", description="ID of the feature instance this attribute belongs to",
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
userId: str = Field( userId: str = Field(
description="ID of the user who created this attribute", description="ID of the user who created this attribute",
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
originalText: str = Field( originalText: str = Field(
description="Original text that was neutralized", description="Original text that was neutralized",
@ -101,7 +137,13 @@ class DataNeutralizerAttributes(BaseModel):
fileId: Optional[str] = Field( fileId: Optional[str] = Field(
default=None, default=None,
description="ID of the file this attribute belongs to", description="ID of the file this attribute belongs to",
json_schema_extra={"label": "Datei-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}, json_schema_extra={
"label": "Datei-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileItem"},
},
) )
patternType: str = Field( patternType: str = Field(
description="Type of pattern that matched (email, phone, name, etc.)", description="Type of pattern that matched (email, phone, name, etc.)",
@ -118,16 +160,16 @@ class DataNeutralizationSnapshot(BaseModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID"}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID"}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
) )
userId: str = Field( userId: str = Field(
description="User who triggered neutralization", description="User who triggered neutralization",
json_schema_extra={"label": "Benutzer-ID"}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
) )
sourceLabel: str = Field( sourceLabel: str = Field(
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'", description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",

View file

@ -14,6 +14,7 @@ from modules.features.neutralization.datamodelFeatureNeutralizer import (
DataNeutralizationSnapshot, DataNeutralizationSnapshot,
) )
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.timeUtils import getUtcTimestamp from modules.shared.timeUtils import getUtcTimestamp
@ -21,6 +22,9 @@ from modules.datamodels.datamodelUam import User
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
neutralizationDatabase = "poweron_neutralization"
registerDatabase(neutralizationDatabase)
# Singleton cache for interface instances # Singleton cache for interface instances
_neutralizerInterfaces = {} _neutralizerInterfaces = {}
@ -54,7 +58,7 @@ class InterfaceFeatureNeutralizer:
try: try:
# Use same database config pattern as other feature interfaces # Use same database config pattern as other feature interfaces
dbHost = APP_CONFIG.get("DB_HOST", "localhost") dbHost = APP_CONFIG.get("DB_HOST", "localhost")
dbDatabase = "poweron_neutralization" dbDatabase = neutralizationDatabase
dbUser = APP_CONFIG.get("DB_USER", "postgres") dbUser = APP_CONFIG.get("DB_USER", "postgres")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -284,9 +284,12 @@ class Kanton(PowerOnModel):
id_land: Optional[str] = Field( id_land: Optional[str] = Field(
None, None,
description="Land ID (Foreign Key) - eindeutiger Link zum Land, in welchem Land der Kanton liegt", description="Land ID (Foreign Key) - eindeutiger Link zum Land, in welchem Land der Kanton liegt",
frontend_type="text", json_schema_extra={
frontend_readonly=False, "frontend_type": "text",
frontend_required=False, "frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Land"},
},
) )
abk: Optional[str] = Field( abk: Optional[str] = Field(
None, None,
@ -341,9 +344,12 @@ class Gemeinde(BaseModel):
id_kanton: Optional[str] = Field( id_kanton: Optional[str] = Field(
None, None,
description="Kanton ID (Foreign Key) - eindeutiger Link zum Kanton, in welchem Kanton die Gemeinde liegt", description="Kanton ID (Foreign Key) - eindeutiger Link zum Kanton, in welchem Kanton die Gemeinde liegt",
frontend_type="text", json_schema_extra={
frontend_readonly=False, "frontend_type": "text",
frontend_required=False, "frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Kanton"},
},
) )
plz: Optional[str] = Field( plz: Optional[str] = Field(
None, None,
@ -387,17 +393,23 @@ class Parzelle(PowerOnModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="ID of the mandate", description="ID of the mandate",
frontend_type="text", json_schema_extra={
frontend_readonly=True, "frontend_type": "text",
frontend_required=False, "frontend_readonly": True,
label="Mandats-ID", "frontend_required": False,
"label": "Mandats-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="ID of the feature instance", description="ID of the feature instance",
frontend_type="text", json_schema_extra={
frontend_readonly=True, "frontend_type": "text",
frontend_required=False, "frontend_readonly": True,
label="Feature-Instanz-ID", "frontend_required": False,
"label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
# Grunddaten # Grunddaten
@ -456,9 +468,12 @@ class Parzelle(PowerOnModel):
kontextGemeinde: Optional[str] = Field( kontextGemeinde: Optional[str] = Field(
None, None,
description="Municipality ID (Foreign Key)", description="Municipality ID (Foreign Key)",
frontend_type="text", json_schema_extra={
frontend_readonly=False, "frontend_type": "text",
frontend_required=False, "frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde"},
},
) )
# Bebauungsparameter # Bebauungsparameter
@ -618,17 +633,23 @@ class Projekt(PowerOnModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="ID of the mandate", description="ID of the mandate",
frontend_type="text", json_schema_extra={
frontend_readonly=True, "frontend_type": "text",
frontend_required=False, "frontend_readonly": True,
label="Mandats-ID", "frontend_required": False,
"label": "Mandats-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="ID of the feature instance", description="ID of the feature instance",
frontend_type="text", json_schema_extra={
frontend_readonly=True, "frontend_type": "text",
frontend_required=False, "frontend_readonly": True,
label="Feature-Instanz-ID", "frontend_required": False,
"label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
label: str = Field( label: str = Field(
description="Project designation", description="Project designation",

View file

@ -21,6 +21,7 @@ from .datamodelFeatureRealEstate import (
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.security.rbac import RbacClass from modules.security.rbac import RbacClass
from modules.datamodels.datamodelRbac import AccessRuleContext from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import AccessLevel from modules.datamodels.datamodelUam import AccessLevel
@ -29,6 +30,9 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
realEstateDatabase = "poweron_realestate"
registerDatabase(realEstateDatabase)
# Singleton factory for Real Estate interfaces # Singleton factory for Real Estate interfaces
_realEstateInterfaces = {} _realEstateInterfaces = {}
@ -71,7 +75,7 @@ class RealEstateObjects:
try: try:
# Get database configuration from environment # Get database configuration from environment
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_realestate" dbDatabase = realEstateDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -11,6 +11,7 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from .datamodelTeamsbot import ( from .datamodelTeamsbot import (
TeamsbotSession, TeamsbotSession,
@ -24,6 +25,9 @@ from .datamodelTeamsbot import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
teamsbotDatabase = "poweron_teamsbot"
registerDatabase(teamsbotDatabase)
# Singleton factory # Singleton factory
_interfaces = {} _interfaces = {}
@ -50,7 +54,7 @@ class TeamsbotObjects:
self.userId = str(currentUser.id) if currentUser else "system" self.userId = str(currentUser.id) if currentUser else "system"
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_teamsbot" dbDatabase = teamsbotDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -46,6 +46,7 @@ class TrusteeOrganisation(PowerOnModel):
description="Mandate ID (system-level organisation)", description="Mandate ID (system-level organisation)",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -56,6 +57,7 @@ class TrusteeOrganisation(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -90,6 +92,7 @@ class TrusteeRole(PowerOnModel):
description="Mandate ID", description="Mandate ID",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -100,6 +103,7 @@ class TrusteeRole(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -127,7 +131,8 @@ class TrusteeAccess(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/organisations/options" "frontend_options": "/api/trustee/{instanceId}/organisations/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
} }
) )
roleId: str = Field( roleId: str = Field(
@ -137,7 +142,8 @@ class TrusteeAccess(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/roles/options" "frontend_options": "/api/trustee/{instanceId}/roles/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole"},
} }
) )
userId: str = Field( userId: str = Field(
@ -147,7 +153,8 @@ class TrusteeAccess(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/users/options" "frontend_options": "/api/users/options",
"fk_target": {"db": "poweron_app", "table": "User"},
} }
) )
contractId: Optional[str] = Field( contractId: Optional[str] = Field(
@ -159,7 +166,8 @@ class TrusteeAccess(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/contracts/options", "frontend_options": "/api/trustee/{instanceId}/contracts/options",
"frontend_depends_on": "organisationId" "frontend_depends_on": "organisationId",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract"},
} }
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
@ -167,6 +175,7 @@ class TrusteeAccess(PowerOnModel):
description="Mandate ID", description="Mandate ID",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -177,6 +186,7 @@ class TrusteeAccess(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -204,7 +214,8 @@ class TrusteeContract(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, # Editable at creation, then readonly "frontend_readonly": False, # Editable at creation, then readonly
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/organisations/options" "frontend_options": "/api/trustee/{instanceId}/organisations/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
} }
) )
label: str = Field( label: str = Field(
@ -231,6 +242,7 @@ class TrusteeContract(PowerOnModel):
description="Mandate ID", description="Mandate ID",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -241,6 +253,7 @@ class TrusteeContract(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -297,7 +310,8 @@ class TrusteeDocument(PowerOnModel):
"label": "Datei-Referenz", "label": "Datei-Referenz",
"frontend_type": "file_reference", "frontend_type": "file_reference",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False "frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileItem"},
} }
) )
documentName: str = Field( documentName: str = Field(
@ -345,6 +359,7 @@ class TrusteeDocument(PowerOnModel):
description="Mandate ID (auto-set from context)", description="Mandate ID (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -356,6 +371,7 @@ class TrusteeDocument(PowerOnModel):
description="Feature Instance ID for instance-level isolation (auto-set from context)", description="Feature Instance ID for instance-level isolation (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -422,7 +438,8 @@ class TrusteePosition(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/documents/options" "frontend_options": "/api/trustee/{instanceId}/documents/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
} }
) )
bankDocumentId: Optional[str] = Field( bankDocumentId: Optional[str] = Field(
@ -433,7 +450,8 @@ class TrusteePosition(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/documents/options" "frontend_options": "/api/trustee/{instanceId}/documents/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
} }
) )
valuta: Optional[str] = Field( valuta: Optional[str] = Field(
@ -677,6 +695,7 @@ class TrusteePosition(PowerOnModel):
description="Mandate ID (auto-set from context)", description="Mandate ID (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -688,6 +707,7 @@ class TrusteePosition(PowerOnModel):
description="Feature Instance ID for instance-level isolation (auto-set from context)", description="Feature Instance ID for instance-level isolation (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -718,8 +738,8 @@ class TrusteeDataAccount(PowerOnModel):
accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"}) accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"})
currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"}) currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"})
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"}) isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@i18nModel("Buchung (Sync)") @i18nModel("Buchung (Sync)")
class TrusteeDataJournalEntry(PowerOnModel): class TrusteeDataJournalEntry(PowerOnModel):
@ -731,14 +751,14 @@ class TrusteeDataJournalEntry(PowerOnModel):
description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"}) description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"}) currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
totalAmount: float = Field(default=0.0, description="Total amount of entry", json_schema_extra={"label": "Betrag"}) totalAmount: float = Field(default=0.0, description="Total amount of entry", json_schema_extra={"label": "Betrag"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@i18nModel("Buchungszeile (Sync)") @i18nModel("Buchungszeile (Sync)")
class TrusteeDataJournalLine(PowerOnModel): class TrusteeDataJournalLine(PowerOnModel):
"""Journal entry line (debit/credit) synced from external accounting system.""" """Journal entry line (debit/credit) synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung"}) journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}})
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"}) accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll"}) debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll"})
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben"}) creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben"})
@ -746,8 +766,8 @@ class TrusteeDataJournalLine(PowerOnModel):
taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"}) taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"}) costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})
description: str = Field(default="", json_schema_extra={"label": "Beschreibung"}) description: str = Field(default="", json_schema_extra={"label": "Beschreibung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@i18nModel("Kontakt (Sync)") @i18nModel("Kontakt (Sync)")
class TrusteeDataContact(PowerOnModel): class TrusteeDataContact(PowerOnModel):
@ -764,8 +784,8 @@ class TrusteeDataContact(PowerOnModel):
email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"}) email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"})
phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"}) phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"})
vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."}) vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@i18nModel("Kontosaldo (Sync)") @i18nModel("Kontosaldo (Sync)")
class TrusteeDataAccountBalance(PowerOnModel): class TrusteeDataAccountBalance(PowerOnModel):
@ -779,8 +799,8 @@ class TrusteeDataAccountBalance(PowerOnModel):
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz"}) creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz"})
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo"}) closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"}) currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@i18nModel("Buchhaltungs-Konfiguration") @i18nModel("Buchhaltungs-Konfiguration")
class TrusteeAccountingConfig(PowerOnModel): class TrusteeAccountingConfig(PowerOnModel):
@ -790,7 +810,7 @@ class TrusteeAccountingConfig(PowerOnModel):
Credentials are stored encrypted (decrypted at runtime by the AccountingBridge). Credentials are stored encrypted (decrypted at runtime by the AccountingBridge).
""" """
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz"}) featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"}) connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"})
displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"}) displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"})
encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"}) encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"})
@ -800,7 +820,7 @@ class TrusteeAccountingConfig(PowerOnModel):
lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"}) lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"})
cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"}) cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"})
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"}) chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
@i18nModel("Buchhaltungs-Synchronisation") @i18nModel("Buchhaltungs-Synchronisation")
class TrusteeAccountingSync(PowerOnModel): class TrusteeAccountingSync(PowerOnModel):
@ -809,8 +829,11 @@ class TrusteeAccountingSync(PowerOnModel):
Used for duplicate prevention, audit trail, and retry logic. Used for duplicate prevention, audit trail, and retry logic.
""" """
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
positionId: str = Field(description="FK -> TrusteePosition.id", json_schema_extra={"label": "Position"}) positionId: str = Field(
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz"}) description="FK -> TrusteePosition.id",
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition"}},
)
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"}) connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"})
externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"}) externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"})
externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"}) externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"})
@ -819,5 +842,5 @@ class TrusteeAccountingSync(PowerOnModel):
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am"}) syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am"})
errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"}) errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"})
bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"}) bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})

View file

@ -14,6 +14,7 @@ from pydantic import ValidationError
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC, getDistinctColumnValuesWithRBAC from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC, getDistinctColumnValuesWithRBAC
from modules.security.rbac import RbacClass from modules.security.rbac import RbacClass
from modules.datamodels.datamodelUam import User, AccessLevel from modules.datamodels.datamodelUam import User, AccessLevel
@ -30,6 +31,9 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
trusteeDatabase = "poweron_trustee"
registerDatabase(trusteeDatabase)
# Singleton factory for TrusteeObjects instances per context # Singleton factory for TrusteeObjects instances per context
_trusteeInterfaces = {} _trusteeInterfaces = {}
@ -276,7 +280,7 @@ class TrusteeObjects:
"""Initializes the database connection directly.""" """Initializes the database connection directly."""
try: try:
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_trustee" dbDatabase = trusteeDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -1563,7 +1563,13 @@ async def sync_positions_to_accounting(
raise HTTPException(status_code=400, detail=routeApiMsg("positionIds required")) raise HTTPException(status_code=400, detail=routeApiMsg("positionIds required"))
results = await bridge.pushBatchToAccounting(instanceId, positionIds) results = await bridge.pushBatchToAccounting(instanceId, positionIds)
failed = [r for r in results if not r.success] skipped = [r for r in results if not r.success and r.errorMessage and "already synced" in r.errorMessage]
failed = [r for r in results if not r.success and r not in skipped]
if skipped:
logger.info(
"Accounting sync: %s position(s) already synced, skipped",
len(skipped),
)
if failed: if failed:
logger.warning( logger.warning(
"Accounting sync had %s failure(s): %s", "Accounting sync had %s failure(s): %s",
@ -1573,7 +1579,8 @@ async def sync_positions_to_accounting(
return { return {
"total": len(results), "total": len(results),
"success": sum(1 for r in results if r.success), "success": sum(1 for r in results if r.success),
"errors": sum(1 for r in results if not r.success), "skipped": len(skipped),
"errors": len(failed),
"results": [r.model_dump() for r in results], "results": [r.model_dump() for r in results],
} }

View file

@ -19,15 +19,33 @@ class WorkspaceUserSettings(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="User ID", description="User ID",
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
) )
mandateId: str = Field( mandateId: str = Field(
description="Mandate ID", description="Mandate ID",
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="Feature Instance ID", description="Feature Instance ID",
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
) )
maxAgentRounds: Optional[int] = Field( maxAgentRounds: Optional[int] = Field(
default=None, default=None,

View file

@ -9,6 +9,7 @@ import logging
from typing import Dict, Any, Optional from typing import Dict, Any, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.features.workspace.datamodelFeatureWorkspace import WorkspaceUserSettings from modules.features.workspace.datamodelFeatureWorkspace import WorkspaceUserSettings
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
@ -17,6 +18,9 @@ from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
workspaceDatabase = "poweron_workspace"
registerDatabase(workspaceDatabase)
_workspaceInterfaces: Dict[str, "WorkspaceObjects"] = {} _workspaceInterfaces: Dict[str, "WorkspaceObjects"] = {}
@ -39,7 +43,7 @@ class WorkspaceObjects:
def _initializeDatabase(self): def _initializeDatabase(self):
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_workspace" dbDatabase = workspaceDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -1464,18 +1464,18 @@ async def listFeatureConnectionTables(
tables = [] tables = []
for obj in accessible: for obj in accessible:
meta = obj.get("meta", {}) meta = obj.get("meta", {})
if meta.get("wildcard"):
continue
node = { node = {
"objectKey": obj.get("objectKey", ""), "objectKey": obj.get("objectKey", ""),
"tableName": meta.get("table", ""), "tableName": meta.get("table", ""),
"label": resolveText(obj.get("label", "")), "label": resolveText(obj.get("label", "")),
"fields": meta.get("fields", []), "fields": meta.get("fields", []),
"isParent": bool(meta.get("isParent", False)),
"parentTable": meta.get("parentTable") or None,
"parentKey": meta.get("parentKey") or None,
"displayFields": meta.get("displayFields", []),
} }
if meta.get("isParent"):
node["isParent"] = True
node["displayFields"] = meta.get("displayFields", [])
if meta.get("parentTable"):
node["parentTable"] = meta["parentTable"]
node["parentKey"] = meta.get("parentKey", "")
tables.append(node) tables.append(node)
return JSONResponse({"tables": tables}) return JSONResponse({"tables": tables})

View file

@ -159,11 +159,12 @@ def _bootstrapSystemTemplates(db: DatabaseConnector) -> None:
""" """
try: try:
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import graphicalEditorDatabase
import uuid import uuid
greenfieldDb = DatabaseConnector( greenfieldDb = DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"), dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_graphicaleditor", dbDatabase=graphicalEditorDatabase,
dbUser=APP_CONFIG.get("DB_USER"), dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"), dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
) )
@ -1509,12 +1510,16 @@ def _ensureUiContextRules(db: DatabaseConnector) -> None:
if roleId and item: if roleId and item:
existingCombinations.add((roleId, item)) existingCombinations.add((roleId, item))
# Check each navigation item and add missing rules # Check each navigation item and add missing rules (including subgroup items)
missingRules = [] missingRules = []
for section in NAVIGATION_SECTIONS: for section in NAVIGATION_SECTIONS:
isAdminSection = section.get("adminOnly", False) isAdminSection = section.get("adminOnly", False)
for item in section.get("items", []): allItems = list(section.get("items", []))
for subgroup in section.get("subgroups", []):
allItems.extend(subgroup.get("items", []))
for item in allItems:
objectKey = item.get("objectKey") objectKey = item.get("objectKey")
if not objectKey: if not objectKey:
continue continue
@ -1864,6 +1869,7 @@ def _createStoreResourceRules(db: DatabaseConnector) -> None:
"resource.store.teamsbot", "resource.store.teamsbot",
"resource.store.workspace", "resource.store.workspace",
"resource.store.commcoach", "resource.store.commcoach",
"resource.store.trustee",
] ]
storeRules = [] storeRules = []

View file

@ -17,6 +17,7 @@ import uuid
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.security.rbac import RbacClass from modules.security.rbac import RbacClass
@ -48,6 +49,9 @@ from modules.datamodels.datamodelNotification import UserNotification
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
appDatabase = "poweron_app"
registerDatabase(appDatabase)
# Singleton factory for AppObjects instances per context # Singleton factory for AppObjects instances per context
_gatewayInterfaces = {} _gatewayInterfaces = {}
@ -133,7 +137,7 @@ class AppObjects:
try: try:
# Get configuration values with defaults # Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_app" dbDatabase = appDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -1894,11 +1898,12 @@ class AppObjects:
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import ( from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoWorkflow, AutoVersion, AutoRun, AutoStepLog, AutoTask, AutoWorkflow, AutoVersion, AutoRun, AutoStepLog, AutoTask,
) )
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import graphicalEditorDatabase
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
geDb = DatabaseConnector( geDb = DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"), dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_graphicaleditor", dbDatabase=graphicalEditorDatabase,
dbUser=APP_CONFIG.get("DB_USER"), dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"), dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)), dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),

View file

@ -14,6 +14,7 @@ import uuid
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp from modules.shared.timeUtils import getUtcTimestamp
from modules.datamodels.datamodelUam import User, Mandate from modules.datamodels.datamodelUam import User, Mandate
from modules.datamodels.datamodelMembership import UserMandate from modules.datamodels.datamodelMembership import UserMandate
@ -109,6 +110,7 @@ _billingInterfaces: Dict[str, "BillingObjects"] = {}
# Database name for billing # Database name for billing
BILLING_DATABASE = "poweron_billing" BILLING_DATABASE = "poweron_billing"
registerDatabase(BILLING_DATABASE)
def getInterface(currentUser: User, mandateId: str = None) -> "BillingObjects": def getInterface(currentUser: User, mandateId: str = None) -> "BillingObjects":
@ -1540,6 +1542,28 @@ class BillingObjects:
if not accountIds: if not accountIds:
return PaginatedResult(items=[], totalItems=0, totalPages=0) return PaginatedResult(items=[], totalItems=0, totalPages=0)
# Extract free-text search term and run a custom query that covers
# enriched columns (mandateName, userName) and the numeric amount
# column. The generic SQL search only covers TEXT columns of the
# BillingTransaction table, which excludes these fields.
searchTerm: Optional[str] = None
if mappedPagination and mappedPagination.filters:
raw = mappedPagination.filters.get("search")
if isinstance(raw, str) and raw.strip():
searchTerm = raw.strip()
if searchTerm:
searchResult = self._searchTransactionsPaginated(
allAccounts=allAccounts,
accountIds=accountIds,
userId=userId,
searchTerm=searchTerm,
pagination=mappedPagination,
)
pageItems = searchResult["items"]
totalItems = searchResult["totalItems"]
totalPages = searchResult["totalPages"]
else:
recordFilter: Dict[str, Any] = {"accountId": accountIds} recordFilter: Dict[str, Any] = {"accountId": accountIds}
if userId: if userId:
recordFilter["createdByUserId"] = userId recordFilter["createdByUserId"] = userId
@ -1550,6 +1574,8 @@ class BillingObjects:
recordFilter=recordFilter, recordFilter=recordFilter,
) )
pageItems = result.get("items", []) if isinstance(result, dict) else result.items pageItems = result.get("items", []) if isinstance(result, dict) else result.items
totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
accountMap = {a.get("id"): a for a in allAccounts} accountMap = {a.get("id"): a for a in allAccounts}
@ -1592,15 +1618,186 @@ class BillingObjects:
row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
enriched.append(row) enriched.append(row)
totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages) return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages)
except Exception as e: except Exception as e:
logger.error(f"Error in getTransactionsForMandatesPaginated: {e}") logger.error(f"Error in getTransactionsForMandatesPaginated: {e}")
return PaginatedResult(items=[], totalItems=0, totalPages=0) return PaginatedResult(items=[], totalItems=0, totalPages=0)
def _searchTransactionsPaginated(
    self,
    allAccounts: List[Dict[str, Any]],
    accountIds: List[str],
    userId: Optional[str],
    searchTerm: str,
    pagination: PaginationParams,
) -> Dict[str, Any]:
    """
    Custom paginated search for BillingTransaction that also covers the
    enriched columns `mandateName` and `userName` as well as the numeric
    `amount` column. Resolves matching mandate/user IDs via the app DB
    first, then builds a single SQL query with OR-combined conditions.

    Args:
        allAccounts: Account rows (dicts) visible to the caller; used to map
            mandates matching the search term back to their billing accounts.
        accountIds: Account IDs the result set is restricted to.
        userId: Optional creator filter (matched against "createdByUserId").
        searchTerm: Free-text search term (caller strips whitespace).
        pagination: Pagination/sort/filter parameters. The "search" filter
            key is handled here; all other filters are applied generically
            via the connector's clause builder.

    Returns:
        Dict with "items", "totalItems" and "totalPages". On SQL errors an
        empty result is returned and the transaction is rolled back.
    """
    import math
    from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
    from modules.datamodels.datamodelUam import UserInDB
    from modules.interfaces.interfaceDbApp import getInterface as getAppInterface

    table = BillingTransaction.__name__
    fields = _get_model_fields(BillingTransaction)
    pattern = f"%{searchTerm}%"

    # Resolve matching user / mandate IDs via the app DB (which is separate
    # from the billing DB and hosts UserInDB / Mandate tables).
    matchingUserIds: List[str] = []
    matchingMandateIds: List[str] = []
    try:
        appInterface = getAppInterface(self.currentUser)
        appInterface.db._ensure_connection()
        with appInterface.db.connection.cursor() as cur:
            if appInterface.db._ensureTableExists(UserInDB):
                cur.execute(
                    'SELECT "id" FROM "UserInDB" WHERE '
                    'COALESCE("username", \'\') ILIKE %s OR '
                    'COALESCE("fullName", \'\') ILIKE %s OR '
                    'COALESCE("email", \'\') ILIKE %s',
                    (pattern, pattern, pattern),
                )
                matchingUserIds = [r["id"] for r in cur.fetchall() if r.get("id")]
            if appInterface.db._ensureTableExists(Mandate):
                cur.execute(
                    'SELECT "id" FROM "Mandate" WHERE '
                    'COALESCE("label", \'\') ILIKE %s OR '
                    'COALESCE("name", \'\') ILIKE %s',
                    (pattern, pattern),
                )
                matchingMandateIds = [r["id"] for r in cur.fetchall() if r.get("id")]
    except Exception as e:
        logger.warning(f"_searchTransactionsPaginated: user/mandate resolution failed: {e}")

    # Build the membership set once; previously `set(matchingMandateIds)` was
    # rebuilt for every element of allAccounts inside the comprehension.
    matchingMandateIdSet = set(matchingMandateIds)
    matchingAccountIds = [
        a.get("id") for a in allAccounts
        if a.get("id") and a.get("mandateId") in matchingMandateIdSet
    ]

    # Try to interpret the search term as a number for amount matching
    # (accepts both "1,5" and "1.5" style decimal separators).
    amountVal: Optional[float] = None
    try:
        amountVal = float(searchTerm.replace(",", "."))
    except Exception:
        amountVal = None

    whereParts: List[str] = ['"accountId" = ANY(%s)']
    whereValues: List[Any] = [accountIds]
    if userId:
        whereParts.append('"createdByUserId" = %s')
        whereValues.append(userId)

    # Apply non-search filters from pagination (reuse existing builder for
    # everything except the `search` key which we handle explicitly).
    import copy
    paginationWithoutSearch = copy.deepcopy(pagination) if pagination else None
    if paginationWithoutSearch and paginationWithoutSearch.filters:
        paginationWithoutSearch.filters = {
            k: v for k, v in paginationWithoutSearch.filters.items() if k != "search"
        }

    # OR-combined search conditions: every TEXT column, resolved user IDs,
    # accounts of matching mandates, and the amount column (text + numeric).
    orParts: List[str] = []
    orValues: List[Any] = []
    textCols = [c for c, t in fields.items() if t == "TEXT"]
    for col in textCols:
        orParts.append(f'COALESCE("{col}"::TEXT, \'\') ILIKE %s')
        orValues.append(pattern)
    if matchingUserIds:
        orParts.append('"createdByUserId" = ANY(%s)')
        orValues.append(matchingUserIds)
    if matchingAccountIds:
        orParts.append('"accountId" = ANY(%s)')
        orValues.append(matchingAccountIds)
    orParts.append('"amount"::TEXT ILIKE %s')
    orValues.append(pattern)
    if amountVal is not None:
        orParts.append('"amount" = %s')
        orValues.append(amountVal)
    whereParts.append(f"({' OR '.join(orParts)})")
    whereValues.extend(orValues)

    # Apply remaining structured filters via the generic helper by feeding
    # it a dummy pagination that does NOT include LIMIT/OFFSET. We only
    # need the WHERE contribution for the non-search filters here.
    extraWhere = ""
    extraValues: List[Any] = []
    if paginationWithoutSearch and paginationWithoutSearch.filters:
        try:
            fromPagination = copy.deepcopy(paginationWithoutSearch)
            fromPagination.sort = []
            fromPagination.page = 1
            fromPagination.pageSize = 1
            ew, _, _, values, _ = self.db._buildPaginationClauses(
                BillingTransaction, fromPagination, recordFilter=None
            )
            if ew:
                # The builder emits a leading " WHERE "; splice it into our
                # existing WHERE as an additional AND group.
                extraWhere = ew.replace(" WHERE ", " AND ", 1)
                extraValues = list(values)
        except Exception as e:
            logger.warning(f"_searchTransactionsPaginated: extra-filter build failed: {e}")

    whereClause = " WHERE " + " AND ".join(whereParts) + extraWhere
    whereValues.extend(extraValues)

    # Build ORDER BY from pagination.sort; only known model columns are
    # accepted, so no user input is interpolated into SQL identifiers.
    validColumns = set(fields.keys())
    orderParts: List[str] = []
    if pagination and pagination.sort:
        for sf in pagination.sort:
            sfField = sf.get("field") if isinstance(sf, dict) else getattr(sf, "field", None)
            sfDir = sf.get("direction", "asc") if isinstance(sf, dict) else getattr(sf, "direction", "asc")
            if sfField and sfField in validColumns:
                direction = "DESC" if str(sfDir).lower() == "desc" else "ASC"
                colType = fields.get(sfField, "TEXT")
                if colType == "BOOLEAN":
                    orderParts.append(f'COALESCE("{sfField}", FALSE) {direction}')
                else:
                    orderParts.append(f'"{sfField}" {direction} NULLS LAST')
    if not orderParts:
        orderParts.append('"id"')
    orderClause = " ORDER BY " + ", ".join(orderParts)

    # Guard against missing/zero page values: pageSize=0 would emit LIMIT 0
    # and divide by zero when computing totalPages.
    pageSize = (pagination.pageSize if pagination else None) or 50
    page = (pagination.page if pagination else None) or 1
    offset = (page - 1) * pageSize
    limitClause = f" LIMIT {pageSize} OFFSET {offset}"

    try:
        self.db._ensure_connection()
        with self.db.connection.cursor() as cur:
            countSql = f'SELECT COUNT(*) FROM "{table}"{whereClause}'
            cur.execute(countSql, whereValues)
            totalItems = cur.fetchone()["count"]
            dataSql = f'SELECT * FROM "{table}"{whereClause}{orderClause}{limitClause}'
            cur.execute(dataSql, whereValues)
            records = [dict(row) for row in cur.fetchall()]
            for rec in records:
                _parseRecordFields(rec, fields, f"search table {table}")
            totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
            return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
    except Exception as e:
        logger.error(f"_searchTransactionsPaginated SQL error: {e}", exc_info=True)
        try:
            self.db.connection.rollback()
        except Exception:
            pass
        return {"items": [], "totalItems": 0, "totalPages": 0}
def _buildScopeFilter( def _buildScopeFilter(
self, self,
mandateIds: Optional[List[str]], mandateIds: Optional[List[str]],

View file

@ -29,6 +29,7 @@ from modules.datamodels.datamodelUam import User
# DYNAMIC PART: Connectors to the Interface # DYNAMIC PART: Connectors to the Interface
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResult from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResult
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
@ -37,6 +38,9 @@ from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
chatDatabase = "poweron_chat"
registerDatabase(chatDatabase)
# Singleton factory for Chat instances # Singleton factory for Chat instances
_chatInterfaces = {} _chatInterfaces = {}
@ -314,7 +318,7 @@ class ChatObjects:
try: try:
# Get configuration values with defaults # Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_chat" dbDatabase = chatDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -12,6 +12,7 @@ from datetime import datetime, timezone, timedelta
from typing import Dict, Any, List, Optional from typing import Dict, Any, List, Optional
from modules.connectors.connectorDbPostgre import _get_cached_connector from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelKnowledge import FileContentIndex, ContentChunk, RoundMemory, WorkflowMemory from modules.datamodels.datamodelKnowledge import FileContentIndex, ContentChunk, RoundMemory, WorkflowMemory
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
@ -19,6 +20,9 @@ from modules.shared.timeUtils import getUtcTimestamp
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
knowledgeDatabase = "poweron_knowledge"
registerDatabase(knowledgeDatabase)
_instances: Dict[str, "KnowledgeObjects"] = {} _instances: Dict[str, "KnowledgeObjects"] = {}
@ -34,7 +38,7 @@ class KnowledgeObjects:
def _initializeDatabase(self): def _initializeDatabase(self):
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_knowledge" dbDatabase = knowledgeDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -14,6 +14,7 @@ import mimetypes
from typing import Dict, Any, List, Optional, Union from typing import Dict, Any, List, Optional, Union
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC
from modules.security.rbac import RbacClass from modules.security.rbac import RbacClass
from modules.datamodels.datamodelRbac import AccessRuleContext from modules.datamodels.datamodelRbac import AccessRuleContext
@ -34,6 +35,9 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
managementDatabase = "poweron_management"
registerDatabase(managementDatabase)
# Singleton factory for Management instances with AI service per context # Singleton factory for Management instances with AI service per context
_instancesManagement = {} _instancesManagement = {}
@ -127,7 +131,7 @@ class ComponentObjects:
try: try:
# Get configuration values with defaults # Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data") dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = "poweron_management" dbDatabase = managementDatabase
dbUser = APP_CONFIG.get("DB_USER") dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432)) dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -1087,12 +1091,15 @@ class ComponentObjects:
return newfileName return newfileName
counter += 1 counter += 1
def createFile(self, name: str, mimeType: str, content: bytes) -> FileItem: def createFile(self, name: str, mimeType: str, content: bytes, folderId: Optional[str] = None) -> FileItem:
"""Creates a new file entry if user has permission. Computes fileHash and fileSize from content. """Creates a new file entry if user has permission. Computes fileHash and fileSize from content.
Duplicate check: if a file with the same user + fileHash + fileName already exists, Duplicate check: if a file with the same user + fileHash + fileName already exists,
the existing file is returned instead of creating a new one. the existing file is returned instead of creating a new one.
Same hash with different name is allowed (intentional copy by user). Same hash with different name is allowed (intentional copy by user).
Args:
folderId: Optional parent folder ID. None/empty means the root folder.
""" """
if not self.checkRbacPermission(FileItem, "create"): if not self.checkRbacPermission(FileItem, "create"):
raise PermissionError("No permission to create files") raise PermissionError("No permission to create files")
@ -1120,6 +1127,11 @@ class ComponentObjects:
else: else:
scope = "personal" scope = "personal"
# Normalize folderId: treat empty string as "no folder" (= root) NULL in DB
normalizedFolderId: Optional[str] = folderId
if isinstance(normalizedFolderId, str) and not normalizedFolderId.strip():
normalizedFolderId = None
fileItem = FileItem( fileItem = FileItem(
mandateId=mandateId, mandateId=mandateId,
featureInstanceId=featureInstanceId, featureInstanceId=featureInstanceId,
@ -1128,7 +1140,7 @@ class ComponentObjects:
mimeType=mimeType, mimeType=mimeType,
fileSize=fileSize, fileSize=fileSize,
fileHash=fileHash, fileHash=fileHash,
folderId="", folderId=normalizedFolderId,
) )
# Store in database # Store in database
@ -1842,14 +1854,18 @@ class ComponentObjects:
logger.error(f"Error getting file content: {str(e)}") logger.error(f"Error getting file content: {str(e)}")
return None return None
def saveUploadedFile(self, fileContent: bytes, fileName: str) -> tuple[FileItem, str]: def saveUploadedFile(self, fileContent: bytes, fileName: str, folderId: Optional[str] = None) -> tuple[FileItem, str]:
"""Saves an uploaded file if user has permission.""" """Saves an uploaded file if user has permission.
Args:
folderId: Optional parent folder ID. None means root folder.
"""
try: try:
# Check file creation permission # Check file creation permission
if not self.checkRbacPermission(FileItem, "create"): if not self.checkRbacPermission(FileItem, "create"):
raise PermissionError("No permission to upload files") raise PermissionError("No permission to upload files")
logger.debug(f"Starting upload process for file: {fileName}") logger.debug(f"Starting upload process for file: {fileName} (folderId={folderId!r})")
if not isinstance(fileContent, bytes): if not isinstance(fileContent, bytes):
logger.error(f"Invalid fileContent type: {type(fileContent)}") logger.error(f"Invalid fileContent type: {type(fileContent)}")
@ -1874,7 +1890,8 @@ class ComponentObjects:
fileItem = self.createFile( fileItem = self.createFile(
name=fileName, name=fileName,
mimeType=mimeType, mimeType=mimeType,
content=fileContent content=fileContent,
folderId=folderId,
) )
# Save binary data # Save binary data

View file

@ -13,6 +13,7 @@ from datetime import datetime, timezone
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelMembership import UserMandate from modules.datamodels.datamodelMembership import UserMandate
from modules.datamodels.datamodelSubscription import ( from modules.datamodels.datamodelSubscription import (
@ -31,6 +32,7 @@ from modules.datamodels.datamodelSubscription import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
SUBSCRIPTION_DATABASE = "poweron_billing" SUBSCRIPTION_DATABASE = "poweron_billing"
registerDatabase(SUBSCRIPTION_DATABASE)
_subscriptionInterfaces: Dict[str, "SubscriptionObjects"] = {} _subscriptionInterfaces: Dict[str, "SubscriptionObjects"] = {}

View file

@ -393,6 +393,13 @@ def getRecordsetPaginatedWithRBAC(
continue continue
if key not in validColumns: if key not in validColumns:
continue continue
if val is None:
# val=None in pagination.filters means "match empty/null"
# (same convention as connectorDbPostgre._buildPaginationClauses).
# Covers both historical empty-string values and true NULLs
# e.g. root-folder files where folderId may be "" or NULL.
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
continue
if isinstance(val, dict): if isinstance(val, dict):
op = val.get("operator", "equals") op = val.get("operator", "equals")
v = val.get("value", "") v = val.get("value", "")
@ -569,6 +576,13 @@ def getDistinctColumnValuesWithRBAC(
continue continue
if key not in validColumns: if key not in validColumns:
continue continue
if val is None:
# val=None in pagination.filters means "match empty/null"
# (same convention as connectorDbPostgre._buildPaginationClauses).
# Covers both historical empty-string values and true NULLs
# e.g. root-folder files where folderId may be "" or NULL.
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
continue
if isinstance(val, dict): if isinstance(val, dict):
op = val.get("operator", "equals") op = val.get("operator", "equals")
v = val.get("value", "") v = val.get("value", "")

View file

@ -0,0 +1,102 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
SysAdmin API for database table statistics and FK orphan detection/cleanup.
"""
import logging
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Request, status
from pydantic import BaseModel, Field
from modules.auth import limiter
from modules.auth.authentication import requireSysAdminRole
from modules.datamodels.datamodelUam import User
from modules.system.databaseHealth import (
_cleanAllOrphans,
_cleanOrphans,
_getTableStats,
_scanOrphans,
)
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/api/admin/database-health",
tags=["Admin Database Health"],
)
class OrphanCleanRequest(BaseModel):
    """Body for deleting orphans for one FK relationship.

    The triple (db, table, column) identifies exactly one foreign-key
    relationship, as reported by the orphan scan endpoint. Invalid
    combinations are rejected by the cleanup helper with a ValueError,
    which the route translates into HTTP 400.
    """

    # Logical source database name, e.g. "poweron_app".
    db: str = Field(..., description="Source database name (e.g. poweron_app)")
    # Source table; tables are named after their Pydantic model classes.
    table: str = Field(..., description="Source table (Pydantic model class name)")
    # FK column on the source table whose referenced rows may be missing.
    column: str = Field(..., description="FK column on the source table")
@router.get("/stats")
@limiter.limit("30/minute")
def getDatabaseTableStats(
    request: Request,
    db: Optional[str] = None,
    currentUser: User = Depends(requireSysAdminRole),
) -> Dict[str, Any]:
    """Table statistics from pg_stat_user_tables (optional filter by database name)."""
    # Delegate straight to the system helper; `db` narrows the scan to one
    # logical database when provided.
    return {"stats": _getTableStats(dbFilter=db)}
@router.get("/orphans")
@limiter.limit("10/minute")
def getDatabaseOrphans(
    request: Request,
    db: Optional[str] = None,
    currentUser: User = Depends(requireSysAdminRole),
) -> Dict[str, Any]:
    """FK orphan scan (optional filter by source database name)."""
    # Scanning is read-only; the optional `db` parameter restricts the scan
    # to relationships whose source table lives in that database.
    return {"orphans": _scanOrphans(dbFilter=db)}
@router.post("/orphans/clean")
@limiter.limit("10/minute")
def postDatabaseOrphansClean(
    request: Request,
    body: OrphanCleanRequest,
    currentUser: User = Depends(requireSysAdminRole),
) -> Dict[str, Any]:
    """Delete orphaned rows for a single FK relationship."""
    try:
        deleted = _cleanOrphans(body.db, body.table, body.column)
    except ValueError as exc:
        # An unknown db/table/column combination is a client error.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(exc),
        ) from exc
    else:
        # Audit trail: record who cleaned which relationship and the row count.
        logger.info(
            "SysAdmin orphan clean: user=%s db=%s table=%s column=%s deleted=%s",
            currentUser.username,
            body.db,
            body.table,
            body.column,
            deleted,
        )
        return {"deleted": deleted}
@router.post("/orphans/clean-all")
@limiter.limit("2/minute")
def postDatabaseOrphansCleanAll(
    request: Request,
    currentUser: User = Depends(requireSysAdminRole),
) -> Dict[str, Any]:
    """Run orphan cleanup for every relationship that currently has orphans."""
    cleanupBatches: List[dict] = _cleanAllOrphans()
    # Audit trail: one batch entry per FK relationship that had orphans.
    logger.info(
        "SysAdmin orphan clean-all: user=%s batches=%s",
        currentUser.username,
        len(cleanupBatches),
    )
    return {"results": cleanupBatches}

View file

@ -10,9 +10,10 @@ Provides three views:
RBAC: mandate-admin or compliance-viewer role required. RBAC: mandate-admin or compliance-viewer role required.
""" """
import json
import logging import logging
import re import re
from typing import Optional from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from starlette.requests import Request from starlette.requests import Request
@ -27,6 +28,107 @@ routeApiMsg = apiRouteContext("routeAudit")
router = APIRouter(prefix="/api/audit", tags=["Audit"]) router = APIRouter(prefix="/api/audit", tags=["Audit"])
def _applySortFilterSearch(
items: List[Dict[str, Any]],
*,
sortJson: Optional[str] = None,
filtersJson: Optional[str] = None,
search: Optional[str] = None,
searchableKeys: Optional[List[str]] = None,
) -> List[Dict[str, Any]]:
"""Apply sort, filter and search to a list of dicts in-memory."""
if filtersJson:
try:
filters = json.loads(filtersJson) if isinstance(filtersJson, str) else filtersJson
if isinstance(filters, dict):
for key, val in filters.items():
if val is None or val == "":
continue
if isinstance(val, list):
items = [r for r in items if str(r.get(key, "")) in [str(v) for v in val]]
else:
items = [r for r in items if str(r.get(key, "")).lower() == str(val).lower()]
except (json.JSONDecodeError, TypeError):
pass
if search and searchableKeys:
needle = search.lower()
items = [r for r in items if any(needle in str(r.get(k, "")).lower() for k in searchableKeys)]
if sortJson:
try:
sortList = json.loads(sortJson) if isinstance(sortJson, str) else sortJson
if isinstance(sortList, list):
for sortDef in reversed(sortList):
field = sortDef.get("field", "")
desc = sortDef.get("direction", "asc") == "desc"
items.sort(key=lambda r, f=field: (r.get(f) is None, r.get(f, "")), reverse=desc)
except (json.JSONDecodeError, TypeError):
pass
return items
def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[str]:
"""Extract sorted distinct non-empty string values for a column."""
vals = set()
for r in items:
v = r.get(column)
if v is not None and v != "":
vals.add(str(v))
return sorted(vals)
def _enrichUserAndInstanceLabels(
    items: List[Dict[str, Any]],
    context: "RequestContext",
    userKey: str = "userId",
    usernameKey: str = "username",
    instanceKey: str = "featureInstanceId",
    instanceLabelKey: str = "instanceLabel",
) -> None:
    """Resolve userId → username and featureInstanceId → label in-place.

    Rows that already carry a username are left untouched; lookup failures
    are logged at debug level and the rows simply stay unenriched.
    """
    # First pass: collect the IDs that still need resolving.
    pendingUserIds = set()
    pendingInstanceIds = set()
    for row in items:
        userIdVal = row.get(userKey)
        if userIdVal and not row.get(usernameKey):
            pendingUserIds.add(userIdVal)
        instanceIdVal = row.get(instanceKey)
        if instanceIdVal:
            pendingInstanceIds.add(instanceIdVal)
    resolvedUsers: Dict[str, str] = {}
    resolvedInstances: Dict[str, str] = {}
    try:
        from modules.interfaces.interfaceDbApp import getInterface
        appIf = getInterface(
            context.user,
            mandateId=str(context.mandateId) if context.mandateId else None,
        )
        if pendingUserIds:
            for uid, userObj in appIf.getUsersByIds(list(pendingUserIds)).items():
                resolvedUsers[uid] = (
                    getattr(userObj, "displayName", None)
                    or getattr(userObj, "email", None)
                    or uid
                )
        for iid in pendingInstanceIds:
            featureInstance = appIf.getFeatureInstance(iid)
            if featureInstance:
                resolvedInstances[iid] = (
                    getattr(featureInstance, "label", None)
                    or getattr(featureInstance, "featureCode", None)
                    or iid
                )
    except Exception as e:
        # Best-effort enrichment: never let a lookup failure break the caller.
        logger.debug("_enrichUserAndInstanceLabels: %s", e)
    # Second pass: write the resolved labels back into the rows.
    for row in items:
        userIdVal = row.get(userKey)
        if userIdVal and not row.get(usernameKey) and userIdVal in resolvedUsers:
            row[usernameKey] = resolvedUsers[userIdVal]
        instanceIdVal = row.get(instanceKey)
        if instanceIdVal and instanceIdVal in resolvedInstances:
            row[instanceLabelKey] = resolvedInstances[instanceIdVal]
def _requireAuditAccess(context: RequestContext): def _requireAuditAccess(context: RequestContext):
"""Raise 403 unless user has mandate-admin or compliance-viewer access.""" """Raise 403 unless user has mandate-admin or compliance-viewer access."""
from modules.auth.authentication import _hasSysAdminRole from modules.auth.authentication import _hasSysAdminRole
@ -62,6 +164,11 @@ async def getAiAuditLog(
dateTo: Optional[float] = Query(None, description="UTC epoch seconds"), dateTo: Optional[float] = Query(None, description="UTC epoch seconds"),
limit: int = Query(50, ge=1, le=500), limit: int = Query(50, ge=1, le=500),
offset: int = Query(0, ge=0), offset: int = Query(0, ge=0),
sort: Optional[str] = Query(None, description='JSON array, e.g. [{"field":"timestamp","direction":"desc"}]'),
filters: Optional[str] = Query(None, description='JSON object, e.g. {"aiModel":"gpt-4o"}'),
search: Optional[str] = Query(None),
mode: Optional[str] = Query(None, description="'filterValues' to get distinct values for a column"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
): ):
_requireAuditAccess(context) _requireAuditAccess(context)
mandateId = str(context.mandateId) if context.mandateId else "" mandateId = str(context.mandateId) if context.mandateId else ""
@ -69,16 +176,35 @@ async def getAiAuditLog(
raise HTTPException(status_code=400, detail=routeApiMsg("Mandanten-ID erforderlich")) raise HTTPException(status_code=400, detail=routeApiMsg("Mandanten-ID erforderlich"))
from modules.shared.aiAuditLogger import aiAuditLogger from modules.shared.aiAuditLogger import aiAuditLogger
return aiAuditLogger.getAiAuditLogs( result = aiAuditLogger.getAiAuditLogs(
mandateId, mandateId,
userId=userId, userId=userId,
featureInstanceId=featureInstanceId, featureInstanceId=featureInstanceId,
aiModel=aiModel, aiModel=aiModel,
fromTimestamp=dateFrom, fromTimestamp=dateFrom,
toTimestamp=dateTo, toTimestamp=dateTo,
limit=limit, limit=9999,
offset=offset, offset=0,
) )
items = result.get("items", [])
_enrichUserAndInstanceLabels(items, context)
if mode == "filterValues" and column:
items = _applySortFilterSearch(items, filtersJson=filters)
return _distinctColumnValues(items, column)
items = _applySortFilterSearch(
items,
sortJson=sort,
filtersJson=filters,
search=search,
searchableKeys=["username", "aiModel", "instanceLabel", "aiProvider", "operationType"],
)
totalItems = len(items)
page = items[offset: offset + limit]
return {"items": page, "totalItems": totalItems}
@router.get("/ai-log/{entryId}/content") @router.get("/ai-log/{entryId}/content")
@ -134,6 +260,11 @@ async def getAuditLog(
dateTo: Optional[float] = Query(None), dateTo: Optional[float] = Query(None),
limit: int = Query(100, ge=1, le=500), limit: int = Query(100, ge=1, le=500),
offset: int = Query(0, ge=0), offset: int = Query(0, ge=0),
sort: Optional[str] = Query(None),
filters: Optional[str] = Query(None),
search: Optional[str] = Query(None),
mode: Optional[str] = Query(None),
column: Optional[str] = Query(None),
): ):
_requireAuditAccess(context) _requireAuditAccess(context)
mandateId = str(context.mandateId) if context.mandateId else None mandateId = str(context.mandateId) if context.mandateId else None
@ -146,8 +277,23 @@ async def getAuditLog(
action=action, action=action,
fromTimestamp=dateFrom, fromTimestamp=dateFrom,
toTimestamp=dateTo, toTimestamp=dateTo,
limit=limit + offset + 1, limit=9999,
) )
_enrichUserAndInstanceLabels(records, context)
if mode == "filterValues" and column:
records = _applySortFilterSearch(records, filtersJson=filters)
return _distinctColumnValues(records, column)
records = _applySortFilterSearch(
records,
sortJson=sort,
filtersJson=filters,
search=search,
searchableKeys=["username", "action", "resourceType", "category"],
)
totalItems = len(records) totalItems = len(records)
page = records[offset: offset + limit] page = records[offset: offset + limit]
return {"items": page, "totalItems": totalItems} return {"items": page, "totalItems": totalItems}
@ -181,6 +327,11 @@ async def getNeutralizationMappings(
context: RequestContext = Depends(getRequestContext), context: RequestContext = Depends(getRequestContext),
limit: int = Query(200, ge=1, le=2000), limit: int = Query(200, ge=1, le=2000),
offset: int = Query(0, ge=0), offset: int = Query(0, ge=0),
sort: Optional[str] = Query(None),
filters: Optional[str] = Query(None),
search: Optional[str] = Query(None),
mode: Optional[str] = Query(None),
column: Optional[str] = Query(None),
): ):
_requireAuditAccess(context) _requireAuditAccess(context)
mandateId = str(context.mandateId) if context.mandateId else "" mandateId = str(context.mandateId) if context.mandateId else ""
@ -196,7 +347,23 @@ async def getNeutralizationMappings(
pType = item.get("patternType", "") pType = item.get("patternType", "")
uid = item.get("id", "") uid = item.get("id", "")
item["placeholder"] = f"[{pType}.{uid}]" if pType and uid else uid item["placeholder"] = f"[{pType}.{uid}]" if pType and uid else uid
_enrichUserAndInstanceLabels(items, context)
if mode == "filterValues" and column:
items = _applySortFilterSearch(items, filtersJson=filters)
return _distinctColumnValues(items, column)
items = _applySortFilterSearch(
items,
sortJson=sort,
filtersJson=filters,
search=search,
searchableKeys=["placeholder", "originalText", "patternType"],
)
if not sort:
items.sort(key=lambda r: (r.get("patternType", ""), r.get("originalText", ""))) items.sort(key=lambda r: (r.get("patternType", ""), r.get("originalText", "")))
totalItems = len(items) totalItems = len(items)
page = items[offset: offset + limit] page = items[offset: offset + limit]
return {"items": page, "totalItems": totalItems} return {"items": page, "totalItems": totalItems}

View file

@ -427,13 +427,53 @@ def update_connection(
detail=routeApiMsg("Connection not found") detail=routeApiMsg("Connection not found")
) )
# Update connection fields # Merge incoming changes into a dict and re-validate via pydantic.
# Direct setattr() bypasses type coercion (PowerOnModel doesn't enable
# validate_assignment), which leaves enum fields as raw strings and
# later breaks .value access. Also filters out computed / unknown keys.
writableFields = set(UserConnection.model_fields.keys())
previous = connection.model_dump()
merged = dict(previous)
for field, value in connection_data.items(): for field, value in connection_data.items():
if hasattr(connection, field): if field in writableFields:
setattr(connection, field, value) merged[field] = value
merged["lastChecked"] = getUtcTimestamp()
connection = UserConnection.model_validate(merged)
# Update lastChecked timestamp using UTC timestamp # If this is a remote (non-local) connection and any identity-bearing
connection.lastChecked = getUtcTimestamp() # field changed, the stored OAuth tokens no longer match the account.
# Force the user to reconnect: mark PENDING and revoke existing tokens.
identityFields = ("externalUsername", "externalEmail", "externalId", "authority")
authorityValue = (
connection.authority.value
if hasattr(connection.authority, "value")
else str(connection.authority)
)
isRemote = authorityValue != AuthAuthority.LOCAL.value
identityChanged = any(
previous.get(field) != merged.get(field) for field in identityFields
)
if isRemote and identityChanged:
connection.status = ConnectionStatus.PENDING
connection.expiresAt = None
try:
existingTokens = interface.db.getRecordset(
Token, recordFilter={"connectionId": connectionId}
)
for token in existingTokens:
interface.revokeTokenById(
token["id"],
revokedBy=currentUser.id,
reason="connection identity changed",
)
logger.info(
f"Revoked {len(existingTokens)} token(s) for connection "
f"{connectionId} after identity change; reconnect required."
)
except Exception as e:
logger.warning(
f"Failed to revoke tokens for connection {connectionId}: {str(e)}"
)
# Update connection - models now handle timestamp serialization automatically # Update connection - models now handle timestamp serialization automatically
interface.db.recordModify(UserConnection, connectionId, connection.model_dump()) interface.db.recordModify(UserConnection, connectionId, connection.model_dump())

View file

@ -243,7 +243,15 @@ def get_files(
recordFilter = None recordFilter = None
if paginationParams and paginationParams.filters and "folderId" in paginationParams.filters: if paginationParams and paginationParams.filters and "folderId" in paginationParams.filters:
fVal = paginationParams.filters.pop("folderId") fVal = paginationParams.filters.get("folderId")
# For a concrete folderId we use recordFilter (exact equality).
# For null / empty (= "root") we keep it in pagination.filters so the
# connector applies `IS NULL OR = ''` files predating the folderId
# fix were stored with an empty string instead of NULL.
if fVal is None or (isinstance(fVal, str) and fVal.strip() == ""):
paginationParams.filters["folderId"] = None
else:
paginationParams.filters.pop("folderId")
recordFilter = {"folderId": fVal} recordFilter = {"folderId": fVal}
result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter) result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter)
@ -282,13 +290,19 @@ async def upload_file(
file: UploadFile = File(...), file: UploadFile = File(...),
workflowId: Optional[str] = Form(None), workflowId: Optional[str] = Form(None),
featureInstanceId: Optional[str] = Form(None), featureInstanceId: Optional[str] = Form(None),
currentUser: User = Depends(getCurrentUser) folderId: Optional[str] = Form(None),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext),
) -> JSONResponse: ) -> JSONResponse:
# Add fileName property to UploadFile for consistency with backend model # Add fileName property to UploadFile for consistency with backend model
file.fileName = file.filename file.fileName = file.filename
"""Upload a file""" """Upload a file"""
try: try:
managementInterface = interfaceDbManagement.getInterface(currentUser) managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
# Read file # Read file
fileContent = await file.read() fileContent = await file.read()
@ -301,13 +315,30 @@ async def upload_file(
detail=f"File too large. Maximum size: {interfaceDbManagement.APP_CONFIG.get('File_Management_MAX_UPLOAD_SIZE_MB')}MB" detail=f"File too large. Maximum size: {interfaceDbManagement.APP_CONFIG.get('File_Management_MAX_UPLOAD_SIZE_MB')}MB"
) )
# Normalize folderId: empty string / "null" / "root" → None (root folder)
normalizedFolderId: Optional[str] = folderId
if isinstance(normalizedFolderId, str):
trimmed = normalizedFolderId.strip()
if not trimmed or trimmed.lower() in {"null", "none", "root"}:
normalizedFolderId = None
else:
normalizedFolderId = trimmed
# Save file via LucyDOM interface in the database # Save file via LucyDOM interface in the database
fileItem, duplicateType = managementInterface.saveUploadedFile(fileContent, file.filename) fileItem, duplicateType = managementInterface.saveUploadedFile(
fileContent, file.filename, folderId=normalizedFolderId
)
if featureInstanceId and not fileItem.featureInstanceId: if featureInstanceId and not fileItem.featureInstanceId:
managementInterface.updateFile(fileItem.id, {"featureInstanceId": featureInstanceId}) managementInterface.updateFile(fileItem.id, {"featureInstanceId": featureInstanceId})
fileItem.featureInstanceId = featureInstanceId fileItem.featureInstanceId = featureInstanceId
# For exact duplicates we keep the existing record, but move it into the
# target folder so the user actually sees their upload land where they expect.
if duplicateType == "exact_duplicate" and normalizedFolderId != getattr(fileItem, "folderId", None):
managementInterface.updateFile(fileItem.id, {"folderId": normalizedFolderId})
fileItem.folderId = normalizedFolderId
# Determine response message based on duplicate type # Determine response message based on duplicate type
if duplicateType == "exact_duplicate": if duplicateType == "exact_duplicate":
message = f"File '{file.filename}' already exists with identical content. Reusing existing file." message = f"File '{file.filename}' already exists with identical content. Reusing existing file."
@ -502,6 +533,153 @@ def move_folder(
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))
@router.patch("/folders/{folderId}/scope")
@limiter.limit("10/minute")
def _updateFolderScope(
    request: Request,
    folderId: str = Path(..., description="ID of the folder"),
    scope: str = Body(..., embed=True),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Set a folder's visibility scope and propagate it to every file inside.

    Propagation is recursive (sub-folders included) via
    _collectFolderFileIds. The "global" scope is reserved for sysadmins.
    Raises 400 on an unknown scope, 403 on missing privilege, 404 when the
    folder does not exist, 500 on any other failure.
    """
    validScopes = {"personal", "featureInstance", "mandate", "global"}
    if scope not in validScopes:
        raise HTTPException(status_code=400, detail=f"Invalid scope: {scope}. Must be one of {validScopes}")
    if scope == "global" and not _hasSysAdminRole(context.user):
        raise HTTPException(status_code=403, detail=routeApiMsg("Only sysadmins can set global scope"))
    try:
        # Management interface scoped to the caller's mandate / feature instance.
        mgmt = interfaceDbManagement.getInterface(
            context.user,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        if not mgmt.getFolder(folderId):
            raise HTTPException(status_code=404, detail=routeApiMsg("Folder not found"))
        # Folder first, then each contained file; per-file failures are
        # logged but do not abort the remaining updates.
        mgmt.updateFolder(folderId, {"scope": scope})
        affectedFileIds = _collectFolderFileIds(mgmt, folderId)
        for affectedId in affectedFileIds:
            try:
                mgmt.updateFile(affectedId, {"scope": scope})
            except Exception as e:
                logger.error("Folder scope propagation: failed to update file %s: %s", affectedId, e)
        logger.info("Updated scope=%s for folder %s: %d files affected", scope, folderId, len(affectedFileIds))
        return {"folderId": folderId, "scope": scope, "filesUpdated": len(affectedFileIds)}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating folder scope: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.patch("/folders/{folderId}/neutralize")
@limiter.limit("10/minute")
def updateFolderNeutralize(
    request: Request,
    background_tasks: BackgroundTasks,
    folderId: str = Path(..., description="ID of the folder"),
    neutralize: bool = Body(..., embed=True),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Toggle neutralization on a folder. Propagates to all files inside (recursively).

    When turning ON: all files in the folder get ``neutralize=True``, their
    knowledge indexes are purged synchronously, and background re-indexing
    is triggered.

    When turning OFF: files revert to ``neutralize=False`` unless they were
    individually marked (not implemented yet -- all are reverted).

    Returns a summary dict with ``folderId``, the applied flag, and the
    number of files touched. Raises 404 if the folder does not exist,
    500 on any other failure.
    """
    try:
        # Management interface scoped to the caller's mandate / feature instance.
        mgmt = interfaceDbManagement.getInterface(
            context.user,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        folder = mgmt.getFolder(folderId)
        if not folder:
            raise HTTPException(status_code=404, detail=routeApiMsg("Folder not found"))
        # Flag the folder itself first, then propagate to the contained files.
        mgmt.updateFolder(folderId, {"neutralize": neutralize})
        fileIds = _collectFolderFileIds(mgmt, folderId)  # recursive, includes sub-folders
        logger.info("Folder neutralize toggle %s for folder %s: %d files affected", neutralize, folderId, len(fileIds))
        from modules.interfaces.interfaceDbKnowledge import getInterface as getKnowledgeInterface
        knowledgeDb = getKnowledgeInterface()
        for fid in fileIds:
            try:
                mgmt.updateFile(fid, {"neutralize": neutralize})
                if neutralize:
                    # Turning ON: purge the existing (non-neutralized) index
                    # synchronously so the raw content stops being searchable
                    # before the re-index lands.
                    try:
                        knowledgeDb.deleteFileContentIndex(fid)
                    except Exception as e:
                        logger.warning("Folder neutralize: failed to purge index for file %s: %s", fid, e)
                else:
                    # Turning OFF: mark existing index rows as original again.
                    try:
                        from modules.datamodels.datamodelKnowledge import FileContentIndex
                        # NOTE(review): filters FileContentIndex on "id" == file id —
                        # confirm the index record key really is the file id (a
                        # dedicated "fileId" column would be the more usual shape).
                        indices = knowledgeDb.db.getRecordset(FileContentIndex, recordFilter={"id": fid})
                        for idx in indices:
                            idxId = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
                            if idxId:
                                knowledgeDb.db.recordModify(FileContentIndex, idxId, {
                                    "neutralizationStatus": "original",
                                    "isNeutralized": False,
                                })
                    except Exception as e:
                        logger.warning("Folder neutralize OFF: metadata update failed for %s: %s", fid, e)
            except Exception as e:
                logger.error("Folder neutralize: failed to update file %s: %s", fid, e)
        # Queue re-indexing for every affected file. NOTE(review): this runs
        # for both ON and OFF toggles, although the docstring mentions it
        # only for ON — confirm that is intended.
        for fid in fileIds:
            fileMeta = mgmt.getFile(fid)
            if fileMeta:
                # getFile may return a model object or a plain dict; handle both.
                fn = fileMeta.fileName if hasattr(fileMeta, "fileName") else fileMeta.get("fileName", "")
                mt = fileMeta.mimeType if hasattr(fileMeta, "mimeType") else fileMeta.get("mimeType", "")
                # Default-argument binding pins the per-file values; a plain
                # closure would late-bind and re-index only the last file.
                async def _reindex(fileId=fid, fileName=fn, mimeType=mt):
                    try:
                        await _autoIndexFile(fileId=fileId, fileName=fileName, mimeType=mimeType, user=context.user)
                    except Exception as ex:
                        logger.error("Folder neutralize re-index failed for %s: %s", fileId, ex)
                background_tasks.add_task(_reindex)
        return {"folderId": folderId, "neutralize": neutralize, "filesUpdated": len(fileIds)}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating folder neutralize flag: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _collectFolderFileIds(mgmt, folderId: str) -> List[str]:
"""Recursively collect all file IDs in a folder and its sub-folders."""
fileIds = []
try:
files = mgmt.listFiles(folderId=folderId)
if isinstance(files, dict):
files = files.get("files", [])
for f in (files or []):
fid = f.get("id") if isinstance(f, dict) else getattr(f, "id", None)
if fid:
fileIds.append(fid)
except Exception as e:
logger.warning("_collectFolderFileIds: listFiles failed for folder %s: %s", folderId, e)
try:
subFolders = mgmt.listFolders(parentId=folderId)
for sf in (subFolders or []):
sfId = sf.get("id") if isinstance(sf, dict) else getattr(sf, "id", None)
if sfId:
fileIds.extend(_collectFolderFileIds(mgmt, sfId))
except Exception as e:
logger.warning("_collectFolderFileIds: listFolders failed for folder %s: %s", folderId, e)
return fileIds
@router.get("/folders/{folderId}/download") @router.get("/folders/{folderId}/download")
@limiter.limit("10/minute") @limiter.limit("10/minute")
def download_folder( def download_folder(
@ -1028,6 +1206,18 @@ def move_file(
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None, featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
) )
mgmt.updateFile(fileId, {"folderId": targetFolderId}) mgmt.updateFile(fileId, {"folderId": targetFolderId})
if targetFolderId:
try:
targetFolder = mgmt.getFolder(targetFolderId)
folderNeut = (targetFolder.get("neutralize") if isinstance(targetFolder, dict)
else getattr(targetFolder, "neutralize", False)) if targetFolder else False
if folderNeut:
mgmt.updateFile(fileId, {"neutralize": True})
logger.info("File %s moved to neutralized folder %s — inherited neutralize=True", fileId, targetFolderId)
except Exception as e:
logger.warning("File move: folder neutralize inheritance check failed for %s: %s", fileId, e)
return {"success": True, "fileId": fileId, "folderId": targetFolderId} return {"success": True, "fileId": fileId, "folderId": targetFolderId}
except Exception as e: except Exception as e:
logger.error(f"Error moving file: {e}") logger.error(f"Error moving file: {e}")

View file

@ -17,7 +17,7 @@ import json
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
# Import auth module # Import auth module
from modules.auth import limiter, requireSysAdminRole, getRequestContext, RequestContext from modules.auth import limiter, requireSysAdminRole, getRequestContext, getCurrentUser, RequestContext
# Import interfaces # Import interfaces
import modules.interfaces.interfaceDbApp as interfaceDbApp import modules.interfaces.interfaceDbApp as interfaceDbApp
@ -341,24 +341,58 @@ def create_mandate(
detail=f"Failed to create mandate: {str(e)}" detail=f"Failed to create mandate: {str(e)}"
) )
# Whitelist of mandate fields a non-sysadmin mandate admin may change;
# all other submitted keys are stripped before the update is applied.
_MANDATE_ADMIN_EDITABLE_FIELDS = {"label"}
def _isUserAdminOfMandate(userId: str, targetMandateId: str) -> bool:
    """Check mandate-admin without RequestContext (avoids Header param conflicts).

    True when the user holds the mandate-wide "admin" role (i.e. a role
    without a featureInstanceId) on *targetMandateId*. Any lookup error
    is logged and treated as "not an admin".
    """
    try:
        rootInterface = interfaceDbApp.getRootInterface()
        for membership in rootInterface.getUserMandates(userId):
            # Only the membership belonging to the target mandate matters.
            if str(getattr(membership, 'mandateId', '')) != str(targetMandateId):
                continue
            membershipId = getattr(membership, 'id', None)
            if not membershipId:
                continue
            for roleId in rootInterface.getRoleIdsForUserMandate(str(membershipId)):
                role = rootInterface.getRole(roleId)
                # featureInstanceId set would mean a feature-scoped admin,
                # which does not grant mandate-level rights.
                if role and role.roleLabel == "admin" and not role.featureInstanceId:
                    return True
    except Exception as e:
        logger.error(f"Error checking mandate admin: {e}")
    return False
@router.put("/{mandateId}", response_model=Mandate) @router.put("/{mandateId}", response_model=Mandate)
@limiter.limit("10/minute") @limiter.limit("10/minute")
def update_mandate( def update_mandate(
request: Request, request: Request,
mandateId: str = Path(..., description="ID of the mandate to update"), mandateId: str = Path(..., description="ID of the mandate to update"),
mandateData: dict = Body(..., description="Mandate update data"), mandateData: dict = Body(..., description="Mandate update data"),
currentUser: User = Depends(requireSysAdminRole) currentUser: User = Depends(getCurrentUser)
) -> Mandate: ) -> Mandate:
""" """
Update an existing mandate. Update an existing mandate.
MULTI-TENANT: SysAdmin-only. MULTI-TENANT:
- SysAdmin: full update
- MandateAdmin: only label
""" """
from modules.auth import _hasSysAdminRole as _checkSysAdminRole
userId = str(currentUser.id)
isSysAdmin = _checkSysAdminRole(userId)
if not isSysAdmin:
if not _isUserAdminOfMandate(userId, mandateId):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to update mandate")
)
try: try:
logger.debug(f"Updating mandate {mandateId} with data: {mandateData}") logger.debug(f"Updating mandate {mandateId} with data: {mandateData}")
appInterface = interfaceDbApp.getRootInterface() appInterface = interfaceDbApp.getRootInterface()
# Check if mandate exists
existingMandate = appInterface.getMandate(mandateId) existingMandate = appInterface.getMandate(mandateId)
if not existingMandate: if not existingMandate:
raise HTTPException( raise HTTPException(
@ -366,7 +400,14 @@ def update_mandate(
detail=f"Mandate with ID {mandateId} not found" detail=f"Mandate with ID {mandateId} not found"
) )
# Update mandate - mandateData is already a dict if not isSysAdmin:
mandateData = {k: v for k, v in mandateData.items() if k in _MANDATE_ADMIN_EDITABLE_FIELDS}
if not mandateData:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("No editable fields submitted")
)
updatedMandate = appInterface.updateMandate(mandateId, mandateData) updatedMandate = appInterface.updateMandate(mandateId, mandateData)
if not updatedMandate: if not updatedMandate:
@ -375,7 +416,7 @@ def update_mandate(
detail=routeApiMsg("Failed to update mandate") detail=routeApiMsg("Failed to update mandate")
) )
logger.info(f"Mandate {mandateId} updated by SysAdmin {currentUser.id}") logger.info(f"Mandate {mandateId} updated by user {currentUser.id} (sysadmin={isSysAdmin})")
return updatedMandate return updatedMandate
except HTTPException: except HTTPException:

View file

@ -3,7 +3,7 @@
"""PATCH endpoints for DataSource and FeatureDataSource scope/neutralize tagging.""" """PATCH endpoints for DataSource and FeatureDataSource scope/neutralize tagging."""
import logging import logging
from typing import Any, Dict from typing import Any, Dict, List, Optional
from fastapi import APIRouter, HTTPException, Depends, Path, Request, Body from fastapi import APIRouter, HTTPException, Depends, Path, Request, Body
from modules.auth import limiter, getRequestContext, RequestContext from modules.auth import limiter, getRequestContext, RequestContext
@ -97,3 +97,32 @@ def _updateDataSourceNeutralize(
except Exception as e: except Exception as e:
logger.error("Error updating datasource neutralize: %s", e) logger.error("Error updating datasource neutralize: %s", e)
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))
@router.patch("/{sourceId}/neutralize-fields")
@limiter.limit("30/minute")
def _updateNeutralizeFields(
    request: Request,
    sourceId: str = Path(..., description="ID of the FeatureDataSource"),
    neutralizeFields: List[str] = Body(..., embed=True),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Update the list of field names to neutralize on a FeatureDataSource.

    Non-string and empty entries are dropped; an empty result is stored
    as None rather than an empty list. Raises 404 when the source does
    not exist, 500 on any other failure.
    """
    try:
        from modules.interfaces.interfaceDbApp import getRootInterface
        rootIf = getRootInterface()
        if not rootIf.db.getRecord(FeatureDataSource, sourceId):
            raise HTTPException(status_code=404, detail=f"FeatureDataSource {sourceId} not found")
        # Keep only non-empty strings; anything else is silently discarded.
        cleanFields = [name for name in (neutralizeFields or []) if name and isinstance(name, str)]
        rootIf.db.recordModify(FeatureDataSource, sourceId, {
            "neutralizeFields": cleanFields or None,
        })
        logger.info("Updated neutralizeFields=%s for FeatureDataSource %s", cleanFields, sourceId)
        return {"sourceId": sourceId, "neutralizeFields": cleanFields, "updated": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating neutralizeFields: %s", e)
        raise HTTPException(status_code=500, detail=str(e))

View file

@ -481,8 +481,9 @@ def update_user(
detail=f"User with ID {userId} not found" detail=f"User with ID {userId} not found"
) )
# Update user # SysAdmins may toggle the isSysAdmin flag on other users
updatedUser = rootInterface.updateUser(userId, userData) callerIsSysAdmin = context.isSysAdmin or context.hasSysAdminRole
updatedUser = rootInterface.updateUser(userId, userData, allowSysAdminChange=(callerIsSysAdmin and not isSelfUpdate))
if not updatedUser: if not updatedUser:
raise HTTPException( raise HTTPException(

View file

@ -26,6 +26,7 @@ from modules.datamodels.datamodelPagination import PaginationParams, normalize_p
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import ( from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoRun, AutoStepLog, AutoWorkflow, AutoTask, AutoVersion, AutoRun, AutoStepLog, AutoWorkflow, AutoTask, AutoVersion,
) )
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import graphicalEditorDatabase
from modules.shared.i18nRegistry import apiRouteContext from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeWorkflowDashboard") routeApiMsg = apiRouteContext("routeWorkflowDashboard")
@ -35,13 +36,11 @@ limiter = Limiter(key_func=get_remote_address)
router = APIRouter(prefix="/api/system/workflow-runs", tags=["WorkflowDashboard"]) router = APIRouter(prefix="/api/system/workflow-runs", tags=["WorkflowDashboard"])
_GREENFIELD_DB = "poweron_graphicaleditor"
def _getDb() -> DatabaseConnector: def _getDb() -> DatabaseConnector:
return DatabaseConnector( return DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"), dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase=_GREENFIELD_DB, dbDatabase=graphicalEditorDatabase,
dbUser=APP_CONFIG.get("DB_USER"), dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"), dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)), dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),

View file

@ -9,6 +9,7 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResul
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.coreTools._helpers import ( from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
_buildResolverDbFromServices,
_getOrCreateTempFolder, _getOrCreateTempFolder,
_looksLikeBinary, _looksLikeBinary,
_resolveFileScope, _resolveFileScope,
@ -22,20 +23,6 @@ def _registerConnectionTools(registry: ToolRegistry, services):
"""Auto-extracted from registerCoreTools.""" """Auto-extracted from registerCoreTools."""
# ---- Connection tools (external data sources) ---- # ---- Connection tools (external data sources) ----
def _buildResolverDb():
"""Build a DB adapter that ConnectorResolver can use to load UserConnections.
interfaceDbApp has getUserConnectionById; ConnectorResolver expects getUserConnection."""
chatService = services.chat
appIf = getattr(chatService, "interfaceDbApp", None)
if appIf and hasattr(appIf, "getUserConnectionById"):
class _Adapter:
def __init__(self, app):
self._app = app
def getUserConnection(self, connectionId: str):
return self._app.getUserConnectionById(connectionId)
return _Adapter(appIf)
return getattr(chatService, "interfaceDbComponent", None)
async def _listConnections(args: Dict[str, Any], context: Dict[str, Any]): async def _listConnections(args: Dict[str, Any], context: Dict[str, Any]):
try: try:
chatService = services.chat chatService = services.chat
@ -49,7 +36,12 @@ def _registerConnectionTools(registry: ToolRegistry, services):
authorityVal = authority.value if hasattr(authority, "value") else str(authority) authorityVal = authority.value if hasattr(authority, "value") else str(authority)
username = conn.get("externalUsername", "") if isinstance(conn, dict) else getattr(conn, "externalUsername", "") username = conn.get("externalUsername", "") if isinstance(conn, dict) else getattr(conn, "externalUsername", "")
email = conn.get("externalEmail", "") if isinstance(conn, dict) else getattr(conn, "externalEmail", "") email = conn.get("externalEmail", "") if isinstance(conn, dict) else getattr(conn, "externalEmail", "")
lines.append(f"- connectionId: {connId} | {authorityVal} | {username} ({email})") cid = conn.get("id", "") if isinstance(conn, dict) else getattr(conn, "id", "")
ref = f"connection:{authorityVal}:{username}"
lines.append(
f"- {ref} connectionId={cid} ({email}) "
f"(use this full connection: line or connectionId as connectionReference)"
)
return ToolResult(toolCallId="", toolName="listConnections", success=True, data="\n".join(lines)) return ToolResult(toolCallId="", toolName="listConnections", success=True, data="\n".join(lines))
except Exception as e: except Exception as e:
return ToolResult(toolCallId="", toolName="listConnections", success=False, error=str(e)) return ToolResult(toolCallId="", toolName="listConnections", success=False, error=str(e))
@ -65,7 +57,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver( resolver = ConnectorResolver(
services.getService("security"), services.getService("security"),
_buildResolverDb(), _buildResolverDbFromServices(services),
) )
adapter = await resolver.resolveService(connectionId, service) adapter = await resolver.resolveService(connectionId, service)
chatService = services.chat chatService = services.chat
@ -115,7 +107,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver( resolver = ConnectorResolver(
services.getService("security"), services.getService("security"),
_buildResolverDb(), _buildResolverDbFromServices(services),
) )
adapter = await resolver.resolveService(connectionId, "outlook") adapter = await resolver.resolveService(connectionId, "outlook")

View file

@ -9,6 +9,7 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResul
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.coreTools._helpers import ( from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
_buildResolverDbFromServices,
_getOrCreateTempFolder, _getOrCreateTempFolder,
_looksLikeBinary, _looksLikeBinary,
_resolveFileScope, _resolveFileScope,
@ -88,7 +89,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver( resolver = ConnectorResolver(
services.getService("security"), services.getService("security"),
_buildResolverDb(), _buildResolverDbFromServices(services),
) )
adapter = await resolver.resolveService(connectionId, service) adapter = await resolver.resolveService(connectionId, service)
entries = await adapter.browse(browsePath, filter=args.get("filter")) entries = await adapter.browse(browsePath, filter=args.get("filter"))
@ -124,7 +125,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver( resolver = ConnectorResolver(
services.getService("security"), services.getService("security"),
_buildResolverDb(), _buildResolverDbFromServices(services),
) )
adapter = await resolver.resolveService(connectionId, service) adapter = await resolver.resolveService(connectionId, service)
entries = await adapter.search(query, path=basePath) entries = await adapter.search(query, path=basePath)
@ -160,7 +161,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
fullPath = filePath if filePath.startswith("/") else f"{basePath.rstrip('/')}/{filePath}" fullPath = filePath if filePath.startswith("/") else f"{basePath.rstrip('/')}/{filePath}"
resolver = ConnectorResolver( resolver = ConnectorResolver(
services.getService("security"), services.getService("security"),
_buildResolverDb(), _buildResolverDbFromServices(services),
) )
adapter = await resolver.resolveService(connectionId, service) adapter = await resolver.resolveService(connectionId, service)
result = await adapter.download(fullPath) result = await adapter.download(fullPath)

View file

@ -2,6 +2,7 @@
# All rights reserved. # All rights reserved.
"""Document and vision tools (containers, content objects, image description).""" """Document and vision tools (containers, content objects, image description)."""
import json
import logging import logging
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
@ -18,6 +19,76 @@ from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _parseUdmJson(raw: Any) -> Optional[Dict[str, Any]]:
if raw is None:
return None
if isinstance(raw, dict):
return raw
if isinstance(raw, str) and raw.strip():
try:
data = json.loads(raw)
return data if isinstance(data, dict) else None
except json.JSONDecodeError:
return None
return None
def _walkUdmBlocksImpl(udm: Dict[str, Any], out: List[Dict[str, Any]], path: str) -> None:
if udm.get("contentType"):
raw = udm.get("raw") or ""
preview = raw[:240] + ("" if len(raw) > 240 else "")
out.append({
"path": path,
"id": udm.get("id"),
"contentType": udm.get("contentType"),
"rawPreview": preview,
})
children = udm.get("children") or []
for i, ch in enumerate(children):
if isinstance(ch, dict):
role = ch.get("role") or "node"
label = f"{path}/children[{i}]"
if ch.get("role") in ("page", "section", "slide", "sheet"):
label = f"{path}/{role}[{ch.get('index', i)}]"
_walkUdmBlocksImpl(ch, out, label)
def _getUdmStructureText(udm: Dict[str, Any]) -> str:
lines = [
f"id: {udm.get('id', '?')}",
f"role: {udm.get('role', '?')}",
f"sourceType: {udm.get('sourceType', '?')}",
f"sourcePath: {udm.get('sourcePath', '')}",
]
nodes = udm.get("children") or []
lines.append(f"structuralNodes (top-level): {len(nodes)}")
for i, sn in enumerate(nodes[:80]):
if isinstance(sn, dict):
role = sn.get("role", "?")
idx = sn.get("index", i)
lab = sn.get("label") or ""
blocks = sn.get("children") or []
lines.append(f" [{i}] {role} index={idx} label={lab!r} contentBlocks={len(blocks)}")
if len(nodes) > 80:
lines.append(f" … and {len(nodes) - 80} more structural nodes")
return "\n".join(lines)
def _filterUdmByTypeImpl(udm: Dict[str, Any], content_type: str) -> Dict[str, Any]:
hits: List[Dict[str, Any]] = []
def collect(node: Any) -> None:
if not isinstance(node, dict):
return
if node.get("contentType") == content_type:
hits.append(dict(node))
for child in node.get("children") or []:
collect(child)
collect(udm)
return {"nodes": hits, "count": len(hits), "contentType": content_type}
def _registerDocumentTools(registry: ToolRegistry, services): def _registerDocumentTools(registry: ToolRegistry, services):
"""Auto-extracted from registerCoreTools.""" """Auto-extracted from registerCoreTools."""
# ---- Document tools (Smart Documents / Container Handling) ---- # ---- Document tools (Smart Documents / Container Handling) ----
@ -205,6 +276,91 @@ def _registerDocumentTools(registry: ToolRegistry, services):
readOnly=True, readOnly=True,
) )
# ---- UDM (Unified Document Model) tools ----
async def _getUdmStructure(args: Dict[str, Any], context: Dict[str, Any]):
udm = _parseUdmJson(args.get("udmJson") or args.get("udm"))
if not udm:
return ToolResult(toolCallId="", toolName="getUdmStructure", success=False, error="udmJson must be a JSON object or string")
text = _getUdmStructureText(udm)
return ToolResult(toolCallId="", toolName="getUdmStructure", success=True, data=text)
async def _walkUdmBlocks(args: Dict[str, Any], context: Dict[str, Any]):
udm = _parseUdmJson(args.get("udmJson") or args.get("udm"))
if not udm:
return ToolResult(toolCallId="", toolName="walkUdmBlocks", success=False, error="udmJson must be a JSON object or string")
blocks: List[Dict[str, Any]] = []
_walkUdmBlocksImpl(udm, blocks, "document")
max_n = int(args.get("maxResults") or 200)
trimmed = blocks[:max_n]
lines = [f"Total content blocks found: {len(blocks)} (showing {len(trimmed)})"]
for b in trimmed:
lines.append(f"{b.get('path')} | {b.get('contentType')} | id={b.get('id')}")
if b.get("rawPreview"):
lines.append(f" preview: {b['rawPreview'][:120]}")
if len(blocks) > max_n:
lines.append(f"... {len(blocks) - max_n} more not shown (increase maxResults)")
return ToolResult(toolCallId="", toolName="walkUdmBlocks", success=True, data="\n".join(lines))
async def _filterUdmByType(args: Dict[str, Any], context: Dict[str, Any]):
udm = _parseUdmJson(args.get("udmJson") or args.get("udm"))
content_type = (args.get("contentType") or "").strip()
if not udm:
return ToolResult(toolCallId="", toolName="filterUdmByType", success=False, error="udmJson is required")
if not content_type:
return ToolResult(toolCallId="", toolName="filterUdmByType", success=False, error="contentType is required")
filtered = _filterUdmByTypeImpl(udm, content_type)
return ToolResult(
toolCallId="",
toolName="filterUdmByType",
success=True,
data=json.dumps(filtered, ensure_ascii=False, default=str)[:_MAX_TOOL_RESULT_CHARS],
)
registry.register(
"getUdmStructure",
_getUdmStructure,
description="Summarize hierarchy of a Unified Document Model (UDM) JSON: ids, sourceType, structural nodes and block counts. Pass udmJson as stringified JSON.",
parameters={
"type": "object",
"properties": {
"udmJson": {"type": "string", "description": "Stringified UDM document object (Document → StructuralNode → ContentBlock)"},
},
"required": ["udmJson"],
},
readOnly=True,
)
registry.register(
"walkUdmBlocks",
_walkUdmBlocks,
description="Depth-first walk over a UDM tree; lists each ContentBlock with path, id, type, and short text preview.",
parameters={
"type": "object",
"properties": {
"udmJson": {"type": "string", "description": "Stringified UDM document"},
"maxResults": {"type": "integer", "description": "Max blocks to return (default 200)"},
},
"required": ["udmJson"],
},
readOnly=True,
)
registry.register(
"filterUdmByType",
_filterUdmByType,
description="Return all ContentBlocks in a UDM tree whose contentType matches (e.g. table, image, text).",
parameters={
"type": "object",
"properties": {
"udmJson": {"type": "string", "description": "Stringified UDM document"},
"contentType": {"type": "string", "description": "contentType to match (text, image, table, code, media, link, formula)"},
},
"required": ["udmJson", "contentType"],
},
readOnly=True,
)
# ---- Vision tool ---- # ---- Vision tool ----
async def _describeImage(args: Dict[str, Any], context: Dict[str, Any]): async def _describeImage(args: Dict[str, Any], context: Dict[str, Any]):

View file

@ -116,6 +116,17 @@ def _registerFeatureSubAgentTools(registry: ToolRegistry, services):
for ds in (featureDataSources or []) for ds in (featureDataSources or [])
) )
neutralizeFieldsPerTable: Dict[str, List[str]] = {}
for ds in (featureDataSources or []):
nf = ds.get("neutralizeFields") if isinstance(ds, dict) else getattr(ds, "neutralizeFields", None)
tn = ds.get("tableName", "") if isinstance(ds, dict) else getattr(ds, "tableName", "")
if nf and isinstance(nf, list) and tn:
existing = neutralizeFieldsPerTable.get(tn, [])
for f in nf:
if f not in existing:
existing.append(f)
neutralizeFieldsPerTable[tn] = existing
from modules.security.rbacCatalog import getCatalogService from modules.security.rbacCatalog import getCatalogService
catalog = getCatalogService() catalog = getCatalogService()
tableFilters = {} tableFilters = {}
@ -182,6 +193,7 @@ def _registerFeatureSubAgentTools(registry: ToolRegistry, services):
instanceLabel=instanceLabel, instanceLabel=instanceLabel,
tableFilters=tableFilters, tableFilters=tableFilters,
requestLang=requestLang, requestLang=requestLang,
neutralizeFields=neutralizeFieldsPerTable if neutralizeFieldsPerTable else None,
) )
_featureQueryCache[cacheKey] = (time.time(), answer) _featureQueryCache[cacheKey] = (time.time(), answer)
@ -201,13 +213,9 @@ def _registerFeatureSubAgentTools(registry: ToolRegistry, services):
"queryFeatureInstance", _queryFeatureInstance, "queryFeatureInstance", _queryFeatureInstance,
description=( description=(
"Query data from a feature instance (e.g. Trustee, CommCoach). " "Query data from a feature instance (e.g. Trustee, CommCoach). "
"Delegates to a specialized sub-agent that knows the feature's data schema " "Delegates to a sub-agent that knows the feature schema. "
"and can browse, filter, and aggregate its tables. Use this when the user " "Requires the feature instance id from attached feature data sources. "
"has attached feature data sources or asks about feature-specific data.\n\n" "Ask one precise, self-contained question per call."
"GUIDELINES:\n"
"- Ask a precise, self-contained question (include all context the sub-agent needs).\n"
"- Combine related data needs into ONE call instead of multiple small ones.\n"
"- Avoid calling this tool repeatedly with slight variations of the same question."
), ),
parameters={ parameters={
"type": "object", "type": "object",

View file

@ -3,7 +3,7 @@
"""Shared helpers for core agent tools (file scope, binary detection, temp folder).""" """Shared helpers for core agent tools (file scope, binary detection, temp folder)."""
import logging import logging
from typing import Optional from typing import Any, Optional
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -77,3 +77,23 @@ def _getOrCreateTempFolder(chatService) -> Optional[str]:
logger.warning(f"Could not get/create Temp folder: {e}") logger.warning(f"Could not get/create Temp folder: {e}")
return None return None
def _buildResolverDbFromServices(services: Any):
"""DB adapter for ConnectorResolver: load UserConnections by id.
interfaceDbApp exposes getUserConnectionById; ConnectorResolver expects getUserConnection.
"""
chatService = services.chat
appIf = getattr(chatService, "interfaceDbApp", None)
if appIf and hasattr(appIf, "getUserConnectionById"):
class _Adapter:
def __init__(self, app):
self._app = app
def getUserConnection(self, connectionId: str):
return self._app.getUserConnectionById(connectionId)
return _Adapter(appIf)
return getattr(chatService, "interfaceDbComponent", None)

View file

@ -41,6 +41,7 @@ async def runFeatureDataAgent(
instanceLabel: str = "", instanceLabel: str = "",
tableFilters: Optional[Dict[str, Dict[str, str]]] = None, tableFilters: Optional[Dict[str, Dict[str, str]]] = None,
requestLang: Optional[str] = None, requestLang: Optional[str] = None,
neutralizeFields: Optional[Dict[str, List[str]]] = None,
) -> str: ) -> str:
"""Run the feature data sub-agent and return the textual result. """Run the feature data sub-agent and return the textual result.
@ -56,12 +57,14 @@ async def runFeatureDataAgent(
instanceLabel: Human-readable instance name for context. instanceLabel: Human-readable instance name for context.
tableFilters: Per-table record filters from FeatureDataSource.recordFilter. tableFilters: Per-table record filters from FeatureDataSource.recordFilter.
requestLang: ISO 639-1 code for resolving multilingual table labels in the schema prompt. requestLang: ISO 639-1 code for resolving multilingual table labels in the schema prompt.
neutralizeFields: Per-table list of field names to mask with placeholders
before returning data to the AI.
Returns: Returns:
Plain-text answer produced by the sub-agent. Plain-text answer produced by the sub-agent.
""" """
provider = FeatureDataProvider(dbConnector) provider = FeatureDataProvider(dbConnector, neutralizeFields=neutralizeFields)
registry = _buildSubAgentTools(provider, featureInstanceId, mandateId, tableFilters or {}) registry = _buildSubAgentTools(provider, featureInstanceId, mandateId, tableFilters or {})
for tbl in selectedTables: for tbl in selectedTables:

View file

@ -8,12 +8,13 @@ feature table. All queries are automatically filtered by featureInstanceId
and mandateId so data isolation is guaranteed. and mandateId so data isolation is guaranteed.
""" """
import hashlib
import logging import logging
import json import json
import os import os
import time import time
from pathlib import Path from pathlib import Path
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional, Set
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -61,12 +62,18 @@ _ALLOWED_AGGREGATES = {"SUM", "COUNT", "AVG", "MIN", "MAX"}
class FeatureDataProvider: class FeatureDataProvider:
"""Reads feature-instance data from the DB using DATA_OBJECTS metadata.""" """Reads feature-instance data from the DB using DATA_OBJECTS metadata."""
def __init__(self, dbConnector): def __init__(self, dbConnector, neutralizeFields: Optional[Dict[str, List[str]]] = None):
""" """
Args: Args:
dbConnector: A connectorDbPostgre.DatabaseConnector with an open connection. dbConnector: A connectorDbPostgre.DatabaseConnector with an open connection.
neutralizeFields: Per-table field names whose values must be replaced
with placeholders before returning to the AI, e.g.
``{"TrusteePosition": ["firstName", "lastName", "address"]}``.
""" """
self._db = dbConnector self._db = dbConnector
self._neutralizeFields: Dict[str, Set[str]] = {
tbl: set(fields) for tbl, fields in (neutralizeFields or {}).items()
}
# ------------------------------------------------------------------ # ------------------------------------------------------------------
# public API (called by FeatureDataAgent tools) # public API (called by FeatureDataAgent tools)
@ -102,6 +109,13 @@ class FeatureDataProvider:
logger.warning(f"getActualColumns({tableName}) failed: {e}") logger.warning(f"getActualColumns({tableName}) failed: {e}")
return [] return []
def _applyFieldNeutralization(self, tableName: str, rows: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Neutralize sensitive field values in query results before they reach the AI.

    Looks up the per-table neutralization set configured at construction
    time (``self._neutralizeFields``). Tables with no configured fields
    pass through untouched — the same list object is returned, no copy.
    Otherwise each row is run through _neutralizeRowFields, which replaces
    the configured fields' values with placeholder tokens.
    """
    fieldsToNeut = self._neutralizeFields.get(tableName)
    if not fieldsToNeut:
        return rows
    return [_neutralizeRowFields(row, fieldsToNeut) for row in rows]
def browseTable( def browseTable(
self, self,
tableName: str, tableName: str,
@ -152,6 +166,7 @@ class FeatureDataProvider:
cur.execute(dataSql, allParams + [limit, offset]) cur.execute(dataSql, allParams + [limit, offset])
rows = [_serializeRow(dict(r)) for r in cur.fetchall()] rows = [_serializeRow(dict(r)) for r in cur.fetchall()]
rows = self._applyFieldNeutralization(tableName, rows)
result = {"rows": rows, "total": total, "limit": limit, "offset": offset} result = {"rows": rows, "total": total, "limit": limit, "offset": offset}
_debugQueryLog("browseTable", tableName, { _debugQueryLog("browseTable", tableName, {
"fields": fields, "limit": limit, "offset": offset, "fields": fields, "limit": limit, "offset": offset,
@ -220,6 +235,7 @@ class FeatureDataProvider:
cur.execute(sql, allParams) cur.execute(sql, allParams)
rows = [_serializeRow(dict(r)) for r in cur.fetchall()] rows = [_serializeRow(dict(r)) for r in cur.fetchall()]
rows = self._applyFieldNeutralization(tableName, rows)
result = { result = {
"rows": rows, "rows": rows,
"aggregate": aggregate, "aggregate": aggregate,
@ -298,6 +314,7 @@ class FeatureDataProvider:
cur.execute(dataSql, allParams + [limit, offset]) cur.execute(dataSql, allParams + [limit, offset])
rows = [_serializeRow(dict(r)) for r in cur.fetchall()] rows = [_serializeRow(dict(r)) for r in cur.fetchall()]
rows = self._applyFieldNeutralization(tableName, rows)
result = {"rows": rows, "total": total, "limit": limit, "offset": offset} result = {"rows": rows, "total": total, "limit": limit, "offset": offset}
_debugQueryLog("queryTable", tableName, { _debugQueryLog("queryTable", tableName, {
"filters": filters, "fields": fields, "orderBy": orderBy, "filters": filters, "fields": fields, "orderBy": orderBy,
@ -417,3 +434,22 @@ def _serializeRow(row: Dict[str, Any]) -> Dict[str, Any]:
elif hasattr(v, "isoformat"): elif hasattr(v, "isoformat"):
row[k] = v.isoformat() row[k] = v.isoformat()
return row return row
_PLACEHOLDER_PREFIX = "NEUT"
def _neutralizeRowFields(row: Dict[str, Any], fieldsToNeutralize: Set[str]) -> Dict[str, Any]:
"""Replace values in sensitive fields with stable, deterministic placeholders.
The placeholder format ``[NEUT.<field>.<short-hash>]`` is stable for the same
value so that identical values in different rows produce the same token.
This allows the AI to reason about equality without seeing the real data.
"""
for field in fieldsToNeutralize:
val = row.get(field)
if val is None or val == "":
continue
shortHash = hashlib.sha256(str(val).encode()).hexdigest()[:8]
row[field] = f"[{_PLACEHOLDER_PREFIX}.{field}.{shortHash}]"
return row

View file

@ -3,7 +3,7 @@
"""Agent service: entry point for running AI agents with tool use.""" """Agent service: entry point for running AI agents with tool use."""
import logging import logging
from typing import Any, Callable, Dict, List, Optional, AsyncGenerator from typing import Any, Callable, Dict, List, Optional, Set, AsyncGenerator
from modules.datamodels.datamodelAi import ( from modules.datamodels.datamodelAi import (
AiCallRequest, AiCallOptions, AiCallResponse, OperationTypeEnum AiCallRequest, AiCallOptions, AiCallResponse, OperationTypeEnum
@ -23,6 +23,40 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import (
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _toolbox_connection_authorities(services: "_ServicesAdapter") -> List[str]:
"""Collect connection authority strings for toolbox gating (requiresConnection).
The optional ``connection`` service is not always registered; fall back to
``chat.getUserConnections()`` (same source as workspace UI).
Toolbox entries use ``microsoft`` while UserConnection may store ``msft``.
"""
seen: Set[str] = set()
try:
conn_svc = services.getService("connection")
if conn_svc and hasattr(conn_svc, "getConnections"):
for c in conn_svc.getConnections() or []:
auth = c.get("authority") if isinstance(c, dict) else getattr(c, "authority", None)
val = auth.value if hasattr(auth, "value") else str(auth or "")
if val:
seen.add(val)
except Exception:
pass
try:
chat = services.chat
if chat and hasattr(chat, "getUserConnections"):
for c in chat.getUserConnections() or []:
auth = c.get("authority") if isinstance(c, dict) else getattr(c, "authority", None)
val = auth.value if hasattr(auth, "value") else str(auth or "")
if val:
seen.add(val)
except Exception as e:
logger.debug("toolbox authorities from chat: %s", e)
if "msft" in seen:
seen.add("microsoft")
return list(seen)
class _ServicesAdapter: class _ServicesAdapter:
"""Adapter providing service access from (context, get_service).""" """Adapter providing service access from (context, get_service)."""
@ -61,10 +95,33 @@ class _ServicesAdapter:
def extraction(self): def extraction(self):
return self._getService("extraction") return self._getService("extraction")
@property
def rbac(self):
    """Return the RBAC service shared with the workflow hub, or None.

    Resolved lazily via the chat service's ``interfaceDbApp`` so that
    MethodBase permission checks during discoverMethods use the same
    RbacClass instance as the workflow hub. Any lookup failure is treated
    as "no RBAC available" rather than an error.
    """
    try:
        # self.chat may itself raise when the chat service is unavailable.
        chat_svc = self.chat
        app = getattr(chat_svc, "interfaceDbApp", None)
        if app is not None:
            return getattr(app, "rbac", None)
    except Exception:
        return None
    return None
def getService(self, name: str): def getService(self, name: str):
"""Access any service by name.""" """Access any service by name."""
return self._getService(name) return self._getService(name)
def __getattr__(self, name: str):
    """Fall back to service lookup for unknown attributes.

    Lets code written against a richer services object (e.g.
    ``services.clickup`` in MethodClickup / ActionExecutor during
    discoverMethods) resolve transparently through _getService. Python only
    calls __getattr__ after normal attribute lookup fails, so real
    attributes and properties are unaffected.
    """
    # Never treat underscore-prefixed lookups as service names — that would
    # break copy/pickle/hasattr probing of internals.
    if name.startswith("_"):
        raise AttributeError(name)
    try:
        return self._getService(name)
    except KeyError:
        # Re-raise as AttributeError so hasattr()/getattr() semantics hold;
        # `from None` suppresses the internal KeyError chain.
        raise AttributeError(
            f"{type(self).__name__!r} object has no attribute {name!r}"
        ) from None
@property @property
def featureCode(self) -> Optional[str]: def featureCode(self) -> Optional[str]:
w = self.workflow w = self.workflow
@ -268,7 +325,12 @@ class AgentService:
try: try:
from modules.workflows.processing.shared.methodDiscovery import discoverMethods from modules.workflows.processing.shared.methodDiscovery import discoverMethods
discoverMethods(self.services) discoverMethods(self.services)
except Exception as e:
logger.warning("discoverMethods failed before action tools: %s", e)
try:
from modules.workflows.processing.core.actionExecutor import ActionExecutor from modules.workflows.processing.core.actionExecutor import ActionExecutor
actionExecutor = ActionExecutor(self.services) actionExecutor = ActionExecutor(self.services)
adapter = ActionToolAdapter(actionExecutor) adapter = ActionToolAdapter(actionExecutor)
@ -293,7 +355,7 @@ class AgentService:
from modules.serviceCenter.services.serviceAgent.toolboxRegistry import getToolboxRegistry from modules.serviceCenter.services.serviceAgent.toolboxRegistry import getToolboxRegistry
tbRegistry = getToolboxRegistry() tbRegistry = getToolboxRegistry()
userConnections: List[str] = [] userConnections: List[str] = _toolbox_connection_authorities(self.services)
try: try:
chatService = self.services.chat if hasattr(self.services, "chat") else None chatService = self.services.chat if hasattr(self.services, "chat") else None
if chatService and hasattr(chatService, "getUserConnections"): if chatService and hasattr(chatService, "getUserConnections"):
@ -301,7 +363,7 @@ class AgentService:
for c in connections: for c in connections:
authority = c.get("authority", "") if isinstance(c, dict) else getattr(c, "authority", "") authority = c.get("authority", "") if isinstance(c, dict) else getattr(c, "authority", "")
authorityVal = authority.value if hasattr(authority, "value") else str(authority) authorityVal = authority.value if hasattr(authority, "value") else str(authority)
if authorityVal: if authorityVal and authorityVal not in userConnections:
userConnections.append(authorityVal) userConnections.append(authorityVal)
except Exception as e: except Exception as e:
logger.debug("Could not resolve user connections for toolbox activation: %s", e) logger.debug("Could not resolve user connections for toolbox activation: %s", e)
@ -386,8 +448,13 @@ class AgentService:
except Exception: except Exception:
pass pass
try: try:
from modules.serviceCenter.services.serviceAgent.actionToolAdapter import ActionToolAdapter from modules.workflows.processing.shared.methodDiscovery import discoverMethods
from modules.workflows.processing.core.actionExecutor import ActionExecutor from modules.workflows.processing.core.actionExecutor import ActionExecutor
from modules.serviceCenter.services.serviceAgent.actionToolAdapter import (
ActionToolAdapter,
)
discoverMethods(self.services)
adapter = ActionToolAdapter(ActionExecutor(self.services)) adapter = ActionToolAdapter(ActionExecutor(self.services))
adapter.registerAll(registry) adapter.registerAll(registry)
if registry.isValidTool(toolName): if registry.isValidTool(toolName):

View file

@ -173,7 +173,13 @@ def _registerDefaultToolboxes() -> None:
requiresConnection="clickup", requiresConnection="clickup",
isDefault=False, isDefault=False,
tools=[ tools=[
"clickup_searchTasks", "clickup_createTask", "clickup_updateTask", "clickup_listTasks",
"clickup_listFields",
"clickup_searchTasks",
"clickup_getTask",
"clickup_createTask",
"clickup_updateTask",
"clickup_uploadAttachment",
], ],
), ),
ToolboxDefinition( ToolboxDefinition(

View file

@ -91,6 +91,12 @@ class ContainerExtractor(Extractor):
) )
] ]
if context.get("lazyContainer"):
lazy = _extractLazyListing(fileBytes, mimeType, fileName, rootId)
if lazy is not None:
parts.extend(lazy)
return parts
state = {"totalSize": 0, "fileCount": 0} state = {"totalSize": 0, "fileCount": 0}
try: try:
childParts = _resolveContainerRecursive( childParts = _resolveContainerRecursive(
@ -112,6 +118,42 @@ class ContainerExtractor(Extractor):
return parts return parts
def _extractLazyListing(
    fileBytes: bytes,
    containerMime: str,
    containerName: str,
    parentId: str,
) -> Optional[List[ContentPart]]:
    """List ZIP members as lazy ContentPart references (no nested extraction).

    Returns None when the container is not a ZIP (neither by MIME type nor
    by a ``.zip`` extension) or when the bytes are not a readable ZIP
    archive. Directory entries are skipped; each file entry becomes an
    empty-data part whose metadata records its path, size, and a
    ``lazyReference`` marker.
    """
    zipMimes = ("application/zip", "application/x-zip-compressed")
    isZip = containerMime in zipMimes or (containerName or "").lower().endswith(".zip")
    if not isZip:
        return None
    entries: List[ContentPart] = []
    try:
        with zipfile.ZipFile(io.BytesIO(fileBytes)) as archive:
            for member in archive.infolist():
                if member.is_dir():
                    continue
                entries.append(
                    ContentPart(
                        id=makeId(),
                        parentId=parentId,
                        label=member.filename,
                        typeGroup="container",
                        mimeType=_detectMimeType(member.filename),
                        data="",
                        metadata={
                            "containerPath": member.filename,
                            "size": member.file_size,
                            "lazyReference": True,
                        },
                    )
                )
    except zipfile.BadZipFile:
        return None
    return entries
def _resolveContainerRecursive( def _resolveContainerRecursive(
containerBytes: bytes, containerBytes: bytes,
containerMime: str, containerMime: str,
@ -160,8 +202,9 @@ def _addFilePart(
entryPath = f"{containerPath}/{fileName}" if containerPath else fileName entryPath = f"{containerPath}/{fileName}" if containerPath else fileName
detectedMime = _detectMimeType(fileName) detectedMime = _detectMimeType(fileName)
from ..subRegistry import ExtractorRegistry from ..subRegistry import getExtractorRegistry
registry = ExtractorRegistry()
registry = getExtractorRegistry()
extractor = registry.resolve(detectedMime, fileName) extractor = registry.resolve(detectedMime, fileName)
if extractor and not isinstance(extractor, ContainerExtractor): if extractor and not isinstance(extractor, ContainerExtractor):

View file

@ -75,8 +75,7 @@ class PdfExtractor(Extractor):
# Extract text per page with PyMuPDF (same lib as in-place search - ensures extraction matches PDF text layer) # Extract text per page with PyMuPDF (same lib as in-place search - ensures extraction matches PDF text layer)
try: try:
with io.BytesIO(fileBytes) as buf: doc = fitz.open(stream=fileBytes, filetype="pdf")
doc = fitz.open(stream=buf.getvalue(), filetype="pdf")
for i in range(len(doc)): for i in range(len(doc)):
try: try:
page = doc[i] page = doc[i]
@ -139,8 +138,7 @@ class PdfExtractor(Extractor):
# Extract images with PyMuPDF # Extract images with PyMuPDF
try: try:
with io.BytesIO(fileBytes) as buf2: doc = fitz.open(stream=fileBytes, filetype="pdf")
doc = fitz.open(stream=buf2.getvalue(), filetype="pdf")
for i in range(len(doc)): for i in range(len(doc)):
page = doc[i] page = doc[i]
images = page.get_images(full=True) images = page.get_images(full=True)

View file

@ -9,7 +9,7 @@ import asyncio
import base64 import base64
import json import json
from .subRegistry import ExtractorRegistry, ChunkerRegistry from .subRegistry import ExtractorRegistry, ChunkerRegistry, getExtractorRegistry
from .subPipeline import runExtraction from .subPipeline import runExtraction
from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart, MergeStrategy, ExtractionOptions, PartResult, DocumentIntent from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart, MergeStrategy, ExtractionOptions, PartResult, DocumentIntent
from modules.datamodels.datamodelChat import ChatDocument from modules.datamodels.datamodelChat import ChatDocument
@ -23,7 +23,6 @@ logger = logging.getLogger(__name__)
class ExtractionService: class ExtractionService:
_sharedExtractorRegistry: Optional[ExtractorRegistry] = None
_sharedChunkerRegistry: Optional[ChunkerRegistry] = None _sharedChunkerRegistry: Optional[ChunkerRegistry] = None
def __init__(self, context, get_service: Callable[[str], Any]): def __init__(self, context, get_service: Callable[[str], Any]):
@ -35,11 +34,9 @@ class ExtractionService:
context.user, context.user,
mandateId=context.mandate_id, mandateId=context.mandate_id,
) )
if ExtractionService._sharedExtractorRegistry is None: self._extractorRegistry = getExtractorRegistry()
ExtractionService._sharedExtractorRegistry = ExtractorRegistry()
if ExtractionService._sharedChunkerRegistry is None: if ExtractionService._sharedChunkerRegistry is None:
ExtractionService._sharedChunkerRegistry = ChunkerRegistry() ExtractionService._sharedChunkerRegistry = ChunkerRegistry()
self._extractorRegistry = ExtractionService._sharedExtractorRegistry
self._chunkerRegistry = ExtractionService._sharedChunkerRegistry self._chunkerRegistry = ExtractionService._sharedChunkerRegistry
modelRegistry.ensureConnectorsRegistered() modelRegistry.ensureConnectorsRegistered()

View file

@ -4,6 +4,7 @@ from typing import List
import logging import logging
from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart, ExtractionOptions, MergeStrategy from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart, ExtractionOptions, MergeStrategy
from modules.datamodels.datamodelUdm import _applyUdmOutputDetail
from .subUtils import makeId from .subUtils import makeId
from .subRegistry import ExtractorRegistry, ChunkerRegistry from .subRegistry import ExtractorRegistry, ChunkerRegistry
@ -29,7 +30,12 @@ def runExtraction(extractorRegistry: ExtractorRegistry, chunkerRegistry: Chunker
) )
return ContentExtracted(id=makeId(), parts=[part]) return ContentExtracted(id=makeId(), parts=[part])
parts = extractor.extract(documentBytes, {"fileName": fileName, "mimeType": mimeType}) extractCtx = {
"fileName": fileName,
"mimeType": mimeType,
"lazyContainer": options.lazyContainer,
}
parts = extractor.extract(documentBytes, extractCtx)
# REMOVED: poolAndLimit(parts, chunkerRegistry, options) # REMOVED: poolAndLimit(parts, chunkerRegistry, options)
# REMOVED: Chunking logic - now handled in AI call phase # REMOVED: Chunking logic - now handled in AI call phase
@ -40,7 +46,16 @@ def runExtraction(extractorRegistry: ExtractorRegistry, chunkerRegistry: Chunker
from .mainServiceExtraction import applyMerging from .mainServiceExtraction import applyMerging
parts = applyMerging(parts, options.mergeStrategy) parts = applyMerging(parts, options.mergeStrategy)
return ContentExtracted(id=makeId(), parts=parts) ec_id = makeId()
extracted = ContentExtracted(id=ec_id, parts=parts)
if options.outputFormat in ("udm", "both"):
udm = extractor.extractToUdm(
documentBytes,
{**extractCtx, "extractionId": ec_id},
precomputedParts=parts,
)
extracted.udm = _applyUdmOutputDetail(udm, options.outputDetail)
return extracted
# REMOVED: poolAndLimit function - chunking now handled in AI call phase # REMOVED: poolAndLimit function - chunking now handled in AI call phase

View file

@ -1,12 +1,25 @@
# Copyright (c) 2025 Patrick Motsch # Copyright (c) 2025 Patrick Motsch
# All rights reserved. # All rights reserved.
from typing import Any, Dict, Optional from typing import Any, Dict, List, Optional, TYPE_CHECKING
import logging import logging
from modules.datamodels.datamodelExtraction import ContentPart from modules.datamodels.datamodelExtraction import ContentPart
if TYPE_CHECKING:
from modules.datamodels.datamodelUdm import UdmDocument
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
_extractorRegistrySingleton: Optional["ExtractorRegistry"] = None
def getExtractorRegistry() -> "ExtractorRegistry":
"""Shared ExtractorRegistry instance (avoid repeated auto-discovery e.g. per file in ZIP)."""
global _extractorRegistrySingleton
if _extractorRegistrySingleton is None:
_extractorRegistrySingleton = ExtractorRegistry()
return _extractorRegistrySingleton
class Extractor: class Extractor:
""" """
@ -27,6 +40,23 @@ class Extractor:
"""Extract content from the file bytes.""" """Extract content from the file bytes."""
raise NotImplementedError raise NotImplementedError
def extractToUdm(
self,
fileBytes: bytes,
context: Dict[str, Any],
precomputedParts: Optional[List[ContentPart]] = None,
) -> "UdmDocument":
"""Build UDM from extracted parts (default: heuristic grouping). Override for format-specific trees."""
from modules.datamodels.datamodelUdm import _contentPartsToUdm, _mimeToUdmSourceType
from modules.datamodels.datamodelExtraction import ContentExtracted
from .subUtils import makeId
parts = precomputedParts if precomputedParts is not None else self.extract(fileBytes, context)
eid = context.get("extractionId") or makeId()
extracted = ContentExtracted(id=eid, parts=parts)
src = _mimeToUdmSourceType(context.get("mimeType", ""), context.get("fileName", ""))
return _contentPartsToUdm(extracted, src, context.get("fileName", ""))
def getSupportedExtensions(self) -> list[str]: def getSupportedExtensions(self) -> list[str]:
"""Return list of supported file extensions (including dots).""" """Return list of supported file extensions (including dots)."""
return [] return []

View file

@ -0,0 +1,70 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Dynamic database registry — each interface self-registers its DB on import.
Usage in any interfaceDb*.py / interfaceFeature*.py:
from modules.shared.dbRegistry import registerDatabase
registerDatabase("poweron_xyz")
"""
import logging
import threading
from typing import Dict, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
_lock = threading.Lock()
_registry: Dict[str, str] = {}
def registerDatabase(dbName: str, configPrefix: str = "DB") -> None:
    """Add a database to the health-monitoring registry.

    Invoked at module level by each interface, so the registry fills
    itself as interfaces are imported. Registering an already-known
    database is a silent no-op (first registration wins).

    Args:
        dbName: PostgreSQL database name (e.g. "poweron_app").
        configPrefix: Prefix of the config keys holding host/port/user/
            password. The default "DB" maps to DB_HOST, DB_PORT, etc.
    """
    with _lock:
        alreadyKnown = dbName in _registry
        if not alreadyKnown:
            _registry[dbName] = configPrefix
            logger.debug(f"Database registered: {dbName} (configPrefix={configPrefix})")
def _getRegisteredDatabases() -> Dict[str, str]:
    """Return snapshot of all registered databases {dbName: configPrefix}."""
    # Copy under the lock so callers can iterate the result without racing
    # a concurrent registerDatabase() call mutating the shared dict.
    with _lock:
        return dict(_registry)
def _getConnectorForDb(dbName: str) -> DatabaseConnector:
    """Create a lightweight DatabaseConnector for the given registered DB.

    Intended for read-only health queries (pg_stat, orphan scans).
    Uses the same APP_CONFIG credentials as the application connectors.

    Args:
        dbName: Name of a database previously passed to registerDatabase().

    Returns:
        A fresh DatabaseConnector bound to `dbName`.

    Raises:
        ValueError: If the database was never registered.
    """
    with _lock:
        configPrefix = _registry.get(dbName)
    if configPrefix is None:
        raise ValueError(f"Database '{dbName}' is not registered.")
    # The original conditionals (`... if configPrefix != "DB" else "DB_HOST"`)
    # were redundant: f"{'DB'}_HOST" is already "DB_HOST", so a plain
    # f-string covers both the default and custom prefixes.
    return DatabaseConnector(
        dbHost=APP_CONFIG.get(f"{configPrefix}_HOST", "localhost"),
        dbDatabase=dbName,
        dbUser=APP_CONFIG.get(f"{configPrefix}_USER"),
        dbPassword=APP_CONFIG.get(f"{configPrefix}_PASSWORD_SECRET"),
        dbPort=int(APP_CONFIG.get(f"{configPrefix}_PORT", 5432)),
    )

View file

@ -0,0 +1,243 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
FK-Discovery scans the Model-Registry for `fk_target` annotations and
builds a cached list of foreign-key relationships.
Each relationship describes one directed edge:
    sourceTable.sourceColumn -> targetTable.targetColumn
(possibly across databases)
The table→db mapping is derived automatically from the `fk_target`
annotations themselves: every `fk_target` declares `{"db": "...", "table": "..."}`
for the *target* side. By collecting all such declarations we know which DB
each table lives in — no extra registration step needed.
Usage:
from modules.shared.fkRegistry import _getFkRelationships
rels = _getFkRelationships()
"""
import importlib
import logging
import os
import threading
from dataclasses import dataclass
from typing import Dict, List, Optional
from modules.datamodels.datamodelBase import _MODEL_REGISTRY
logger = logging.getLogger(__name__)
_modelsLoaded = False
def _ensureModelsLoaded() -> None:
    """Import all datamodel modules so that __init_subclass__ fills _MODEL_REGISTRY.

    In a running server the interfaces import the datamodels automatically.
    This function makes FK-Discovery work in standalone / test contexts too.
    Import failures are tolerated (debug-logged) so one broken optional
    module cannot abort discovery.
    """
    global _modelsLoaded
    if _modelsLoaded:
        return

    def _tryImport(modName: str) -> None:
        # Best-effort import; registration happens as a side effect.
        try:
            importlib.import_module(modName)
        except Exception as e:
            logger.debug(f"Could not import {modName}: {e}")

    # Repository root = three levels above this file (modules/shared/...).
    gatewayRoot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

    # Core datamodels: modules/datamodels/datamodel*.py
    datamodelDir = os.path.join(gatewayRoot, "modules", "datamodels")
    for fname in os.listdir(datamodelDir):
        isModelFile = fname.startswith("datamodel") and fname.endswith(".py")
        if isModelFile and fname != "__init__.py":
            _tryImport(f"modules.datamodels.{fname[:-3]}")

    # Feature datamodels: modules/features/<feature>/datamodel*.py
    featuresDir = os.path.join(gatewayRoot, "modules", "features")
    if os.path.isdir(featuresDir):
        for featureDir in os.listdir(featuresDir):
            featurePath = os.path.join(featuresDir, featureDir)
            if not os.path.isdir(featurePath):
                continue
            for fname in os.listdir(featurePath):
                if fname.startswith("datamodel") and fname.endswith(".py"):
                    _tryImport(f"modules.features.{featureDir}.{fname[:-3]}")

    _modelsLoaded = True
_lock = threading.Lock()
_cachedRelationships: Optional[List["FkRelationship"]] = None
_cachedTableToDb: Optional[Dict[str, str]] = None
@dataclass(frozen=True)
class FkRelationship:
    """One directed FK edge: sourceTable.sourceColumn -> targetTable.targetColumn.

    Frozen (immutable and hashable) so instances can be shared across
    threads and de-duplicated via sets if needed.
    """
    sourceDb: str      # database holding the referencing table
    sourceTable: str   # referencing table name
    sourceColumn: str  # referencing column (carries the FK value)
    targetDb: str      # database holding the referenced table
    targetTable: str   # referenced table name
    targetColumn: str  # referenced column (defaults to "id" at discovery time)
def _buildTableToDbMap() -> Dict[str, str]:
    """Derive {tableName → dbName} for every PowerOnModel subclass.

    Two-pass approach:
      1. Collect explicit mappings from fk_target annotations
         (every fk_target declares the DB for its *target* table).
      2. For models still unmapped, query each registered database's
         catalog (information_schema) to find the table there.
    """
    _ensureModelsLoaded()
    mapping: Dict[str, str] = {}

    # Pass 1: harvest target-side declarations from fk_target annotations.
    for model in _MODEL_REGISTRY.values():
        for field in model.model_fields.values():
            meta = field.json_schema_extra
            target = meta.get("fk_target") if isinstance(meta, dict) else None
            if not isinstance(target, dict):
                continue
            tableName = target.get("table", "")
            dbName = target.get("db", "")
            if tableName and dbName:
                mapping[tableName] = dbName

    # Pass 2: anything still unknown is looked up in the DB catalogs.
    stillUnknown = [name for name in _MODEL_REGISTRY if name not in mapping]
    if stillUnknown:
        try:
            from modules.shared.dbRegistry import _getRegisteredDatabases
            _resolveUnmappedTablesFromCatalog(mapping, stillUnknown, _getRegisteredDatabases())
        except Exception as e:
            logger.warning(f"Could not resolve unmapped tables from catalog: {e}")
    return mapping
def _resolveUnmappedTablesFromCatalog(
    mapping: Dict[str, str],
    unmapped: List[str],
    registeredDbs: Dict[str, str],
) -> None:
    """Query information_schema in each registered DB for unmapped table names.

    Mutates `mapping` in place: each table found in a DB's public schema is
    assigned to that DB (first DB that has the table wins). Unreachable
    databases are skipped with a debug log so one offline DB does not
    break FK discovery.

    Args:
        mapping: tableName -> dbName map, extended in place.
        unmapped: Table names not yet present in `mapping`.
        registeredDbs: {dbName: configPrefix} from the DB registry.
    """
    import psycopg2
    import psycopg2.extras
    from modules.shared.configuration import APP_CONFIG
    unmappedSet = set(unmapped)
    for dbName, configPrefix in registeredDbs.items():
        if not unmappedSet:
            break  # everything resolved; no need to contact further DBs
        try:
            # The original per-key conditionals were redundant:
            # f"{'DB'}_HOST" already equals "DB_HOST".
            conn = psycopg2.connect(
                host=APP_CONFIG.get(f"{configPrefix}_HOST", "localhost"),
                port=int(APP_CONFIG.get(f"{configPrefix}_PORT", 5432)),
                database=dbName,
                user=APP_CONFIG.get(f"{configPrefix}_USER"),
                password=APP_CONFIG.get(f"{configPrefix}_PASSWORD_SECRET"),
                client_encoding="utf8",
            )
            try:
                with conn.cursor() as cur:
                    # Skip internal tables (leading underscore); '\_' escapes
                    # the LIKE wildcard.
                    cur.execute("""
                        SELECT table_name FROM information_schema.tables
                        WHERE table_schema = 'public'
                        AND table_name NOT LIKE '\\_%%'
                    """)
                    dbTables = {row[0] for row in cur.fetchall()}
                for tableName in list(unmappedSet):
                    if tableName in dbTables:
                        mapping[tableName] = dbName
                        unmappedSet.discard(tableName)
            finally:
                conn.close()
        except Exception as e:
            logger.debug(f"Catalog query for {dbName} failed: {e}")
def _discoverFkRelationships() -> List[FkRelationship]:
    """Scan every PowerOnModel subclass for `fk_target` in json_schema_extra.

    Returns a de-duplicated, sorted list of FkRelationship objects.
    """
    tableToDb = _buildTableToDbMap()
    edges: List[FkRelationship] = []
    for tableName, modelCls in _MODEL_REGISTRY.items():
        srcDb = tableToDb.get(tableName)
        if srcDb is None:
            # Table could not be assigned to a database; no edge possible.
            continue
        for fieldName, fieldInfo in modelCls.model_fields.items():
            extra = fieldInfo.json_schema_extra
            if not isinstance(extra, dict):
                continue
            fk = extra.get("fk_target")
            if not isinstance(fk, dict):
                continue
            dstDb = fk.get("db", "")
            dstTable = fk.get("table", "")
            if not (dstDb and dstTable):
                continue
            edges.append(
                FkRelationship(
                    sourceDb=srcDb,
                    sourceTable=tableName,
                    sourceColumn=fieldName,
                    targetDb=dstDb,
                    targetTable=dstTable,
                    targetColumn=fk.get("column", "id"),
                )
            )
    # Deterministic ordering for stable API output and easy diffing.
    edges.sort(key=lambda r: (r.sourceDb, r.sourceTable, r.sourceColumn))
    return edges
def _getFkRelationships() -> List[FkRelationship]:
    """Return the cached list of FK relationships (discovered on first call)."""
    global _cachedRelationships
    # Fast path: hand out the previously built list.
    with _lock:
        cached = _cachedRelationships
    if cached is not None:
        return cached
    # Discovery runs outside the lock (it may hit DB catalogs). Concurrent
    # first calls may each compute; the last writer wins, which is harmless.
    rels = _discoverFkRelationships()
    with _lock:
        _cachedRelationships = rels
    return rels
def _getTableToDbMap() -> Dict[str, str]:
    """Return the cached table→db mapping (built on first call)."""
    global _cachedTableToDb
    with _lock:
        existing = _cachedTableToDb
    if existing is not None:
        return existing
    # Built outside the lock; racing first calls may rebuild redundantly,
    # last write wins.
    freshMap = _buildTableToDbMap()
    with _lock:
        _cachedTableToDb = freshMap
    return freshMap
def _invalidateFkCache() -> None:
    """Force re-scan on next call (useful for testing)."""
    global _cachedRelationships, _cachedTableToDb
    # Clear both caches under one lock so readers never observe a state
    # where one cache was reset but the other still holds stale data.
    with _lock:
        _cachedRelationships = None
        _cachedTableToDb = None

View file

@ -0,0 +1,405 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Database health utilities — table statistics and orphan detection/cleanup.
All functions are intended for SysAdmin use only (access control in the route layer).
"""
import logging
import time
import threading
from dataclasses import dataclass, asdict
from typing import Dict, List, Optional, Set
import psycopg2
import psycopg2.extras
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import _getRegisteredDatabases
from modules.shared.fkRegistry import _getFkRelationships, FkRelationship
logger = logging.getLogger(__name__)
_ORPHAN_CACHE_TTL = 300 # 5 minutes
_orphanCacheLock = threading.Lock()
_orphanCache: Optional[Dict] = None # {"ts": float, "results": [...]}
# ---------------------------------------------------------------------------
# Dataclasses
# ---------------------------------------------------------------------------
@dataclass
class TableStats:
    """Size and maintenance statistics for one table, from pg_stat_user_tables."""
    db: str                     # registered database name
    table: str                  # table name (public schema)
    estimatedRows: int          # n_live_tup — planner estimate, not an exact count
    totalSizeBytes: int         # heap + indexes + TOAST
    indexSizeBytes: int         # indexes only
    lastVacuum: Optional[str]   # timestamp rendered as text; None if never vacuumed
    lastAnalyze: Optional[str]  # timestamp rendered as text; None if never analyzed
@dataclass
class OrphanResult:
    """Orphan count for one FK edge (source rows whose FK has no target row)."""
    sourceDb: str      # database holding the referencing table
    sourceTable: str   # referencing table
    sourceColumn: str  # FK column on the source side
    targetDb: str      # database holding the referenced table
    targetTable: str   # referenced table
    targetColumn: str  # referenced column
    orphanCount: int   # number of dangling source rows found by the scan
# ---------------------------------------------------------------------------
# Low-level DB helpers (read-only, lightweight connections)
# ---------------------------------------------------------------------------
def _getConnection(dbName: str):
    """Open a psycopg2 connection to the given registered database.

    Uses RealDictCursor so rows come back as dicts keyed by column name
    (the query helpers below rely on that).

    Args:
        dbName: Name of a database known to the DB registry.

    Raises:
        ValueError: If the database is not registered.
    """
    registeredDbs = _getRegisteredDatabases()
    configPrefix = registeredDbs.get(dbName)
    if configPrefix is None:
        raise ValueError(f"Database '{dbName}' is not registered.")
    # The original `... if configPrefix != "DB" else "DB_HOST"` conditionals
    # were redundant: f"{'DB'}_HOST" is already "DB_HOST".
    return psycopg2.connect(
        host=APP_CONFIG.get(f"{configPrefix}_HOST", "localhost"),
        port=int(APP_CONFIG.get(f"{configPrefix}_PORT", 5432)),
        database=dbName,
        user=APP_CONFIG.get(f"{configPrefix}_USER"),
        password=APP_CONFIG.get(f"{configPrefix}_PASSWORD_SECRET"),
        client_encoding="utf8",
        cursor_factory=psycopg2.extras.RealDictCursor,
    )
# ---------------------------------------------------------------------------
# Table statistics
# ---------------------------------------------------------------------------
def _getTableStats(dbFilter: Optional[str] = None) -> List[dict]:
    """Query pg_stat_user_tables plus relation sizes for every registered DB.

    Returns a list of TableStats dicts, optionally filtered by database name.
    Databases that cannot be reached are skipped with an error log so one
    offline DB does not hide the statistics of the others.

    Args:
        dbFilter: If set, only this database is queried.
    """
    registeredDbs = _getRegisteredDatabases()
    if dbFilter:
        registeredDbs = {k: v for k, v in registeredDbs.items() if k == dbFilter}
    results: List[dict] = []
    for dbName in sorted(registeredDbs):
        try:
            conn = _getConnection(dbName)
            try:
                with conn.cursor() as cur:
                    # Size functions take s.relid (an OID, implicitly cast to
                    # regclass). The previous quote_ident(s.relname) form
                    # resolved the name through the session search_path and
                    # could fail (or hit the wrong relation) when 'public'
                    # was not on the path.
                    cur.execute("""
                        SELECT
                            s.relname AS "table",
                            s.n_live_tup AS "estimatedRows",
                            pg_total_relation_size(s.relid) AS "totalSizeBytes",
                            pg_indexes_size(s.relid) AS "indexSizeBytes",
                            s.last_vacuum::text AS "lastVacuum",
                            s.last_analyze::text AS "lastAnalyze"
                        FROM pg_stat_user_tables s
                        WHERE s.schemaname = 'public'
                        AND s.relname NOT LIKE '\\_%%'
                        ORDER BY s.relname
                    """)
                    for row in cur.fetchall():
                        results.append(asdict(TableStats(
                            db=dbName,
                            table=row["table"],
                            estimatedRows=row["estimatedRows"],
                            totalSizeBytes=row["totalSizeBytes"],
                            indexSizeBytes=row["indexSizeBytes"],
                            lastVacuum=row["lastVacuum"],
                            lastAnalyze=row["lastAnalyze"],
                        )))
            finally:
                conn.close()
        except Exception as e:
            logger.error(f"Failed to get table stats for {dbName}: {e}")
    return results
# ---------------------------------------------------------------------------
# Orphan scanning
# ---------------------------------------------------------------------------
def _loadParentIds(conn, tableName: str, columnName: str) -> Set[str]:
    """Load all distinct values of a column from a table (for cross-DB checks)."""
    # Identifiers come from the trusted FK registry, not from user input.
    with conn.cursor() as cur:
        cur.execute(f'SELECT DISTINCT "{columnName}" FROM "{tableName}"')
        rows = cur.fetchall()
    # Normalize to strings; NULLs carry no FK information and are dropped.
    return {str(row[columnName]) for row in rows if row[columnName] is not None}
def _countOrphansSameDb(
    conn, sourceTable: str, sourceColumn: str,
    targetTable: str, targetColumn: str,
) -> int:
    """Count orphans when source and target live in the same DB.

    An orphan is a row whose FK column is non-NULL and non-empty but has no
    matching row in the target table. Identifiers are interpolated into the
    SQL; they originate from the trusted FK registry, not from user input.
    Empty strings are excluded because they are used as "unset" markers.
    """
    with conn.cursor() as cur:
        cur.execute(f"""
            SELECT COUNT(*) AS cnt
            FROM "{sourceTable}" s
            WHERE s."{sourceColumn}" IS NOT NULL
            AND s."{sourceColumn}" != ''
            AND NOT EXISTS (
                SELECT 1 FROM "{targetTable}" t
                WHERE t."{targetColumn}" = s."{sourceColumn}"
            )
        """)
        return cur.fetchone()["cnt"]
def _countOrphansCrossDb(
    sourceConn, sourceTable: str, sourceColumn: str,
    parentIds: Set[str],
) -> int:
    """Count orphans when parent IDs come from a different DB.

    `parentIds` is the full set of valid target-column values, loaded
    beforehand from the other database (see _loadParentIds). The ID set is
    shipped into the query as a text[] parameter and expanded via unnest().
    """
    if not parentIds:
        # Parent table is empty: every non-NULL, non-empty FK value is an orphan.
        with sourceConn.cursor() as cur:
            cur.execute(f"""
                SELECT COUNT(*) AS cnt
                FROM "{sourceTable}"
                WHERE "{sourceColumn}" IS NOT NULL
                AND "{sourceColumn}" != ''
            """)
            return cur.fetchone()["cnt"]
    with sourceConn.cursor() as cur:
        cur.execute(f"""
            SELECT COUNT(*) AS cnt
            FROM "{sourceTable}"
            WHERE "{sourceColumn}" IS NOT NULL
            AND "{sourceColumn}" != ''
            AND "{sourceColumn}" NOT IN (
                SELECT unnest(%(ids)s::text[])
            )
        """, {"ids": list(parentIds)})
        return cur.fetchone()["cnt"]
def _scanOrphans(dbFilter: Optional[str] = None) -> List[dict]:
    """Scan for orphaned records across all FK relationships.
    Uses a 5-minute cache to avoid repeated heavy scans.
    """
    global _orphanCache
    # Serve from cache when fresh; the filter is applied to the cached
    # full-scan results rather than triggering a narrower re-scan.
    with _orphanCacheLock:
        if _orphanCache and (time.time() - _orphanCache["ts"]) < _ORPHAN_CACHE_TTL:
            cached = _orphanCache["results"]
            if dbFilter:
                return [r for r in cached if r["sourceDb"] == dbFilter]
            return list(cached)
    relationships = _getFkRelationships()
    if dbFilter:
        relationships = [r for r in relationships if r.sourceDb == dbFilter]
    # NOTE(review): 'any' below is the builtin function, not typing.Any —
    # harmless at runtime (local annotations are never evaluated) but it
    # should be typing.Any for type checkers.
    connCache: Dict[str, any] = {}          # one reusable connection per DB
    tableCache: Dict[str, Set[str]] = {}    # existing public tables per DB
    parentIdCache: Dict[str, Set[str]] = {} # parent-ID sets per target column
    results: List[dict] = []
    def _ensureConn(dbName: str):
        # Lazily open and memoize one connection per database.
        if dbName not in connCache:
            connCache[dbName] = _getConnection(dbName)
        return connCache[dbName]
    def _existingTables(dbName: str) -> Set[str]:
        """Cached lookup of physically existing public tables in a DB."""
        if dbName not in tableCache:
            try:
                conn = _ensureConn(dbName)
                with conn.cursor() as cur:
                    cur.execute("""
                        SELECT table_name FROM information_schema.tables
                        WHERE table_schema = 'public'
                    """)
                    tableCache[dbName] = {row["table_name"] for row in cur.fetchall()}
            except Exception:
                # Unreachable DB: treat as "no tables" so its edges are skipped.
                tableCache[dbName] = set()
        return tableCache[dbName]
    try:
        for rel in relationships:
            try:
                # Skip edges whose tables do not physically exist (models may
                # be declared for tables that were never created).
                sourceTables = _existingTables(rel.sourceDb)
                if rel.sourceTable not in sourceTables:
                    continue
                if rel.sourceDb == rel.targetDb:
                    if rel.targetTable not in sourceTables:
                        continue
                else:
                    targetTables = _existingTables(rel.targetDb)
                    if rel.targetTable not in targetTables:
                        continue
                sourceConn = _ensureConn(rel.sourceDb)
                if rel.sourceDb == rel.targetDb:
                    # Same DB: a single NOT EXISTS query does the counting.
                    count = _countOrphansSameDb(
                        sourceConn, rel.sourceTable, rel.sourceColumn,
                        rel.targetTable, rel.targetColumn,
                    )
                else:
                    # Cross-DB: load (and cache) the parent-ID set once per
                    # target column, then count against it.
                    parentKey = f"{rel.targetDb}.{rel.targetTable}.{rel.targetColumn}"
                    if parentKey not in parentIdCache:
                        targetConn = _ensureConn(rel.targetDb)
                        parentIdCache[parentKey] = _loadParentIds(
                            targetConn, rel.targetTable, rel.targetColumn,
                        )
                    count = _countOrphansCrossDb(
                        sourceConn, rel.sourceTable, rel.sourceColumn,
                        parentIdCache[parentKey],
                    )
                results.append(asdict(OrphanResult(
                    sourceDb=rel.sourceDb,
                    sourceTable=rel.sourceTable,
                    sourceColumn=rel.sourceColumn,
                    targetDb=rel.targetDb,
                    targetTable=rel.targetTable,
                    targetColumn=rel.targetColumn,
                    orphanCount=count,
                )))
            except Exception as e:
                logger.warning(
                    f"Orphan scan failed for {rel.sourceDb}.{rel.sourceTable}.{rel.sourceColumn}: {e}"
                )
                # Roll back both involved connections so a failed statement
                # does not poison them for the remaining relationships.
                for dbKey in (rel.sourceDb, rel.targetDb):
                    if dbKey in connCache:
                        try:
                            connCache[dbKey].rollback()
                        except Exception:
                            pass
    finally:
        # Always release every pooled connection, even on unexpected errors.
        for conn in connCache.values():
            try:
                conn.close()
            except Exception:
                pass
    with _orphanCacheLock:
        _orphanCache = {"ts": time.time(), "results": results}
    return results
# ---------------------------------------------------------------------------
# Orphan cleanup
# ---------------------------------------------------------------------------
def _cleanOrphans(db: str, table: str, column: str) -> int:
    """Delete orphaned records for a single FK relationship. Returns count deleted.

    The relationship must exist in the FK registry; the delete runs in a
    single transaction on the source DB (commit on success, rollback on any
    error). The orphan cache is invalidated afterwards so the next scan
    reflects the cleanup.

    Raises:
        ValueError: If no FK relationship matches (db, table, column).
    """
    relationships = _getFkRelationships()
    rel = next(
        (r for r in relationships
        if r.sourceDb == db and r.sourceTable == table and r.sourceColumn == column),
        None,
    )
    if rel is None:
        raise ValueError(f"No FK relationship found for {db}.{table}.{column}")
    conn = _getConnection(rel.sourceDb)
    try:
        if rel.sourceDb == rel.targetDb:
            # Same DB: delete rows with no matching target via NOT EXISTS.
            with conn.cursor() as cur:
                cur.execute(f"""
                    DELETE FROM "{rel.sourceTable}"
                    WHERE "{rel.sourceColumn}" IS NOT NULL
                    AND "{rel.sourceColumn}" != ''
                    AND NOT EXISTS (
                        SELECT 1 FROM "{rel.targetTable}" t
                        WHERE t."{rel.targetColumn}" = "{rel.sourceTable}"."{rel.sourceColumn}"
                    )
                """)
                deleted = cur.rowcount
            conn.commit()
        else:
            # Cross-DB: fetch the valid parent IDs from the target DB first.
            targetConn = _getConnection(rel.targetDb)
            try:
                parentIds = _loadParentIds(targetConn, rel.targetTable, rel.targetColumn)
            finally:
                targetConn.close()
            if not parentIds:
                # Empty parent table: every non-NULL, non-empty FK is an orphan.
                with conn.cursor() as cur:
                    cur.execute(f"""
                        DELETE FROM "{rel.sourceTable}"
                        WHERE "{rel.sourceColumn}" IS NOT NULL
                        AND "{rel.sourceColumn}" != ''
                    """)
                    deleted = cur.rowcount
            else:
                # Ship the parent IDs as a text[] parameter, expand via unnest().
                with conn.cursor() as cur:
                    cur.execute(f"""
                        DELETE FROM "{rel.sourceTable}"
                        WHERE "{rel.sourceColumn}" IS NOT NULL
                        AND "{rel.sourceColumn}" != ''
                        AND "{rel.sourceColumn}" NOT IN (
                            SELECT unnest(%(ids)s::text[])
                        )
                    """, {"ids": list(parentIds)})
                    deleted = cur.rowcount
            conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()
    # Scan results are now stale — force a fresh scan on the next request.
    _invalidateOrphanCache()
    logger.info(f"Cleaned {deleted} orphans from {db}.{table}.{column}")
    return deleted
def _cleanAllOrphans() -> List[dict]:
    """Clean all detected orphans. Returns list of {db, table, column, deleted}.

    A failure on one relationship is recorded (deleted=0 plus an "error"
    key) and does not stop the remaining cleanups.
    """
    results: List[dict] = []
    for entry in _scanOrphans():
        summary = {
            "db": entry["sourceDb"],
            "table": entry["sourceTable"],
            "column": entry["sourceColumn"],
        }
        try:
            summary["deleted"] = _cleanOrphans(
                entry["sourceDb"], entry["sourceTable"], entry["sourceColumn"]
            )
        except Exception as e:
            logger.error(
                f"Failed to clean orphans for {entry['sourceDb']}.{entry['sourceTable']}.{entry['sourceColumn']}: {e}"
            )
            summary["deleted"] = 0
            summary["error"] = str(e)
        results.append(summary)
    return results
def _invalidateOrphanCache() -> None:
    """Drop the cached orphan-scan results so the next scan runs fresh."""
    global _orphanCache
    # Reset under the lock so a concurrent _scanOrphans never reads a
    # half-written cache entry.
    with _orphanCacheLock:
        _orphanCache = None

View file

@ -92,6 +92,7 @@ NAVIGATION_SECTIONS = [
"icon": "FaLink", "icon": "FaLink",
"path": "/basedata/connections", "path": "/basedata/connections",
"order": 10, "order": 10,
"public": True,
}, },
{ {
"id": "files", "id": "files",
@ -100,6 +101,7 @@ NAVIGATION_SECTIONS = [
"icon": "FaRegFileAlt", "icon": "FaRegFileAlt",
"path": "/basedata/files", "path": "/basedata/files",
"order": 20, "order": 20,
"public": True,
}, },
{ {
"id": "prompts", "id": "prompts",
@ -108,6 +110,7 @@ NAVIGATION_SECTIONS = [
"icon": "FaLightbulb", "icon": "FaLightbulb",
"path": "/basedata/prompts", "path": "/basedata/prompts",
"order": 30, "order": 30,
"public": True,
}, },
], ],
}, },
@ -329,6 +332,16 @@ NAVIGATION_SECTIONS = [
"adminOnly": True, "adminOnly": True,
"sysAdminOnly": True, "sysAdminOnly": True,
}, },
{
"id": "admin-database-health",
"objectKey": "ui.admin.databaseHealth",
"label": t("Datenbank-Gesundheit"),
"icon": "FaDatabase",
"path": "/admin/database-health",
"order": 98,
"adminOnly": True,
"sysAdminOnly": True,
},
{ {
"id": "admin-demo-config", "id": "admin-demo-config",
"objectKey": "ui.admin.demoConfig", "objectKey": "ui.admin.demoConfig",
@ -495,6 +508,11 @@ RESOURCE_OBJECTS = [
"label": "Store: CommCoach", "label": "Store: CommCoach",
"meta": {"category": "store", "featureCode": "commcoach"} "meta": {"category": "store", "featureCode": "commcoach"}
}, },
{
"objectKey": "resource.store.trustee",
"label": "Store: Trustee",
"meta": {"category": "store", "featureCode": "trustee"}
},
{ {
"objectKey": "resource.system.api.auth", "objectKey": "resource.system.api.auth",
"label": "Authentifizierungs-API", "label": "Authentifizierungs-API",

View file

@ -28,6 +28,8 @@ from modules.workflows.automation2.executors import (
) )
from modules.features.graphicalEditor.portTypes import _normalizeToSchema from modules.features.graphicalEditor.portTypes import _normalizeToSchema
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException as _SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError as _BillingContextError
from modules.workflows.automation2.runEnvelope import normalize_run_envelope from modules.workflows.automation2.runEnvelope import normalize_run_envelope
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -159,7 +161,8 @@ def _getExecutor(
return DataExecutor() return DataExecutor()
if (nodeType.startswith("ai.") or nodeType.startswith("email.") if (nodeType.startswith("ai.") or nodeType.startswith("email.")
or nodeType.startswith("sharepoint.") or nodeType.startswith("clickup.") or nodeType.startswith("sharepoint.") or nodeType.startswith("clickup.")
or nodeType.startswith("file.") or nodeType.startswith("trustee.")): or nodeType.startswith("file.") or nodeType.startswith("trustee.")
or nodeType.startswith("context.")):
return ActionNodeExecutor(services) return ActionNodeExecutor(services)
if nodeType.startswith("input.") and automation2_interface: if nodeType.startswith("input.") and automation2_interface:
return InputExecutor(automation2_interface) return InputExecutor(automation2_interface)
@ -278,7 +281,7 @@ async def _executeWithRetry(executor, node, context, maxRetries: int = 0, retryD
try: try:
result = await executor.execute(node, context) result = await executor.execute(node, context)
return result, attempt return result, attempt
except (PauseForHumanTaskError, PauseForEmailWaitError): except (PauseForHumanTaskError, PauseForEmailWaitError, _SubscriptionInactiveException, _BillingContextError):
raise raise
except Exception as e: except Exception as e:
lastError = e lastError = e
@ -424,6 +427,10 @@ async def executeGraph(
processed_in_loop: Set[str] = set() processed_in_loop: Set[str] = set()
_aggregateAccumulators: Dict[str, list] = {} _aggregateAccumulators: Dict[str, list] = {}
STEPLOG_BATCH_THRESHOLD = 100
AGGREGATE_FLUSH_THRESHOLD = 1000
_aggregateTempChunks: Dict[str, List[list]] = {}
# Check for loop resume: run was paused inside a loop, we're resuming for next iteration # Check for loop resume: run was paused inside a loop, we're resuming for next iteration
run = automation2_interface.getRun(runId) if (runId and automation2_interface) else None run = automation2_interface.getRun(runId) if (runId and automation2_interface) else None
loop_resume_state = (run.get("context") or {}).get("_loopState") if run else None loop_resume_state = (run.get("context") or {}).get("_loopState") if run else None
@ -483,6 +490,10 @@ async def executeGraph(
_updateStepLog(automation2_interface, _rStepId, "completed", _updateStepLog(automation2_interface, _rStepId, "completed",
durationMs=int((time.time() - _rStepStart) * 1000)) durationMs=int((time.time() - _rStepStart) * 1000))
raise raise
except (_SubscriptionInactiveException, _BillingContextError):
_updateStepLog(automation2_interface, _rStepId, "failed",
error="Subscription/Billing error", durationMs=int((time.time() - _rStepStart) * 1000))
raise
except Exception as ex: except Exception as ex:
_updateStepLog(automation2_interface, _rStepId, "failed", _updateStepLog(automation2_interface, _rStepId, "failed",
error=str(ex), durationMs=int((time.time() - _rStepStart) * 1000)) error=str(ex), durationMs=int((time.time() - _rStepStart) * 1000))
@ -551,71 +562,143 @@ async def executeGraph(
body_ordered = [n for n in ordered if n.get("id") in body_ids] body_ordered = [n for n in ordered if n.get("id") in body_ids]
processed_in_loop.update(body_ids) processed_in_loop.update(body_ids)
processed_in_loop.add(nodeId) processed_in_loop.add(nodeId)
for idx, item in enumerate(items): _loopConcurrency = int((node.get("parameters") or {}).get("concurrency", 1))
nodeOutputs[nodeId] = {"items": items, "count": len(items), "currentItem": item, "currentIndex": idx} _loopConcurrency = max(1, min(_loopConcurrency, 20))
context["_loopState"] = {"loopNodeId": nodeId, "currentIndex": idx, "items": items} _batchMode = len(items) > STEPLOG_BATCH_THRESHOLD
_aggLock = asyncio.Lock()
async def _runLoopIteration(_idx: int, _item: Any) -> Optional[Dict]:
"""Execute all body nodes for one iteration. Returns error dict or None."""
_iterOutputs = dict(nodeOutputs)
_iterOutputs[nodeId] = {"items": items, "count": len(items), "currentItem": _item, "currentIndex": _idx}
_iterCtx = dict(context)
_iterCtx["nodeOutputs"] = _iterOutputs if _loopConcurrency > 1 else nodeOutputs
_iterCtx["_loopState"] = {"loopNodeId": nodeId, "currentIndex": _idx, "items": items}
if _loopConcurrency == 1:
nodeOutputs[nodeId] = _iterOutputs[nodeId]
context["_loopState"] = _iterCtx["_loopState"]
_activeOutputs = _iterOutputs if _loopConcurrency > 1 else nodeOutputs
_activeCtx = _iterCtx if _loopConcurrency > 1 else context
for body_node in body_ordered: for body_node in body_ordered:
bnid = body_node.get("id") bnid = body_node.get("id")
if not bnid or context.get("_stopped"): if not bnid or context.get("_stopped"):
break break
if not _is_node_on_active_path(bnid, connectionMap, nodeOutputs): if not _is_node_on_active_path(bnid, connectionMap, _activeOutputs):
continue continue
bexec = _getExecutor(body_node.get("type", ""), services, automation2_interface) bexec = _getExecutor(body_node.get("type", ""), services, automation2_interface)
if not bexec: if not bexec:
nodeOutputs[bnid] = None _activeOutputs[bnid] = None
continue continue
_bStepStart = time.time() _bStepStart = time.time()
_bInputSnap = {"_loopItem": item, "_loopIndex": idx} _bStepId = None
for _bSrc, _, _ in connectionMap.get(bnid, []): if not _batchMode or _idx == 0 or _idx == len(items) - 1:
if _bSrc in nodeOutputs: _bInputSnap = {"_loopItem": _item, "_loopIndex": _idx}
_bInputSnap[_bSrc] = nodeOutputs[_bSrc]
_bStepId = _createStepLog(automation2_interface, runId, bnid, body_node.get("type", ""), "running", _bInputSnap) _bStepId = _createStepLog(automation2_interface, runId, bnid, body_node.get("type", ""), "running", _bInputSnap)
try: try:
bres, _bRetry = await _executeWithRetry(bexec, body_node, context) bres, _bRetry = await _executeWithRetry(bexec, body_node, _activeCtx)
# data.aggregate: accumulate instead of overwrite
if body_node.get("type") == "data.aggregate": if body_node.get("type") == "data.aggregate":
async with _aggLock:
if bnid not in _aggregateAccumulators: if bnid not in _aggregateAccumulators:
_aggregateAccumulators[bnid] = [] _aggregateAccumulators[bnid] = []
accItems = bres.get("items", [bres]) if isinstance(bres, dict) else [bres] accItems = bres.get("items", [bres]) if isinstance(bres, dict) else [bres]
_aggregateAccumulators[bnid].extend(accItems) _aggregateAccumulators[bnid].extend(accItems)
nodeOutputs[bnid] = bres if len(_aggregateAccumulators[bnid]) >= AGGREGATE_FLUSH_THRESHOLD:
_aggregateTempChunks.setdefault(bnid, []).append(_aggregateAccumulators[bnid])
_aggregateAccumulators[bnid] = []
_activeOutputs[bnid] = bres
if _bStepId:
_bDur = int((time.time() - _bStepStart) * 1000) _bDur = int((time.time() - _bStepStart) * 1000)
_updateStepLog(automation2_interface, _bStepId, "completed", _updateStepLog(automation2_interface, _bStepId, "completed",
output=bres if isinstance(bres, dict) else {"value": bres}, output=bres if isinstance(bres, dict) else {"value": bres},
durationMs=_bDur, retryCount=_bRetry) durationMs=_bDur, retryCount=_bRetry)
logger.info("executeGraph loop body node %s done (iter %d, retries=%d)", bnid, idx, _bRetry) if _loopConcurrency == 1:
nodeOutputs[bnid] = bres
except PauseForHumanTaskError as e: except PauseForHumanTaskError as e:
if _bStepId:
_updateStepLog(automation2_interface, _bStepId, "completed", _updateStepLog(automation2_interface, _bStepId, "completed",
durationMs=int((time.time() - _bStepStart) * 1000)) durationMs=int((time.time() - _bStepStart) * 1000))
if runId and automation2_interface: if runId and automation2_interface:
run = automation2_interface.getRun(runId) or {} _run = automation2_interface.getRun(runId) or {}
run_ctx = dict(run.get("context") or {}) _run_ctx = dict(_run.get("context") or {})
run_ctx["_loopState"] = {"loopNodeId": nodeId, "currentIndex": idx, "items": items} _run_ctx["_loopState"] = {"loopNodeId": nodeId, "currentIndex": _idx, "items": items}
automation2_interface.updateRun(e.runId, status="paused", nodeOutputs=_serializableOutputs(nodeOutputs), currentNodeId=e.nodeId, context=run_ctx) automation2_interface.updateRun(e.runId, status="paused", nodeOutputs=_serializableOutputs(nodeOutputs), currentNodeId=e.nodeId, context=_run_ctx)
return {"success": False, "paused": True, "taskId": e.taskId, "runId": e.runId, "nodeId": e.nodeId, "nodeOutputs": _serializableOutputs(nodeOutputs)} return {"_pause": True, "taskId": e.taskId, "runId": e.runId, "nodeId": e.nodeId}
except PauseForEmailWaitError as e: except PauseForEmailWaitError:
if _bStepId:
_updateStepLog(automation2_interface, _bStepId, "completed", _updateStepLog(automation2_interface, _bStepId, "completed",
durationMs=int((time.time() - _bStepStart) * 1000)) durationMs=int((time.time() - _bStepStart) * 1000))
raise raise
except (_SubscriptionInactiveException, _BillingContextError):
if _bStepId:
_updateStepLog(automation2_interface, _bStepId, "failed",
error="Subscription/Billing error", durationMs=int((time.time() - _bStepStart) * 1000))
raise
except Exception as ex: except Exception as ex:
if _bStepId:
_updateStepLog(automation2_interface, _bStepId, "failed", _updateStepLog(automation2_interface, _bStepId, "failed",
error=str(ex), durationMs=int((time.time() - _bStepStart) * 1000)) error=str(ex), durationMs=int((time.time() - _bStepStart) * 1000))
logger.exception("executeGraph loop body node %s FAILED: %s", bnid, ex) logger.exception("executeGraph loop body node %s FAILED (iter %d): %s", bnid, _idx, ex)
nodeOutputs[bnid] = {"error": str(ex), "success": False} return {"_error": str(ex), "failedNode": bnid}
if _batchMode and _idx > 0 and _idx % STEPLOG_BATCH_THRESHOLD == 0 and runId:
_emitStepEvent(runId, {"type": "loop_progress", "nodeId": nodeId, "iteration": _idx, "total": len(items)})
return None
if _loopConcurrency <= 1:
for idx, item in enumerate(items):
iterErr = await _runLoopIteration(idx, item)
if iterErr:
if iterErr.get("_pause"):
return {"success": False, "paused": True, "taskId": iterErr["taskId"], "runId": iterErr["runId"], "nodeId": iterErr["nodeId"], "nodeOutputs": _serializableOutputs(nodeOutputs)}
nodeOutputs[iterErr.get("failedNode", nodeId)] = {"error": iterErr["_error"], "success": False}
if runId and automation2_interface: if runId and automation2_interface:
automation2_interface.updateRun(runId, status="failed", nodeOutputs=_serializableOutputs(nodeOutputs)) automation2_interface.updateRun(runId, status="failed", nodeOutputs=_serializableOutputs(nodeOutputs))
if runId: if runId:
_activeRunContexts.pop(runId, None) _activeRunContexts.pop(runId, None)
return {"success": False, "error": str(ex), "nodeOutputs": _serializableOutputs(nodeOutputs), "failedNode": bnid, "runId": runId} return {"success": False, "error": iterErr["_error"], "nodeOutputs": _serializableOutputs(nodeOutputs), "failedNode": iterErr.get("failedNode"), "runId": runId}
else:
_sem = asyncio.Semaphore(_loopConcurrency)
async def _concurrentIter(_ci: int, _citem: Any):
async with _sem:
return await _runLoopIteration(_ci, _citem)
_tasks = [_concurrentIter(ci, citem) for ci, citem in enumerate(items)]
_results = await asyncio.gather(*_tasks, return_exceptions=True)
for _ri, _rval in enumerate(_results):
if isinstance(_rval, Exception):
logger.exception("Loop iteration %d raised: %s", _ri, _rval)
if runId and automation2_interface:
automation2_interface.updateRun(runId, status="failed", nodeOutputs=_serializableOutputs(nodeOutputs))
if runId:
_activeRunContexts.pop(runId, None)
return {"success": False, "error": str(_rval), "nodeOutputs": _serializableOutputs(nodeOutputs), "runId": runId}
if isinstance(_rval, dict):
if _rval.get("_pause"):
return {"success": False, "paused": True, "taskId": _rval["taskId"], "runId": _rval["runId"], "nodeId": _rval["nodeId"], "nodeOutputs": _serializableOutputs(nodeOutputs)}
if _rval.get("_error"):
if runId and automation2_interface:
automation2_interface.updateRun(runId, status="failed", nodeOutputs=_serializableOutputs(nodeOutputs))
if runId:
_activeRunContexts.pop(runId, None)
return {"success": False, "error": _rval["_error"], "nodeOutputs": _serializableOutputs(nodeOutputs), "failedNode": _rval.get("failedNode"), "runId": runId}
nodeOutputs[nodeId] = {"items": items, "count": len(items)} nodeOutputs[nodeId] = {"items": items, "count": len(items)}
# Finalize aggregate accumulators after loop
for aggId, accItems in _aggregateAccumulators.items(): for aggId, accItems in _aggregateAccumulators.items():
nodeOutputs[aggId] = {"items": accItems, "count": len(accItems), "_success": True} allChunks = _aggregateTempChunks.pop(aggId, [])
finalItems = []
for chunk in allChunks:
finalItems.extend(chunk)
finalItems.extend(accItems)
nodeOutputs[aggId] = {"items": finalItems, "count": len(finalItems), "_success": True}
_aggregateAccumulators.clear() _aggregateAccumulators.clear()
_updateStepLog(automation2_interface, _stepId, "completed", _updateStepLog(automation2_interface, _stepId, "completed",
output={"iterationCount": len(items), "items": len(items)}, output={"iterationCount": len(items), "items": len(items), "concurrency": _loopConcurrency, "batchMode": _batchMode},
durationMs=int((time.time() - _stepStartMs) * 1000)) durationMs=int((time.time() - _stepStartMs) * 1000))
logger.info("executeGraph flow.loop done: %d iterations", len(items)) logger.info("executeGraph flow.loop done: %d iterations (concurrency=%d, batchMode=%s)", len(items), _loopConcurrency, _batchMode)
elif _isMergeNode(nodeType): elif _isMergeNode(nodeType):
if not _allMergePredecessorsReady(nodeId, connectionMap, nodeOutputs): if not _allMergePredecessorsReady(nodeId, connectionMap, nodeOutputs):
logger.info("executeGraph node %s (flow.merge): waiting — not all predecessors ready, deferring", nodeId) logger.info("executeGraph node %s (flow.merge): waiting — not all predecessors ready, deferring", nodeId)

View file

@ -15,6 +15,8 @@ from modules.features.graphicalEditor.portTypes import (
_normalizeError, _normalizeError,
_unwrapTransit, _unwrapTransit,
) )
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException as _SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError as _BillingContextError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -299,6 +301,8 @@ class ActionNodeExecutor:
try: try:
executor = ActionExecutor(self.services) executor = ActionExecutor(self.services)
result = await executor.executeAction(methodName, actionName, resolvedParams) result = await executor.executeAction(methodName, actionName, resolvedParams)
except (_SubscriptionInactiveException, _BillingContextError):
raise
except Exception as e: except Exception as e:
logger.exception("ActionNodeExecutor node %s FAILED: %s", nodeId, e) logger.exception("ActionNodeExecutor node %s FAILED: %s", nodeId, e)
return _normalizeError(e, outputSchema) return _normalizeError(e, outputSchema)
@ -385,4 +389,13 @@ class ActionNodeExecutor:
except (json.JSONDecodeError, TypeError, ValueError): except (json.JSONDecodeError, TypeError, ValueError):
pass pass
if outputSchema == "ConsolidateResult" and nodeType == "ai.consolidate":
data_dict = result.data if isinstance(getattr(result, "data", None), dict) else {}
cr_out = {
"result": data_dict.get("result", ""),
"mode": data_dict.get("mode", resolvedParams.get("mode", "summarize")),
"count": int(data_dict.get("count", 0)),
}
return _normalizeToSchema(cr_out, outputSchema)
return _normalizeToSchema(out, outputSchema) return _normalizeToSchema(out, outputSchema)

View file

@ -30,6 +30,8 @@ class DataExecutor:
return await self._transform(node, nodeOutputs, nodeId, inputSources) return await self._transform(node, nodeOutputs, nodeId, inputSources)
if nodeType == "data.filter": if nodeType == "data.filter":
return await self._filter(node, nodeOutputs, nodeId, inputSources) return await self._filter(node, nodeOutputs, nodeId, inputSources)
if nodeType == "data.consolidate":
return await self._consolidate(node, nodeOutputs, nodeId, inputSources)
logger.debug("DataExecutor node %s unhandled type %s", nodeId, nodeType) logger.debug("DataExecutor node %s unhandled type %s", nodeId, nodeType)
return None return None
@ -110,10 +112,15 @@ class DataExecutor:
nodeId: str, nodeId: str,
inputSources: Dict, inputSources: Dict,
) -> Any: ) -> Any:
"""Filter items by condition expression. Returns Transit envelope.""" """Filter items by condition expression and/or UDM content type. Returns Transit envelope."""
inp = self._getInput(inputSources, nodeOutputs) inp = self._getInput(inputSources, nodeOutputs)
data = _unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp data = _unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
condition = (node.get("parameters") or {}).get("condition", "") params = node.get("parameters") or {}
condition = params.get("condition", "")
udmContentType = params.get("udmContentType", "")
if udmContentType and isinstance(data, dict) and data.get("children"):
data = self._filterUdmByContentType(data, udmContentType)
items = self._extractItems(data) items = self._extractItems(data)
originalCount = len(items) originalCount = len(items)
@ -137,6 +144,56 @@ class DataExecutor:
"filteredCount": len(filtered), "filteredCount": len(filtered),
}) })
async def _consolidate(
    self,
    node: Dict,
    nodeOutputs: Dict,
    nodeId: str,
    inputSources: Dict,
) -> Any:
    """Deterministic (non-AI) consolidation of the connected input items.

    Supported modes: ``table`` (default), ``concat``, ``merge``, ``csvJoin``.
    Returns a plain dict carrying the consolidated ``result``, the ``mode``
    used, the input item ``count`` and a ``_success`` flag.
    """
    raw = self._getInput(inputSources, nodeOutputs)
    # Unwrap a Transit envelope so we operate on the actual payload.
    payload = _unwrapTransit(raw) if isinstance(raw, dict) and raw.get("_transit") else raw
    cfg = node.get("parameters") or {}
    mode = cfg.get("mode", "table")
    separator = cfg.get("separator", "\n")
    items = [] if not isinstance(payload, (dict, list)) else self._extractItems(payload)
    count = len(items)
    if mode == "concat":
        # Plain string concatenation of every item, joined by the separator.
        result = separator.join(str(entry) for entry in items)
    elif mode == "csvJoin":
        # One output line per item; dict values are joined by the separator.
        lines = [
            separator.join(str(v) for v in entry.values())
            if isinstance(entry, dict) else str(entry)
            for entry in items
        ]
        result = "\n".join(lines)
    elif mode == "merge":
        # Shallow-merge all dict items; later keys overwrite earlier ones.
        combined: Dict = {}
        for entry in items:
            if isinstance(entry, dict):
                combined.update(entry)
        result = combined
    else:
        # Default "table" mode: union of dict keys as headers plus row dicts.
        # Scalars become {"value": ...} rows with a "value" header.
        headers: list = []
        rows = []
        for entry in items:
            if isinstance(entry, dict):
                for key in entry:
                    if key not in headers:
                        headers.append(key)
                rows.append(entry)
            else:
                rows.append({"value": entry})
                if "value" not in headers:
                    headers.append("value")
        result = {"headers": headers, "rows": rows}
    return {"result": result, "mode": mode, "count": count, "_success": True}
def _getInput(self, inputSources: Dict, nodeOutputs: Dict) -> Any: def _getInput(self, inputSources: Dict, nodeOutputs: Dict) -> Any:
"""Get data from the first connected input port.""" """Get data from the first connected input port."""
if 0 not in inputSources: if 0 not in inputSources:
@ -185,6 +242,21 @@ class DataExecutor:
return True return True
def _filterUdmByContentType(self, data: Dict, contentType: str) -> Dict:
"""Filter UDM document/node, keeping only ContentBlocks matching the given contentType."""
result: list = []
children = data.get("children") or []
for child in children:
if not isinstance(child, dict):
continue
if child.get("contentType") == contentType:
result.append(child)
elif isinstance(child.get("children"), list):
for block in child["children"]:
if isinstance(block, dict) and block.get("contentType") == contentType:
result.append(block)
return {"nodes": result, "count": len(result), "_udmFiltered": True}
def _compareValues(self, left: Any, operator: str, right: Any) -> bool: def _compareValues(self, left: Any, operator: str, right: Any) -> bool:
"""Compare two values with the given operator.""" """Compare two values with the given operator."""
if operator == "eq": if operator == "eq":

View file

@ -277,17 +277,61 @@ class FlowExecutor:
return False return False
async def _loop(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any: async def _loop(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
itemsPath = (node.get("parameters") or {}).get("items", "[]") params = node.get("parameters") or {}
itemsPath = params.get("items", "[]")
level = params.get("level", "auto")
from modules.workflows.automation2.graphUtils import resolveParameterReferences from modules.workflows.automation2.graphUtils import resolveParameterReferences
items = resolveParameterReferences(itemsPath, nodeOutputs) items = resolveParameterReferences(itemsPath, nodeOutputs)
if isinstance(items, list):
if level != "auto" and isinstance(items, dict):
items = self._resolveUdmLevel(items, level)
elif isinstance(items, list):
pass pass
elif isinstance(items, dict): elif isinstance(items, dict):
children = items.get("children")
if isinstance(children, list) and children:
items = children
else:
items = [{"name": k, "value": v} for k, v in items.items()] items = [{"name": k, "value": v} for k, v in items.items()]
else: else:
items = [items] if items is not None else [] items = [items] if items is not None else []
return {"items": items, "count": len(items)} return {"items": items, "count": len(items)}
def _resolveUdmLevel(self, udm: Dict, level: str) -> list:
"""Extract items from a UDM document/node at the requested structural level."""
children = udm.get("children") or []
if level == "documents":
return [c for c in children if isinstance(c, dict) and c.get("role") in ("document", "archive")]
if level == "structuralNodes":
if udm.get("role") == "document":
return children
out = []
for child in children:
if isinstance(child, dict) and isinstance(child.get("children"), list):
out.extend(child["children"])
elif isinstance(child, dict):
out.append(child)
return out if out else children
if level == "contentBlocks":
blocks = []
nodes = children
if udm.get("role") == "document":
for sn in nodes:
if isinstance(sn, dict) and isinstance(sn.get("children"), list):
blocks.extend(sn["children"])
elif udm.get("role") in ("page", "section", "slide", "sheet"):
blocks = nodes
else:
for child in nodes:
if isinstance(child, dict) and isinstance(child.get("children"), list):
for sn in child["children"]:
if isinstance(sn, dict) and isinstance(sn.get("children"), list):
blocks.extend(sn["children"])
else:
blocks.append(sn)
return blocks
return children
async def _merge(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict, context: Dict) -> Any: async def _merge(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict, context: Dict) -> Any:
"""Merge multiple branch inputs. mode: first | all | append.""" """Merge multiple branch inputs. mode: first | all | append."""
mode = (node.get("parameters") or {}).get("mode", "first") mode = (node.get("parameters") or {}).get("mode", "first")

View file

@ -11,6 +11,7 @@ from .translateDocument import translateDocument
from .convertDocument import convertDocument from .convertDocument import convertDocument
from .generateDocument import generateDocument from .generateDocument import generateDocument
from .generateCode import generateCode from .generateCode import generateCode
from .consolidate import consolidate
__all__ = [ __all__ = [
'process', 'process',
@ -20,5 +21,6 @@ __all__ = [
'convertDocument', 'convertDocument',
'generateDocument', 'generateDocument',
'generateCode', 'generateCode',
'consolidate',
] ]

View file

@ -0,0 +1,87 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
import json
import logging
from typing import Any, Dict, List
from modules.datamodels.datamodelAi import AiCallOptions, AiCallRequest, OperationTypeEnum
from modules.datamodels.datamodelChat import ActionResult
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError
logger = logging.getLogger(__name__)
def _normalizeItems(parameters: Dict[str, Any]) -> List[Any]:
items = parameters.get("items")
if isinstance(items, list):
return items
agg = parameters.get("aggregateResult")
if isinstance(agg, dict) and isinstance(agg.get("items"), list):
return agg["items"]
return []
async def consolidate(self, parameters: Dict[str, Any]) -> ActionResult:
    """AI-assisted consolidation of aggregated loop / workflow items.

    Builds a mode-specific instruction, appends the JSON-serialized items
    (capped) and sends one AI call. Subscription/billing exceptions propagate;
    any other AI failure is returned as an ActionResult failure.
    """
    mode = (parameters.get("mode") or "summarize").strip()
    extra = (parameters.get("prompt") or "").strip()
    items = _normalizeItems(parameters)
    if not items:
        return ActionResult.isFailure(
            error="No items to consolidate. Connect an AggregateResult or pass items.",
        )
    # Serialize the items; cap the payload so it stays within model context limits.
    try:
        payload = json.dumps(items, ensure_ascii=False, default=str)[:120000]
    except TypeError:
        payload = str(items)[:120000]
    instructionsByMode = {
        "summarize": "Summarize the following aggregated workflow results clearly and concisely.",
        "classify": (
            "Classify and group the following aggregated items. "
            "Output a structured summary (categories, counts, key labels)."
        ),
        "semanticMerge": (
            "Semantically merge the following items into one coherent result. "
            "Remove duplicates where appropriate."
        ),
    }
    instr = instructionsByMode.get(
        mode, "Process the following aggregated data according to the user instructions."
    )
    if extra:
        instr += f"\n\nAdditional instructions: {extra}"
    prompt = f"{instr}\n\n--- DATA ---\n{payload}"
    ai_service = getattr(self.services, "ai", None)
    if not ai_service:
        return ActionResult.isFailure(error="AI service unavailable")
    try:
        req = AiCallRequest(
            prompt=prompt,
            options=AiCallOptions(operationType=OperationTypeEnum.DATA_ANALYSE),
        )
        resp = await ai_service.callAi(req)
    except (SubscriptionInactiveException, BillingContextError):
        # Billing/subscription problems must propagate so the run fails accordingly.
        raise
    except Exception as e:
        logger.exception("consolidate: AI call failed: %s", e)
        return ActionResult.isFailure(error=str(e))
    if getattr(resp, "errorCount", 0) and resp.errorCount > 0:
        return ActionResult.isFailure(error=resp.content or "AI call failed")
    text = (resp.content or "").strip()
    return ActionResult.isSuccess(
        data={
            "result": text,
            "mode": mode,
            "count": len(items),
        },
    )

View file

@ -8,6 +8,8 @@ from modules.datamodels.datamodelChat import ActionResult, ActionDocument
from modules.datamodels.datamodelExtraction import ContentPart from modules.datamodels.datamodelExtraction import ContentPart
from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
from modules.datamodels.datamodelWorkflow import AiResponse, DocumentData from modules.datamodels.datamodelWorkflow import AiResponse, DocumentData
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -125,6 +127,8 @@ async def generateCode(self, parameters: Dict[str, Any]) -> ActionResult:
return ActionResult.isSuccess(documents=documents) return ActionResult.isSuccess(documents=documents)
except (SubscriptionInactiveException, BillingContextError):
raise
except Exception as e: except Exception as e:
logger.error(f"Error in code generation: {str(e)}") logger.error(f"Error in code generation: {str(e)}")
return ActionResult.isFailure(error=str(e)) return ActionResult.isFailure(error=str(e))

View file

@ -8,6 +8,8 @@ from modules.datamodels.datamodelChat import ActionResult, ActionDocument
from modules.datamodels.datamodelExtraction import ContentPart from modules.datamodels.datamodelExtraction import ContentPart
from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
from modules.datamodels.datamodelWorkflow import AiResponse, DocumentData from modules.datamodels.datamodelWorkflow import AiResponse, DocumentData
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -127,6 +129,8 @@ async def generateDocument(self, parameters: Dict[str, Any]) -> ActionResult:
return ActionResult.isSuccess(documents=documents) return ActionResult.isSuccess(documents=documents)
except (SubscriptionInactiveException, BillingContextError):
raise
except Exception as e: except Exception as e:
logger.error(f"Error in document generation: {str(e)}") logger.error(f"Error in document generation: {str(e)}")
return ActionResult.isFailure(error=str(e)) return ActionResult.isFailure(error=str(e))

View file

@ -10,6 +10,8 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelChat import ActionResult, ActionDocument from modules.datamodels.datamodelChat import ActionResult, ActionDocument
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, ProcessingModeEnum from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, ProcessingModeEnum
from modules.datamodels.datamodelExtraction import ContentPart from modules.datamodels.datamodelExtraction import ContentPart
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -340,6 +342,13 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
return ActionResult.isSuccess(documents=final_documents) return ActionResult.isSuccess(documents=final_documents)
except (SubscriptionInactiveException, BillingContextError):
try:
if operationId:
self.services.chat.progressLogFinish(operationId, False)
except Exception:
pass
raise
except Exception as e: except Exception as e:
logger.error(f"Error in AI processing: {str(e)}") logger.error(f"Error in AI processing: {str(e)}")

View file

@ -8,6 +8,8 @@ import json
from typing import Dict, Any from typing import Dict, Any
from modules.datamodels.datamodelChat import ActionResult, ActionDocument from modules.datamodels.datamodelChat import ActionResult, ActionDocument
from modules.serviceCenter import ServiceCenterContext, getService, can_access_service from modules.serviceCenter import ServiceCenterContext, getService, can_access_service
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -112,6 +114,13 @@ async def webResearch(self, parameters: Dict[str, Any]) -> ActionResult:
return ActionResult.isSuccess(documents=[actionDocument]) return ActionResult.isSuccess(documents=[actionDocument])
except (SubscriptionInactiveException, BillingContextError):
try:
if operationId:
self.services.chat.progressLogFinish(operationId, False)
except Exception:
pass
raise
except Exception as e: except Exception as e:
logger.error(f"Error in web research: {str(e)}") logger.error(f"Error in web research: {str(e)}")
try: try:

View file

@ -18,6 +18,7 @@ from .actions.translateDocument import translateDocument
from .actions.convertDocument import convertDocument from .actions.convertDocument import convertDocument
from .actions.generateDocument import generateDocument from .actions.generateDocument import generateDocument
from .actions.generateCode import generateCode from .actions.generateCode import generateCode
from .actions.consolidate import consolidate
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -317,7 +318,38 @@ class MethodAi(MethodBase):
) )
}, },
execute=generateCode.__get__(self, self.__class__) execute=generateCode.__get__(self, self.__class__)
) ),
"consolidate": WorkflowActionDefinition(
actionId="ai.consolidate",
description="AI-assisted consolidation of aggregated workflow results (summarize, classify, semantic merge)",
dynamicMode=True,
parameters={
"mode": WorkflowActionParameter(
name="mode",
type="str",
frontendType=FrontendType.SELECT,
frontendOptions=["summarize", "classify", "semanticMerge"],
required=False,
default="summarize",
description="Consolidation strategy",
),
"prompt": WorkflowActionParameter(
name="prompt",
type="str",
frontendType=FrontendType.TEXTAREA,
required=False,
description="Optional extra instructions for the LLM",
),
"items": WorkflowActionParameter(
name="items",
type="List[Any]",
frontendType=FrontendType.HIDDEN,
required=False,
description="Aggregated items (from AggregateResult wire handover)",
),
},
execute=consolidate.__get__(self, self.__class__)
),
} }
# Validate actions after definition # Validate actions after definition
@ -331,6 +363,7 @@ class MethodAi(MethodBase):
self.convertDocument = convertDocument.__get__(self, self.__class__) self.convertDocument = convertDocument.__get__(self, self.__class__)
self.generateDocument = generateDocument.__get__(self, self.__class__) self.generateDocument = generateDocument.__get__(self, self.__class__)
self.generateCode = generateCode.__get__(self, self.__class__) self.generateCode = generateCode.__get__(self, self.__class__)
self.consolidate = consolidate.__get__(self, self.__class__)
def _format_timestamp_for_filename(self) -> str: def _format_timestamp_for_filename(self) -> str:
"""Format current timestamp as YYYYMMDD-hhmmss for filenames.""" """Format current timestamp as YYYYMMDD-hhmmss for filenames."""

Some files were not shown because too many files have changed in this diff Show more