Graph and data class alignment (strict)
This commit is contained in:
parent 60d5062204
commit d505ffd9cd

22 changed files with 1269 additions and 165 deletions
BIN
assets/fonts/NotoEmoji-Regular.ttf
Normal file
Binary file not shown.
@@ -345,7 +345,11 @@ class AiOpenai(BaseConnectorAi):
             "model": model.name,
             "messages": messages,
             "temperature": temperature,
-            "max_tokens": maxTokens
+            # Universal output-length cap. `max_tokens` is deprecated and
+            # rejected outright by gpt-5.x / o-series; `max_completion_tokens`
+            # is accepted by every current chat-completions model (legacy
+            # gpt-4o, gpt-4.1, gpt-5.x, o1/o3/o4) per OpenAI API reference.
+            "max_completion_tokens": maxTokens
         }

         if modelCall.tools:
@@ -425,7 +429,10 @@ class AiOpenai(BaseConnectorAi):
             "model": model.name,
             "messages": messages,
             "temperature": temperature,
-            "max_tokens": model.maxTokens,
+            # See callAiBasic for the rationale: `max_completion_tokens`
+            # is the universal output-length parameter; `max_tokens` is
+            # deprecated and rejected by gpt-5.x / o-series.
+            "max_completion_tokens": model.maxTokens,
             "stream": True,
         }
         if modelCall.tools:
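Note: for readers unfamiliar with the parameter, a minimal sketch of the same payload sent through the OpenAI Python SDK (recent versions expose `max_completion_tokens` directly); the model name and token budget are placeholders, not values used by this connector:

    from openai import OpenAI

    client = OpenAI()  # reads OPENAI_API_KEY from the environment
    response = client.chat.completions.create(
        model="gpt-4o-mini",                 # placeholder model name
        messages=[{"role": "user", "content": "Ping"}],
        temperature=0.2,
        max_completion_tokens=256,           # universal output cap; replaces deprecated max_tokens
    )
    print(response.choices[0].message.content)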
@@ -193,7 +193,13 @@ class AuditLogEntry(BaseModel):
     success: bool = Field(
         default=True,
         description="Whether the action was successful",
-        json_schema_extra={"label": "Erfolgreich", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": True}
+        json_schema_extra={
+            "label": "Erfolgreich",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": True,
+            "frontend_format_labels": ["OK", "-", "Fehler"],
+        },
     )

     errorMessage: Optional[str] = Field(
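Note: `frontend_format_labels` carries three entries; the sketch below assumes (an assumption, not stated in the commit) that they map the values True / unset / False of a checkbox column to display text:

    def formatCheckbox(value, labels):
        """Map a nullable boolean to its display label: [true, unset, false]."""
        trueLabel, unsetLabel, falseLabel = labels
        if value is None:
            return unsetLabel
        return trueLabel if value else falseLabel

    formatCheckbox(True,  ["OK", "-", "Fehler"])   # -> "OK"
    formatCheckbox(None,  ["OK", "-", "Fehler"])   # -> "-"
    formatCheckbox(False, ["OK", "-", "Fehler"])   # -> "Fehler"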
@@ -5,10 +5,11 @@ Invitation model for self-service onboarding.
 Token-basierte Einladungen für neue User zu Mandanten/Features.
 """

+import time
 import uuid
 import secrets
 from typing import Optional, List
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, computed_field
 from modules.datamodels.datamodelBase import PowerOnModel
 from modules.shared.i18nRegistry import i18nModel

@@ -94,10 +95,26 @@ class Invitation(PowerOnModel):
         json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
     )

-    emailSent: Optional[bool] = Field(
+    emailSentFlag: Optional[bool] = Field(
         default=False,
         description="Whether the invitation email was successfully sent",
-        json_schema_extra={"label": "E-Mail gesendet", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
+        json_schema_extra={
+            "label": "E-Mail gesendet",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
     )
+    emailSentAt: Optional[float] = Field(
+        default=None,
+        description="Timestamp when the invitation email was sent (UTC, seconds)",
+        json_schema_extra={
+            "label": "E-Mail gesendet am",
+            "frontend_type": "timestamp",
+            "frontend_readonly": True,
+            "frontend_required": False,
+        },
+    )

     maxUses: int = Field(
@@ -113,3 +130,33 @@ class Invitation(PowerOnModel):
         description="Current number of times this invitation has been used",
         json_schema_extra={"label": "Aktuelle Verwendungen", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False}
     )
+
+    @computed_field( # type: ignore[prop-decorator]
+        json_schema_extra={
+            "label": "Abgelaufen",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
+    )
+    @property
+    def expiredFlag(self) -> bool:
+        """True iff `expiresAt` lies in the past (UTC)."""
+        if self.expiresAt is None:
+            return False
+        return float(self.expiresAt) < time.time()
+
+    @computed_field( # type: ignore[prop-decorator]
+        json_schema_extra={
+            "label": "Verbraucht",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
+    )
+    @property
+    def usedUpFlag(self) -> bool:
+        """True iff `currentUses >= maxUses`."""
+        return (self.currentUses or 0) >= (self.maxUses or 1)
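Note: because the two flags are declared with `@computed_field`, they are included by `model_dump()` and therefore by every route that serializes an Invitation. A standalone sketch of the pattern, stripped of PowerOnModel and i18n specifics:

    import time
    from typing import Optional
    from pydantic import BaseModel, computed_field

    class InvitationSketch(BaseModel):
        expiresAt: Optional[float] = None
        maxUses: int = 1
        currentUses: int = 0

        @computed_field  # type: ignore[prop-decorator]
        @property
        def expiredFlag(self) -> bool:
            return self.expiresAt is not None and float(self.expiresAt) < time.time()

        @computed_field  # type: ignore[prop-decorator]
        @property
        def usedUpFlag(self) -> bool:
            return (self.currentUses or 0) >= (self.maxUses or 1)

    InvitationSketch(expiresAt=time.time() - 60, currentUses=1).model_dump()
    # -> {'expiresAt': ..., 'maxUses': 1, 'currentUses': 1, 'expiredFlag': True, 'usedUpFlag': True}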
@ -23,6 +23,7 @@ from modules.datamodels.datamodelMembership import UserMandate, FeatureAccess
|
|||
from modules.datamodels.datamodelBilling import BillingTransaction
|
||||
from modules.datamodels.datamodelSubscription import MandateSubscription
|
||||
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
|
||||
from modules.datamodels.datamodelRbac import Role
|
||||
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutralizerAttributes
|
||||
from modules.shared.i18nRegistry import i18nModel
|
||||
|
||||
|
|
@ -197,3 +198,114 @@ class DataNeutralizerAttributesView(DataNeutralizerAttributes):
|
|||
|
||||
# Manual registration for non-PowerOnModel view
|
||||
MODEL_REGISTRY["DataNeutralizerAttributesView"] = DataNeutralizerAttributesView # type: ignore[assignment]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Role view — admin RBAC list with computed `scopeType` + `userCount`
|
||||
#
|
||||
# `scopeType` is computed in the route from (mandateId, isSystemRole). Exposed
|
||||
# here as a pure `select` field so the frontend renders the user-facing label
|
||||
# from `frontend_options` (no hardcoded mapping in the page).
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Rolle (Ansicht)")
|
||||
class RoleView(Role):
|
||||
"""Role extended with computed scope information for the admin UI."""
|
||||
|
||||
scopeType: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Computed scope: 'system' (template), 'global', or 'mandate'.",
|
||||
json_schema_extra={
|
||||
"label": "Geltungsbereich",
|
||||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_options": [
|
||||
{"value": "system", "label": "System-Template"},
|
||||
{"value": "global", "label": "Template"},
|
||||
{"value": "mandate", "label": "Mandant"},
|
||||
],
|
||||
},
|
||||
)
|
||||
userCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of users assigned to this role (via UserMandateRole).",
|
||||
json_schema_extra={
|
||||
"label": "Benutzer",
|
||||
"frontend_type": "number",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Automation Workflow — dashboard view with synthesized fields
|
||||
# ============================================================================
|
||||
|
||||
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
|
||||
|
||||
|
||||
@i18nModel("Workflow (Ansicht)")
|
||||
class Automation2WorkflowView(AutoWorkflow):
|
||||
"""AutoWorkflow extended with computed dashboard fields.
|
||||
|
||||
Used exclusively for /api/attributes/ so the frontend can resolve column
|
||||
types for the workflow dashboard table (FormGeneratorTable).
|
||||
"""
|
||||
|
||||
sysCreatedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Record creation timestamp (UTC)",
|
||||
json_schema_extra={
|
||||
"label": "Erstellt",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
lastStartedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp of the most recent workflow run start",
|
||||
json_schema_extra={
|
||||
"label": "Zuletzt gestartet",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
runCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Total number of runs for this workflow",
|
||||
json_schema_extra={
|
||||
"label": "Laeufe",
|
||||
"frontend_type": "number",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
mandateLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance label (resolved from featureInstanceId)",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature code of the owning instance",
|
||||
json_schema_extra={"label": "Feature", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
isRunning: Optional[bool] = Field(
|
||||
default=None,
|
||||
description="Whether the workflow currently has an active run",
|
||||
json_schema_extra={
|
||||
"label": "Läuft",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
|
@ -98,7 +98,12 @@ class AutoWorkflow(PowerOnModel):
|
|||
isTemplate: bool = Field(
|
||||
default=False,
|
||||
description="Whether this workflow is a template",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Ist Vorlage"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_required": False,
|
||||
"label": "Ist Vorlage",
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
templateSourceId: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@@ -108,18 +113,43 @@ class AutoWorkflow(PowerOnModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Vorlagen-Quelle",
-            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
+            # Soft FK: holds either a real AutoWorkflow.id (UUID, when copied
+            # from a stored template) OR an in-code sentinel like
+            # "trustee-receipt-import" (when bootstrapped from
+            # featureModule.getTemplateWorkflows()). Sentinel values do not
+            # exist as DB rows by design — orphan cleanup MUST skip this column.
+            "fk_target": {
+                "db": "poweron_graphicaleditor",
+                "table": "AutoWorkflow",
+                "labelField": "label",
+                "softFk": True,
+            },
         },
     )
     templateScope: Optional[str] = Field(
         default=None,
         description="Template scope: user, instance, mandate, system (AutoTemplateScope)",
-        json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Vorlagen-Bereich"},
+        json_schema_extra={
+            "frontend_type": "select",
+            "frontend_required": False,
+            "label": "Vorlagen-Bereich",
+            "frontend_options": [
+                {"value": "user", "label": "Meine"},
+                {"value": "instance", "label": "Instanz"},
+                {"value": "mandate", "label": "Mandant"},
+                {"value": "system", "label": "System"},
+            ],
+        },
     )
     sharedReadOnly: bool = Field(
         default=False,
         description="If true, shared template is read-only for non-owners",
-        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Freigabe nur-lesen"},
+        json_schema_extra={
+            "frontend_type": "checkbox",
+            "frontend_required": False,
+            "label": "Freigabe nur-lesen",
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
     )
     currentVersionId: Optional[str] = Field(
         default=None,
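Note: the `softFk` marker only helps if the orphan scanner consults it. A hedged sketch of such a guard; the helper name and the assumption that `json_schema_extra` is a plain dict are illustrative, not part of this commit:

    def isSoftFk(modelClass, fieldName: str) -> bool:
        """Return True when the field's fk_target is marked softFk, i.e. values may
        legitimately point at rows that do not exist (in-code sentinels)."""
        field = modelClass.model_fields.get(fieldName)
        extra = (field.json_schema_extra or {}) if field is not None else {}
        fkTarget = extra.get("fk_target") or {}
        return bool(fkTarget.get("softFk"))

    # An orphan scan would then skip the column:
    # if isSoftFk(AutoWorkflow, "templateSourceId"): continue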
@ -135,7 +165,12 @@ class AutoWorkflow(PowerOnModel):
|
|||
active: bool = Field(
|
||||
default=True,
|
||||
description="Whether workflow is active",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Aktiv"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_required": False,
|
||||
"label": "Aktiv",
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
eventId: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -145,7 +180,12 @@ class AutoWorkflow(PowerOnModel):
|
|||
notifyOnFailure: bool = Field(
|
||||
default=True,
|
||||
description="Send notification (in-app + email) when a run fails",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Bei Fehler benachrichtigen"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_required": False,
|
||||
"label": "Bei Fehler benachrichtigen",
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
# Legacy fields kept for backward compatibility during transition
|
||||
graph: Dict[str, Any] = Field(
|
||||
|
|
@ -189,7 +229,16 @@ class AutoVersion(PowerOnModel):
|
|||
status: str = Field(
|
||||
default=AutoWorkflowStatus.DRAFT.value,
|
||||
description="Version status: draft, published, archived",
|
||||
json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "draft", "label": "Entwurf"},
|
||||
{"value": "published", "label": "Veröffentlicht"},
|
||||
{"value": "archived", "label": "Archiviert"},
|
||||
],
|
||||
},
|
||||
)
|
||||
graph: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -281,7 +330,18 @@ class AutoRun(PowerOnModel):
|
|||
status: str = Field(
|
||||
default=AutoRunStatus.RUNNING.value,
|
||||
description="Status: running, paused, completed, failed, cancelled",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "running", "label": "Läuft"},
|
||||
{"value": "paused", "label": "Pausiert"},
|
||||
{"value": "completed", "label": "Abgeschlossen"},
|
||||
{"value": "failed", "label": "Fehlgeschlagen"},
|
||||
{"value": "cancelled", "label": "Abgebrochen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
trigger: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -362,7 +422,18 @@ class AutoStepLog(PowerOnModel):
|
|||
status: str = Field(
|
||||
default=AutoStepStatus.PENDING.value,
|
||||
description="Step status: pending, running, completed, failed, skipped",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "pending", "label": "Wartend"},
|
||||
{"value": "running", "label": "Läuft"},
|
||||
{"value": "completed", "label": "Abgeschlossen"},
|
||||
{"value": "failed", "label": "Fehlgeschlagen"},
|
||||
{"value": "skipped", "label": "Übersprungen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
inputSnapshot: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -464,7 +535,17 @@ class AutoTask(PowerOnModel):
|
|||
status: str = Field(
|
||||
default=AutoTaskStatus.PENDING.value,
|
||||
description="Status: pending, completed, cancelled, expired",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "pending", "label": "Wartend"},
|
||||
{"value": "completed", "label": "Abgeschlossen"},
|
||||
{"value": "cancelled", "label": "Abgebrochen"},
|
||||
{"value": "expired", "label": "Abgelaufen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
result: Optional[Dict[str, Any]] = Field(
|
||||
default=None,
|
||||
|
|
@ -7,6 +7,7 @@ Uses PostgreSQL poweron_graphicaleditor database (Greenfield).
|
|||
|
||||
import base64
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from typing import Dict, Any, List, Optional
|
||||
|
||||
|
|
@ -278,6 +279,7 @@ class GraphicalEditorObjects:
|
|||
"workflowId": workflowId,
|
||||
"label": label,
|
||||
"status": "running",
|
||||
"startedAt": time.time(),
|
||||
"nodeOutputs": _make_json_serializable(nodeOutputs or {}),
|
||||
"currentNodeId": None,
|
||||
"context": ctx,
|
||||
|
|
@ -314,6 +316,8 @@ class GraphicalEditorObjects:
|
|||
updates = {}
|
||||
if status is not None:
|
||||
updates["status"] = status
|
||||
if status in ("completed", "failed", "stopped", "cancelled") and not run.get("completedAt"):
|
||||
updates["completedAt"] = time.time()
|
||||
if nodeOutputs is not None:
|
||||
updates["nodeOutputs"] = _make_json_serializable(nodeOutputs)
|
||||
if currentNodeId is not None:
|
||||
@@ -526,9 +526,17 @@ def get_templates(
     instanceId: str = Path(..., description="Feature instance ID"),
     scope: Optional[str] = Query(None, description="Filter by scope: user, instance, mandate, system"),
     pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
+    mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
+    column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
     context: RequestContext = Depends(getRequestContext),
 ):
-    """List workflow templates with optional pagination."""
+    """List workflow templates with optional pagination.
+
+    Supports the FormGeneratorTable backend pattern:
+    - default: paginated/filtered/sorted ``{items, pagination}`` response
+    - ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
+    - ``mode=ids``: all IDs matching current filters
+    """
     mandateId = _validateInstanceAccess(instanceId, context)
     iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
     templates = iface.getTemplates(scope=scope)
@@ -537,6 +545,16 @@ def get_templates(
     from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
     enrichRowsWithFkLabels(templates, AutoWorkflow)

+    if mode == "filterValues":
+        if not column:
+            raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
+        from modules.routes.routeHelpers import handleFilterValuesInMemory
+        return handleFilterValuesInMemory(templates, column, pagination)
+
+    if mode == "ids":
+        from modules.routes.routeHelpers import handleIdsInMemory
+        return handleIdsInMemory(templates, pagination)
+
    paginationParams = None
    if pagination:
        try:
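Note: a hedged usage sketch of the three request shapes the docstring describes; the base URL, route prefix, and instance id are placeholders taken by analogy with the other list routes in this feature, not confirmed by the diff:

    import requests

    base = "https://example.invalid/api/graphical-editor"  # placeholder prefix

    # 1) default: paginated/filtered/sorted {"items": [...], "pagination": {...}}
    requests.get(f"{base}/INSTANCE_ID/templates",
                 params={"pagination": '{"page": 1, "pageSize": 20}'})

    # 2) distinct values of one column, cross-filtered (filter dropdowns)
    requests.get(f"{base}/INSTANCE_ID/templates",
                 params={"mode": "filterValues", "column": "templateScope"})

    # 3) all IDs matching the current filters ("select all")
    requests.get(f"{base}/INSTANCE_ID/templates", params={"mode": "ids"})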
@ -1242,9 +1260,17 @@ def get_workflows(
|
|||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
active: Optional[bool] = Query(None, description="Filter by active: true|false"),
|
||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
||||
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List all workflows for this feature instance."""
|
||||
"""List all workflows for this feature instance.
|
||||
|
||||
Supports the FormGeneratorTable backend pattern:
|
||||
- default: paginated/filtered/sorted ``{items, pagination}`` response
|
||||
- ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
|
||||
- ``mode=ids``: all IDs matching current filters (for "select all")
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||
items = iface.getWorkflows(active=active)
|
||||
|
|
@ -1272,10 +1298,19 @@ def get_workflows(
|
|||
"runStatus": active_run.get("status") if active_run else None,
|
||||
"stuckAtNodeId": stuck_at_node_id,
|
||||
"stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
|
||||
"createdAt": wf.get("sysCreatedAt"),
|
||||
"lastStartedAt": last_started_at,
|
||||
})
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory
|
||||
return handleFilterValuesInMemory(enriched, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
return handleIdsInMemory(enriched, pagination)
|
||||
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -837,6 +837,43 @@ class TrusteeAccountingConfig(PowerOnModel):
|
|||
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt", "frontend_type": "timestamp"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Position (Ansicht)")
|
||||
class TrusteePositionView(TrusteePosition):
|
||||
"""``TrusteePosition`` extended with computed display fields for the table.
|
||||
|
||||
The route enriches each row with the latest accounting-sync state so the
|
||||
frontend can render `syncStatus` (select) + `syncErrorMessage` (text) via
|
||||
`resolveColumnTypes` instead of hardcoded label maps in the page.
|
||||
"""
|
||||
|
||||
syncStatus: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Latest accounting-sync status for this position.",
|
||||
json_schema_extra={
|
||||
"label": "Synchronisierungsstatus",
|
||||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_options": [
|
||||
{"value": "pending", "label": "Ausstehend"},
|
||||
{"value": "synced", "label": "Synchronisiert"},
|
||||
{"value": "error", "label": "Fehler"},
|
||||
{"value": "cancelled", "label": "Abgebrochen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
syncErrorMessage: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Latest accounting-sync error message (if syncStatus == 'error').",
|
||||
json_schema_extra={
|
||||
"label": "Fehlermeldung",
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@i18nModel("Buchhaltungs-Synchronisation")
|
||||
class TrusteeAccountingSync(PowerOnModel):
|
||||
"""Tracks which position was synced to which external system and when.
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ from .datamodelFeatureTrustee import (
|
|||
TrusteeContract,
|
||||
TrusteeDocument,
|
||||
TrusteePosition,
|
||||
TrusteePositionView,
|
||||
TrusteeDataAccount,
|
||||
TrusteeDataJournalEntry,
|
||||
TrusteeDataJournalLine,
|
||||
|
|
@ -209,6 +210,7 @@ _TRUSTEE_ENTITY_MODELS = {
|
|||
"TrusteeContract": TrusteeContract,
|
||||
"TrusteeDocument": TrusteeDocument,
|
||||
"TrusteePosition": TrusteePosition,
|
||||
"TrusteePositionView": TrusteePositionView,
|
||||
# Read-only sync tables (TrusteeData*) and accounting bookkeeping
|
||||
"TrusteeDataAccount": TrusteeDataAccount,
|
||||
"TrusteeDataJournalEntry": TrusteeDataJournalEntry,
|
||||
|
|
@ -979,29 +981,16 @@ def get_documents(
|
|||
|
||||
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
|
||||
"""Handle mode=filterValues and mode=ids for trustee documents."""
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleIdsInMemory, handleFilterValuesInMemory, enrichRowsWithFkLabels
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
from fastapi.responses import JSONResponse
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteeDocument,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
result = interface.getAllDocuments(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
enrichRowsWithFkLabels(items, TrusteeDocument)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
if mode == "ids":
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllDocuments(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
return handleIdsInMemory(items, pagination)
|
||||
|
|
@ -1181,6 +1170,51 @@ def delete_document(
|
|||
|
||||
# ===== Position Routes =====
|
||||
|
||||
def _buildSyncStatusByPosition(interface, instanceId: str) -> Dict[str, Dict[str, Optional[str]]]:
|
||||
"""Build a positionId -> {syncStatus, syncErrorMessage} map from
|
||||
`TrusteeAccountingSync` records for the given feature instance.
|
||||
|
||||
Preference order matches the historic UI logic: ``synced`` overrides
|
||||
``error``, so a successful retry hides an old failure. Any other status
|
||||
(`pending`, `cancelled`, ...) is kept verbatim.
|
||||
"""
|
||||
from .datamodelFeatureTrustee import TrusteeAccountingSync
|
||||
|
||||
syncRecords = interface.db.getRecordset(
|
||||
TrusteeAccountingSync, recordFilter={"featureInstanceId": instanceId}
|
||||
) or []
|
||||
|
||||
syncMap: Dict[str, Dict[str, Optional[str]]] = {}
|
||||
for rec in syncRecords:
|
||||
positionId = rec.get("positionId")
|
||||
if not positionId:
|
||||
continue
|
||||
status = rec.get("syncStatus")
|
||||
errorMessage = rec.get("errorMessage")
|
||||
current = syncMap.get(positionId)
|
||||
prefer = (
|
||||
current is None
|
||||
or status == "synced"
|
||||
or (current.get("syncStatus") != "synced" and status == "error")
|
||||
)
|
||||
if prefer:
|
||||
syncMap[positionId] = {
|
||||
"syncStatus": status,
|
||||
"syncErrorMessage": errorMessage,
|
||||
}
|
||||
return syncMap
|
||||
|
||||
|
||||
def _enrichPositionsWithSyncStatus(items: List[Dict[str, Any]], interface, instanceId: str) -> List[Dict[str, Any]]:
|
||||
"""In-place enrich each position dict with `syncStatus` + `syncErrorMessage`."""
|
||||
syncMap = _buildSyncStatusByPosition(interface, instanceId)
|
||||
for row in items:
|
||||
info = syncMap.get(row.get("id")) or {}
|
||||
row["syncStatus"] = info.get("syncStatus")
|
||||
row["syncErrorMessage"] = info.get("syncErrorMessage")
|
||||
return items
|
||||
|
||||
|
||||
@router.get("/{instanceId}/positions")
|
||||
@limiter.limit("30/minute")
|
||||
def get_positions(
|
||||
|
|
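Note: a small worked example of the preference order implemented by `_buildSyncStatusByPosition` (assumed sync records, order not guaranteed): a later "synced" record replaces an earlier "error" for the same position, while "pending"/"cancelled" never displace an existing entry:

    records = [
        {"positionId": "p1", "syncStatus": "error",   "errorMessage": "Konto fehlt"},
        {"positionId": "p1", "syncStatus": "synced",  "errorMessage": None},
        {"positionId": "p2", "syncStatus": "pending", "errorMessage": None},
    ]
    # _buildSyncStatusByPosition would resolve:
    #   p1 -> {"syncStatus": "synced",  "syncErrorMessage": None}   (retry hides the old failure)
    #   p2 -> {"syncStatus": "pending", "syncErrorMessage": None}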
@ -1205,8 +1239,10 @@ def get_positions(
|
|||
return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
items = _itemsToDicts(result.items)
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
return {
|
||||
"items": _itemsToDicts(result.items),
|
||||
"items": items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
|
|
@ -1216,37 +1252,30 @@ def get_positions(
|
|||
filters=paginationParams.filters if paginationParams else None
|
||||
).model_dump(),
|
||||
}
|
||||
items = result if isinstance(result, list) else result.items
|
||||
return {"items": _itemsToDicts(items), "pagination": None}
|
||||
rawItems = result if isinstance(result, list) else result.items
|
||||
items = _itemsToDicts(rawItems)
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
return {"items": items, "pagination": None}
|
||||
|
||||
|
||||
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
|
||||
"""Handle mode=filterValues and mode=ids for trustee positions."""
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleIdsInMemory, handleFilterValuesInMemory, enrichRowsWithFkLabels
|
||||
from .datamodelFeatureTrustee import TrusteePositionView
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
from fastapi.responses import JSONResponse
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteePosition,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
if mode == "ids":
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllPositions(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
# Use the view model so FK labels for the synthetic columns also resolve.
|
||||
enrichRowsWithFkLabels(items, TrusteePositionView)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
if mode == "ids":
|
||||
result = interface.getAllPositions(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
return handleIdsInMemory(items, pagination)
|
||||
|
||||
|
||||
|
|
@ -2402,14 +2431,12 @@ def _paginatedReadEndpoint(
|
|||
"""
|
||||
from modules.interfaces.interfaceRbac import (
|
||||
getRecordsetPaginatedWithRBAC,
|
||||
getDistinctColumnValuesWithRBAC,
|
||||
)
|
||||
from modules.routes.routeHelpers import (
|
||||
handleIdsInMemory,
|
||||
parseCrossFilterPagination,
|
||||
handleFilterValuesInMemory,
|
||||
enrichRowsWithFkLabels,
|
||||
)
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
|
|
@ -2417,19 +2444,21 @@ def _paginatedReadEndpoint(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
result = getRecordsetPaginatedWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=modelClass,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
pagination=None,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
rawItems = result.items if hasattr(result, "items") else result
|
||||
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
|
||||
featureResolvers = _buildFeatureInternalResolvers(modelClass, interface.db)
|
||||
enrichRowsWithFkLabels(items, modelClass, extraResolvers=featureResolvers or None)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
result = getRecordsetPaginatedWithRBAC(
|
||||
|
|
|
|||
|
|
@@ -18,6 +18,7 @@ from modules.system.databaseHealth import (
     _cleanAllOrphans,
     _cleanOrphans,
     _getTableStats,
+    _isUserIdFk,
     _listOrphans,
     _scanOrphans,
 )
@@ -49,6 +50,14 @@ class OrphanCleanAllRequest(BaseModel):
         False,
         description="Override safety guards on every relationship. Use with extreme care.",
     )
+    excludeUserFks: bool = Field(
+        False,
+        description=(
+            "Skip FK relationships pointing at UserInDB.id. Deleted-user remnants "
+            "(audit / billing / membership rows) are handled by a dedicated purge "
+            "workflow and should not be touched by generic FK cleanup."
+        ),
+    )


 @router.get("/stats")
@@ -68,10 +77,18 @@ def getDatabaseTableStats(
 def getDatabaseOrphans(
     request: Request,
     db: Optional[str] = None,
+    excludeUserFks: bool = False,
     currentUser: User = Depends(requireSysAdmin),
 ) -> Dict[str, Any]:
-    """FK orphan scan (optional filter by source database name)."""
+    """FK orphan scan (optional filter by source database name).
+
+    When ``excludeUserFks=true``, results targeting ``UserInDB.id`` are
+    omitted from the response so the SysAdmin UI can keep deleted-user
+    remnants visually separate from real FK drift.
+    """
     rows = _scanOrphans(dbFilter=db)
+    if excludeUserFks:
+        rows = [r for r in rows if not _isUserIdFk(r.get("targetTable", ""), r.get("targetColumn", ""))]
     return {"orphans": rows}

@@ -161,17 +178,19 @@ def postDatabaseOrphansCleanAll(
     `skipped` (safety guard triggered, no force), or `error` (other failure).
     """
     force = bool(body.force) if body is not None else False
-    results: List[dict] = _cleanAllOrphans(force=force)
+    excludeUserFks = bool(body.excludeUserFks) if body is not None else False
+    results: List[dict] = _cleanAllOrphans(force=force, excludeUserFks=excludeUserFks)
     skipped = sum(1 for r in results if "skipped" in r)
     errored = sum(1 for r in results if "error" in r)
     deletedTotal = sum(int(r.get("deleted", 0)) for r in results)
     logger.info(
-        "SysAdmin orphan clean-all: user=%s batches=%s deleted=%s skipped=%s errored=%s force=%s",
+        "SysAdmin orphan clean-all: user=%s batches=%s deleted=%s skipped=%s errored=%s force=%s excludeUserFks=%s",
         currentUser.username,
         len(results),
         deletedTotal,
         skipped,
         errored,
         force,
+        excludeUserFks,
     )
     return {"results": results, "skipped": skipped, "errored": errored, "deleted": deletedTotal}
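Note: a hedged sketch of calling the extended clean-all endpoint; the URL path and auth header are placeholders, only the JSON body (OrphanCleanAllRequest) comes from this diff:

    import requests

    requests.post(
        "https://example.invalid/api/sysadmin/database/orphans/clean-all",  # placeholder path
        json={"force": False, "excludeUserFks": True},  # skip FK rows pointing at UserInDB.id
        headers={"Authorization": "Bearer <token>"},    # placeholder auth
    )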
@@ -109,9 +109,8 @@ def _enrichUserAndInstanceLabels(
 ) -> None:
     """Resolve userId -> username and featureInstanceId -> label in-place.

-    Uses the central resolvers from routeHelpers. Returns None (not the raw ID)
-    for unresolvable entries so the frontend can distinguish "resolved" from
-    "missing".
+    Uses the central resolvers from routeHelpers. Falls back to ``NA(<id>)``
+    for unresolvable entries so filter dropdowns still show an entry.
     """
     from modules.routes.routeHelpers import resolveUserLabels, resolveInstanceLabels

@@ -129,10 +128,10 @@ def _enrichUserAndInstanceLabels(
     for r in items:
         uid = r.get(userKey)
         if uid and not r.get(usernameKey) and uid in userMap:
-            r[usernameKey] = userMap[uid]
+            r[usernameKey] = userMap.get(uid) or f"NA({uid})"
         iid = r.get(instanceKey)
         if iid:
-            r[instanceLabelKey] = instanceMap.get(iid)
+            r[instanceLabelKey] = instanceMap.get(iid) or f"NA({iid})"


 def _requireAuditAccess(context: RequestContext):
@ -260,7 +260,7 @@ def get_files(
|
|||
|
||||
from modules.routes.routeHelpers import (
|
||||
handleIdsMode,
|
||||
parseCrossFilterPagination,
|
||||
handleFilterValuesInMemory,
|
||||
)
|
||||
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
|
|
@ -272,13 +272,11 @@ def get_files(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
crossPagination = parseCrossFilterPagination(column, pagination)
|
||||
recordFilter = {"sysCreatedBy": managementInterface.userId}
|
||||
from fastapi.responses import JSONResponse
|
||||
values = managementInterface.db.getDistinctColumnValues(
|
||||
FileItem, column, crossPagination, recordFilter
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
allFiles = managementInterface.getAllFiles()
|
||||
items = allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])
|
||||
itemDicts = [f.model_dump() if hasattr(f, "model_dump") else (dict(f) if not isinstance(f, dict) else f) for f in items]
|
||||
enrichRowsWithFkLabels(itemDicts, FileItem)
|
||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
recordFilter = {"sysCreatedBy": managementInterface.userId}
|
||||
|
@@ -94,7 +94,8 @@ class InvitationResponse(BaseModel):
     maxUses: int
     currentUses: int
     inviteUrl: str  # Full URL for the invitation
-    emailSent: bool = False  # Whether invitation email was sent
+    emailSentFlag: bool = False  # Whether invitation email was sent
+    emailSentAt: Optional[float] = None  # Timestamp when invitation email was sent (UTC)


 class InvitationValidation(BaseModel):
@ -236,7 +237,8 @@ def create_invitation(
|
|||
maxUses=data.maxUses,
|
||||
currentUses=0,
|
||||
inviteUrl="",
|
||||
emailSent=False
|
||||
emailSentFlag=False,
|
||||
emailSentAt=None,
|
||||
)
|
||||
else:
|
||||
existing_membership = rootInterface.getUserMandate(target_user_id, mandateId)
|
||||
|
|
@ -259,7 +261,8 @@ def create_invitation(
|
|||
maxUses=data.maxUses,
|
||||
currentUses=0,
|
||||
inviteUrl="",
|
||||
emailSent=False
|
||||
emailSentFlag=False,
|
||||
emailSentAt=None,
|
||||
)
|
||||
|
||||
invitation = Invitation(
|
||||
|
|
@ -281,7 +284,8 @@ def create_invitation(
|
|||
inviteUrl = f"{baseUrl}/invite/{invitation.token}"
|
||||
|
||||
# Send email if email address is provided
|
||||
emailSent = False
|
||||
emailSentFlag = False
|
||||
emailSentAt: Optional[float] = None
|
||||
if email_val:
|
||||
try:
|
||||
from modules.connectors.connectorMessagingEmail import ConnectorMessagingEmail
|
||||
|
|
@ -319,20 +323,22 @@ def create_invitation(
|
|||
subject=emailSubject,
|
||||
message=emailBody
|
||||
)
|
||||
emailSent = True
|
||||
emailSentFlag = True
|
||||
emailSentAt = getUtcTimestamp()
|
||||
logger.info(f"Invitation email sent to {email_val} for user {target_username_val or 'email-only'}")
|
||||
except Exception as emailError:
|
||||
logger.warning(f"Failed to send invitation email to {email_val}: {emailError}")
|
||||
# Don't fail the invitation creation if email fails
|
||||
|
||||
# Update the invitation record with emailSent status
|
||||
if emailSent:
|
||||
# Persist email-sent state on the invitation record
|
||||
if emailSentFlag:
|
||||
rootInterface.db.recordModify(
|
||||
Invitation,
|
||||
createdRecord.get("id"),
|
||||
{"emailSent": True}
|
||||
{"emailSentFlag": True, "emailSentAt": emailSentAt},
|
||||
)
|
||||
createdRecord["emailSent"] = True
|
||||
createdRecord["emailSentFlag"] = True
|
||||
createdRecord["emailSentAt"] = emailSentAt
|
||||
|
||||
# If the target user already exists (identified by username), create an in-app notification
|
||||
# Only look up by username - email is not used for "existing user" since new users are invited by email
|
||||
|
|
@ -384,7 +390,8 @@ def create_invitation(
|
|||
maxUses=createdRecord.get("maxUses", 1),
|
||||
currentUses=createdRecord.get("currentUses", 0),
|
||||
inviteUrl=inviteUrl,
|
||||
emailSent=emailSent
|
||||
emailSentFlag=emailSentFlag,
|
||||
emailSentAt=emailSentAt,
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
|
|
@@ -415,7 +422,9 @@ def list_invitations(
     Requires Mandate-Admin role. Returns all invitations created for this mandate.

     NOTE: Cannot use db.getRecordsetPaginated() because:
-    - Computed status fields (isExpired, isUsedUp) are derived in-memory
+    - Computed status fields (`expiredFlag`, `usedUpFlag`) are derived from
+      Pydantic computed fields (`Invitation.expiredFlag` / `usedUpFlag`),
+      `model_dump()` evaluates them on every read.
     - Filtering by revoked/used/expired requires post-fetch logic
     - Invitation volume per mandate is typically low (< 100)
     When this endpoint needs FormGeneratorTable pagination, add PaginatedResponse
@@ -455,11 +464,11 @@ def list_invitations(
             continue
         baseUrl = frontendUrl.rstrip("/") if frontendUrl else ""
         inviteUrl = f"{baseUrl}/invite/{inv.token}" if baseUrl else ""
+        # `model_dump()` includes the computed fields (`expiredFlag`, `usedUpFlag`)
+        # defined on Invitation — no manual computation needed here.
         items.append({
             **inv.model_dump(),
             "inviteUrl": inviteUrl,
-            "isExpired": expiresAt < currentTime,
-            "isUsedUp": currentUses >= maxUses
         })
     return items

@ -12,6 +12,8 @@ import asyncio
|
|||
import json
|
||||
import logging
|
||||
import math
|
||||
import re
|
||||
import time
|
||||
from typing import Optional, List
|
||||
from fastapi import APIRouter, Depends, Request, Query, Path, HTTPException
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
|
@ -183,6 +185,331 @@ def _parsePaginationOr400(pagination: Optional[str]) -> Optional[PaginationParam
|
|||
)
|
||||
|
||||
|
||||
_RUN_STATS_SUBQUERY = """
|
||||
(
|
||||
SELECT s."workflowId" AS "workflowId",
|
||||
MAX(COALESCE(s."startedAt", s."sysCreatedAt")) AS "lastStartedAt",
|
||||
COUNT(s."id")::bigint AS "runCount",
|
||||
MAX(CASE WHEN s."status" IN ('running', 'paused') THEN s."id" END) AS "activeRunId"
|
||||
FROM "AutoRun" s
|
||||
GROUP BY s."workflowId"
|
||||
) rs
|
||||
"""
|
||||
|
||||
|
||||
def _firstFkSortFieldForWorkflows(pagination) -> Optional[str]:
|
||||
"""First sort field that requires FK label resolution (cross-DB), or None."""
|
||||
from modules.routes.routeHelpers import _buildLabelResolversFromModel
|
||||
if not pagination or not pagination.sort:
|
||||
return None
|
||||
resolvers = _buildLabelResolversFromModel(AutoWorkflow)
|
||||
if not resolvers:
|
||||
return None
|
||||
for sf in pagination.sort:
|
||||
sfField = sf.get("field") if isinstance(sf, dict) else getattr(sf, "field", None)
|
||||
if sfField and sfField in resolvers:
|
||||
return sfField
|
||||
return None
|
||||
|
||||
|
||||
def _batchRunStatsForWorkflowIds(db: DatabaseConnector, workflowIds: List[str]) -> dict:
|
||||
"""One grouped query: lastStartedAt, runCount, activeRunId per workflow."""
|
||||
if not workflowIds or not db._ensureTableExists(AutoRun):
|
||||
return {}
|
||||
db._ensure_connection()
|
||||
sql = """
|
||||
SELECT "workflowId",
|
||||
MAX(COALESCE("startedAt", "sysCreatedAt")) AS "lastStartedAt",
|
||||
COUNT("id")::bigint AS "runCount",
|
||||
MAX(CASE WHEN "status" IN ('running', 'paused') THEN "id" END) AS "activeRunId"
|
||||
FROM "AutoRun"
|
||||
WHERE "workflowId" = ANY(%s)
|
||||
GROUP BY "workflowId"
|
||||
"""
|
||||
out: dict = {}
|
||||
with db.connection.cursor() as cursor:
|
||||
cursor.execute(sql, (workflowIds,))
|
||||
for row in cursor.fetchall():
|
||||
r = dict(row)
|
||||
wid = r.get("workflowId")
|
||||
if wid:
|
||||
out[str(wid)] = r
|
||||
return out
|
||||
|
||||
|
||||
def _listingColSql(key: str, wfFieldNames: set) -> Optional[str]:
|
||||
if key == "lastStartedAt":
|
||||
return 'rs."lastStartedAt"'
|
||||
if key == "runCount":
|
||||
return 'COALESCE(rs."runCount", 0::bigint)'
|
||||
if key == "isRunning":
|
||||
return '(rs."activeRunId" IS NOT NULL)'
|
||||
if key in wfFieldNames:
|
||||
return f'w."{key}"'
|
||||
return None
|
||||
|
||||
|
||||
def _listingOrderExpr(key: str, wfFieldNames: set, wfFields: dict) -> Optional[str]:
|
||||
if key == "lastStartedAt":
|
||||
return 'rs."lastStartedAt"'
|
||||
if key == "runCount":
|
||||
return 'COALESCE(rs."runCount", 0::bigint)'
|
||||
if key == "isRunning":
|
||||
return 'CASE WHEN rs."activeRunId" IS NOT NULL THEN 1 ELSE 0 END'
|
||||
if key in wfFieldNames:
|
||||
colType = wfFields.get(key, "TEXT")
|
||||
if colType == "BOOLEAN":
|
||||
return f'COALESCE(w."{key}", FALSE)'
|
||||
return f'w."{key}"'
|
||||
return None
|
||||
|
||||
|
||||
def _appendJoinedListingFilters(whereParts: list, values: list, pagination, wfFields: dict) -> None:
|
||||
"""Append WHERE fragments for joined workflow listing (w + rs)."""
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
|
||||
wfFieldNames = set(wfFields.keys())
|
||||
validCols = wfFieldNames | {"lastStartedAt", "runCount", "isRunning"}
|
||||
|
||||
if not pagination or not pagination.filters:
|
||||
return
|
||||
|
||||
for key, val in pagination.filters.items():
|
||||
if key == "search" and isinstance(val, str) and val.strip():
|
||||
term = f"%{val.strip()}%"
|
||||
textCols = [c for c, t in wfFields.items() if t == "TEXT"]
|
||||
if textCols:
|
||||
orParts = [f'COALESCE(w."{c}"::TEXT, \'\') ILIKE %s' for c in textCols]
|
||||
whereParts.append(f"({' OR '.join(orParts)})")
|
||||
values.extend([term] * len(textCols))
|
||||
continue
|
||||
|
||||
if key not in validCols:
|
||||
continue
|
||||
|
||||
if key == "isRunning":
|
||||
if isinstance(val, dict):
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
isTrue = str(v).lower() == "true"
|
||||
if op in ("equals", "eq"):
|
||||
whereParts.append('(rs."activeRunId" IS NOT NULL)' if isTrue else '(rs."activeRunId" IS NULL)')
|
||||
elif val is None:
|
||||
whereParts.append('(rs."activeRunId" IS NULL)')
|
||||
else:
|
||||
whereParts.append(
|
||||
'(rs."activeRunId" IS NOT NULL)' if str(val).lower() == "true" else '(rs."activeRunId" IS NULL)'
|
||||
)
|
||||
continue
|
||||
|
||||
colRef = _listingColSql(key, wfFieldNames)
|
||||
if not colRef:
|
||||
continue
|
||||
|
||||
colType = wfFields.get(key, "TEXT") if key in wfFieldNames else (
|
||||
"DOUBLE PRECISION" if key == "lastStartedAt" else "BIGINT" if key == "runCount" else "TEXT"
|
||||
)
|
||||
|
||||
if val is None:
|
||||
if key == "lastStartedAt":
|
||||
whereParts.append(f'({colRef} IS NULL)')
|
||||
elif key == "runCount":
|
||||
whereParts.append(f'({colRef} = 0)')
|
||||
else:
|
||||
whereParts.append(f'({colRef} IS NULL OR {colRef}::TEXT = \'\')')
|
||||
continue
|
||||
|
||||
if not isinstance(val, dict):
|
||||
if colType == "BOOLEAN" or key == "isRunning":
|
||||
whereParts.append(f'COALESCE({colRef}, FALSE) = %s')
|
||||
values.append(str(val).lower() == "true")
|
||||
else:
|
||||
whereParts.append(f'{colRef}::TEXT ILIKE %s')
|
||||
values.append(str(val))
|
||||
continue
|
||||
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
if op in ("equals", "eq"):
|
||||
if colType == "BOOLEAN":
|
||||
whereParts.append(f'COALESCE({colRef}, FALSE) = %s')
|
||||
values.append(str(v).lower() == "true")
|
||||
else:
|
||||
whereParts.append(f'{colRef}::TEXT = %s')
|
||||
values.append(str(v))
|
||||
elif op == "contains":
|
||||
whereParts.append(f'{colRef}::TEXT ILIKE %s')
|
||||
values.append(f"%{v}%")
|
||||
elif op == "startsWith":
|
||||
whereParts.append(f'{colRef}::TEXT ILIKE %s')
|
||||
values.append(f"{v}%")
|
||||
elif op == "endsWith":
|
||||
whereParts.append(f'{colRef}::TEXT ILIKE %s')
|
||||
values.append(f"%{v}")
|
||||
elif op in ("gt", "gte", "lt", "lte"):
|
||||
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
||||
if colType in ("INTEGER", "DOUBLE PRECISION", "BIGINT") or key in ("lastStartedAt", "runCount"):
|
||||
try:
|
||||
whereParts.append(f'{colRef}::double precision {sqlOp} %s')
|
||||
values.append(float(v))
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
whereParts.append(f'{colRef}::TEXT {sqlOp} %s')
|
||||
values.append(str(v))
|
||||
elif op == "between":
|
||||
fromVal = v.get("from", "") if isinstance(v, dict) else ""
|
||||
toVal = v.get("to", "") if isinstance(v, dict) else ""
|
||||
if not fromVal and not toVal:
|
||||
continue
|
||||
isNumericCol = colType in ("INTEGER", "DOUBLE PRECISION", "BIGINT") or key in ("lastStartedAt", "runCount")
|
||||
isDateVal = bool(fromVal and re.match(r"^\d{4}-\d{2}-\d{2}$", str(fromVal))) or bool(
|
||||
toVal and re.match(r"^\d{4}-\d{2}-\d{2}$", str(toVal))
|
||||
)
|
||||
if isNumericCol and isDateVal:
|
||||
if fromVal and toVal:
|
||||
fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
|
||||
hour=23, minute=59, second=59, tzinfo=_tz.utc
|
||||
).timestamp()
|
||||
whereParts.append(f"({colRef} >= %s AND {colRef} <= %s)")
|
||||
values.extend([fromTs, toTs])
|
||||
elif fromVal:
|
||||
fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
whereParts.append(f"({colRef} >= %s)")
|
||||
values.append(fromTs)
|
||||
else:
|
||||
toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
|
||||
hour=23, minute=59, second=59, tzinfo=_tz.utc
|
||||
).timestamp()
|
||||
whereParts.append(f"({colRef} <= %s)")
|
||||
values.append(toTs)
|
||||
elif isNumericCol:
|
||||
try:
|
||||
if fromVal and toVal:
|
||||
whereParts.append(
|
||||
f"({colRef}::double precision >= %s AND {colRef}::double precision <= %s)"
|
||||
)
|
||||
values.extend([float(fromVal), float(toVal)])
|
||||
elif fromVal:
|
||||
whereParts.append(f"{colRef}::double precision >= %s")
|
||||
values.append(float(fromVal))
|
||||
elif toVal:
|
||||
whereParts.append(f"{colRef}::double precision <= %s")
|
||||
values.append(float(toVal))
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
if fromVal and toVal:
|
||||
whereParts.append(f"({colRef}::TEXT >= %s AND {colRef}::TEXT <= %s)")
|
||||
values.extend([str(fromVal), str(toVal)])
|
||||
elif fromVal:
|
||||
whereParts.append(f"{colRef}::TEXT >= %s")
|
||||
values.append(str(fromVal))
|
||||
elif toVal:
|
||||
whereParts.append(f"{colRef}::TEXT <= %s")
|
||||
values.append(str(toVal))
|
||||
|
||||
|
||||
def _buildJoinedWorkflowWhereOrderLimit(
|
||||
recordFilter: dict,
|
||||
pagination,
|
||||
wfFields: dict,
|
||||
) -> tuple:
|
||||
"""WHERE / ORDER BY / LIMIT for joined AutoWorkflow + run stats listing."""
|
||||
wfFieldNames = set(wfFields.keys())
|
||||
whereParts: list = []
|
||||
values: list = []
|
||||
|
||||
for field, value in (recordFilter or {}).items():
|
||||
if value is None:
|
||||
whereParts.append(f'w."{field}" IS NULL')
|
||||
elif isinstance(value, list):
|
||||
whereParts.append(f'w."{field}" = ANY(%s)')
|
||||
values.append(value)
|
||||
else:
|
||||
whereParts.append(f'w."{field}" = %s')
|
||||
values.append(value)
|
||||
|
||||
_appendJoinedListingFilters(whereParts, values, pagination, wfFields)
|
||||
|
||||
whereClause = " WHERE " + " AND ".join(whereParts) if whereParts else ""
|
||||
|
||||
orderParts: list = []
|
||||
if pagination and pagination.sort:
|
||||
for sf in pagination.sort:
|
||||
sfField = sf.get("field") if isinstance(sf, dict) else getattr(sf, "field", None)
|
||||
sfDir = sf.get("direction", "asc") if isinstance(sf, dict) else getattr(sf, "direction", "asc")
|
||||
if not sfField:
|
||||
continue
|
||||
expr = _listingOrderExpr(sfField, wfFieldNames, wfFields)
|
||||
if not expr:
|
||||
continue
|
||||
direction = "DESC" if str(sfDir).lower() == "desc" else "ASC"
|
||||
orderParts.append(f"{expr} {direction} NULLS LAST")
|
||||
if not orderParts:
|
||||
orderParts.append('w."sysCreatedAt" DESC NULLS LAST')
|
||||
|
||||
orderClause = " ORDER BY " + ", ".join(orderParts)
|
||||
|
||||
limitClause = ""
|
||||
if pagination:
|
||||
offset = (pagination.page - 1) * pagination.pageSize
|
||||
limitClause = f" LIMIT {pagination.pageSize} OFFSET {offset}"
|
||||
|
||||
return whereClause, orderClause, limitClause, values
|
||||
|
||||
|
||||
def _getWorkflowsJoinedPaginated(
|
||||
db: DatabaseConnector,
|
||||
recordFilter: dict,
|
||||
paginationParams: PaginationParams,
|
||||
) -> dict:
|
||||
"""SQL listing: AutoWorkflow LEFT JOIN aggregated AutoRun stats (one query + count)."""
|
||||
from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
|
||||
|
||||
wfFields = getModelFields(AutoWorkflow)
|
||||
whereClause, orderClause, limitClause, values = _buildJoinedWorkflowWhereOrderLimit(
|
||||
recordFilter, paginationParams, wfFields,
|
||||
)
|
||||
countValues = list(values)
|
||||
|
||||
fromSql = f'"AutoWorkflow" w LEFT JOIN {_RUN_STATS_SUBQUERY.strip()} ON rs."workflowId" = w."id"'
|
||||
|
||||
countSql = f"SELECT COUNT(*) AS cnt FROM {fromSql}{whereClause}"
|
||||
dataSql = f"SELECT w.*, rs.\"lastStartedAt\", rs.\"runCount\", rs.\"activeRunId\" FROM {fromSql}{whereClause}{orderClause}{limitClause}"
|
||||
|
||||
db._ensure_connection()
|
||||
with db.connection.cursor() as cursor:
|
||||
cursor.execute(countSql, countValues)
|
||||
totalItems = int(cursor.fetchone()["cnt"])
|
||||
|
||||
cursor.execute(dataSql, values)
|
||||
rawRows = [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
pageSize = paginationParams.pageSize if paginationParams else max(totalItems, 1)
|
||||
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
|
||||
|
||||
modelFields = AutoWorkflow.model_fields
|
||||
for record in rawRows:
|
||||
parseRecordFields(record, wfFields, "table AutoWorkflow joined listing")
|
||||
for fieldName, fieldType in wfFields.items():
|
||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||
fieldInfo = modelFields.get(fieldName)
|
||||
if fieldInfo:
|
||||
fieldAnnotation = fieldInfo.annotation
|
||||
if fieldAnnotation == list or (
|
||||
hasattr(fieldAnnotation, "__origin__") and fieldAnnotation.__origin__ is list
|
||||
):
|
||||
record[fieldName] = []
|
||||
elif fieldAnnotation == dict or (
|
||||
hasattr(fieldAnnotation, "__origin__") and fieldAnnotation.__origin__ is dict
|
||||
):
|
||||
record[fieldName] = {}
|
||||
|
||||
return {"items": rawRows, "totalItems": totalItems, "totalPages": totalPages}
|
||||
|
||||
|
||||
def _cascadeDeleteAutoWorkflow(db: DatabaseConnector, workflowId: str) -> None:
|
||||
"""Delete AutoWorkflow and dependent rows (same order as interfaceDbApp._cascadeDeleteGraphicalEditorData)."""
|
||||
wf_id = workflowId
|
||||
|
|
@ -253,7 +580,7 @@ def get_workflow_runs(
    paginationParams = PaginationParams(
        page=page,
        pageSize=limit,
        sort=[{"field": "sysCreatedAt", "direction": "desc"}],
        sort=[{"field": "startedAt", "direction": "desc"}],
    )

    from modules.routes.routeHelpers import getRecordsetPaginatedWithFkSort
@ -435,20 +762,10 @@ def get_system_workflows(
        sort=[{"field": "sysCreatedAt", "direction": "desc"}],
    )

    from modules.routes.routeHelpers import getRecordsetPaginatedWithFkSort
    result = getRecordsetPaginatedWithFkSort(
        db, AutoWorkflow,
        pagination=paginationParams,
        recordFilter=recordFilter if recordFilter else None,
    )
    pageItems = result.get("items", []) if isinstance(result, dict) else result.items
    totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
    totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages

    from modules.routes.routeHelpers import enrichRowsWithFkLabels, resolveMandateLabels, resolveInstanceLabels

    # Resolve featureCode in same pass as instance labels — need full FI object
    featureCodeMap: dict = {}

    def _resolveInstanceLabelsWithFeatureCode(ids):
        from modules.interfaces.interfaceDbApp import getRootInterface as _getRI
        from modules.interfaces.interfaceFeatures import getFeatureInterface
@ -471,59 +788,140 @@ def get_system_workflows(
    userMandateIds = _getUserMandateIds(userId)
    adminMandateIds = _getAdminMandateIds(userId, userMandateIds)

    workflowIds = [w.get("id") for w in pageItems if w.get("id")]
    activeRunMap: dict = {}
    runCountMap: dict = {}
    lastStartedMap: dict = {}
    if workflowIds and db._ensureTableExists(AutoRun):
        for wfId in workflowIds:
            runs = db.getRecordset(AutoRun, recordFilter={"workflowId": wfId})
            runCountMap[wfId] = len(runs)
            for r in runs:
                rDict = dict(r)
                ts = rDict.get("sysCreatedAt")
                if ts and (lastStartedMap.get(wfId) is None or ts > lastStartedMap.get(wfId)):
                    lastStartedMap[wfId] = ts
                if rDict.get("status") in ("running", "paused"):
                    activeRunMap[wfId] = rDict.get("id")

    items = []
    for w in pageItems:
        row = dict(w)
        wMandateId = row.get("mandateId")
        wfId = row.get("id")
        row["isRunning"] = wfId in activeRunMap
        row["activeRunId"] = activeRunMap.get(wfId)
        row["runCount"] = runCountMap.get(wfId, 0)
        row["lastStartedAt"] = lastStartedMap.get(wfId)

        if context.isPlatformAdmin:
            row["canEdit"] = True
            row["canDelete"] = True
            row["canExecute"] = True
        elif wMandateId and wMandateId in adminMandateIds:
            row["canEdit"] = True
            row["canDelete"] = True
            row["canExecute"] = True
    fkSortField = _firstFkSortFieldForWorkflows(paginationParams)
    if fkSortField:
        from modules.routes.routeHelpers import getRecordsetPaginatedWithFkSort, applyFiltersAndSort
        _COMPUTED_FIELDS = {"lastStartedAt", "runCount", "isRunning"}
        hasComputedFilter = bool(
            paginationParams.filters
            and any(k in _COMPUTED_FIELDS for k in paginationParams.filters)
        )
        hasComputedSort = any(
            (s.field if hasattr(s, "field") else s.get("field", "")) in _COMPUTED_FIELDS
            for s in (paginationParams.sort or [])
        )
        dbPagination = paginationParams
        if hasComputedFilter or hasComputedSort:
            dbFilters = {
                k: v for k, v in (paginationParams.filters or {}).items()
                if k not in _COMPUTED_FIELDS
            } or None
            dbSort = [
                s for s in (paginationParams.sort or [])
                if (s.field if hasattr(s, "field") else s.get("field", "")) not in _COMPUTED_FIELDS
            ]
            dbPagination = PaginationParams.model_construct(
                page=1,
                pageSize=9999,
                sort=dbSort or [{"field": "sysCreatedAt", "direction": "desc"}],
                filters=dbFilters,
            )
        result = getRecordsetPaginatedWithFkSort(
            db, AutoWorkflow,
            pagination=dbPagination,
            recordFilter=recordFilter if recordFilter else None,
        )
        pageItems = result.get("items", []) if isinstance(result, dict) else result.items
        workflowIds = [w.get("id") for w in pageItems if w.get("id")]
        statsById = _batchRunStatsForWorkflowIds(db, workflowIds)
        items = []
        for w in pageItems:
            row = dict(w)
            wfId = row.get("id")
            st = statsById.get(str(wfId)) if wfId else None
            activeRunId = st.get("activeRunId") if st else None
            row["isRunning"] = bool(activeRunId)
            row["activeRunId"] = activeRunId
            row["runCount"] = int(st.get("runCount") or 0) if st else 0
            row["lastStartedAt"] = float(st["lastStartedAt"]) if st and st.get("lastStartedAt") is not None else None
            wMandateId = row.get("mandateId")
            if context.isPlatformAdmin:
                row["canEdit"] = True
                row["canDelete"] = True
                row["canExecute"] = True
            elif wMandateId and wMandateId in adminMandateIds:
                row["canEdit"] = True
                row["canDelete"] = True
                row["canExecute"] = True
            else:
                row["canEdit"] = False
                row["canDelete"] = False
                row["canExecute"] = False
            row.pop("graph", None)
            items.append(row)
        enrichRowsWithFkLabels(
            items,
            labelResolvers={
                "mandateId": resolveMandateLabels,
                "featureInstanceId": _resolveInstanceLabelsWithFeatureCode,
            },
        )
        for row in items:
            row["instanceLabel"] = row.pop("featureInstanceIdLabel", None)
            row["mandateLabel"] = row.pop("mandateIdLabel", None)
            row["featureCode"] = featureCodeMap.get(row.get("featureInstanceId"))
        if hasComputedFilter or hasComputedSort:
            computedFilters = {
                k: v for k, v in (paginationParams.filters or {}).items()
                if k in _COMPUTED_FIELDS
            }
            computedSort = [
                s for s in (paginationParams.sort or [])
                if (s.field if hasattr(s, "field") else s.get("field", "")) in _COMPUTED_FIELDS
            ]
            computedPagination = PaginationParams.model_construct(
                page=paginationParams.page,
                pageSize=paginationParams.pageSize,
                sort=computedSort or [],
                filters=computedFilters or None,
            )
            filtered = applyFiltersAndSort(items, computedPagination)
            totalItems = filtered.get("totalItems", len(items))
            totalPages = filtered.get("totalPages", 1)
            items = filtered.get("items", items)
        else:
            row["canEdit"] = False
            row["canDelete"] = False
            row["canExecute"] = False

        row.pop("graph", None)
        items.append(row)

    enrichRowsWithFkLabels(
        items,
        labelResolvers={
            "mandateId": resolveMandateLabels,
            "featureInstanceId": _resolveInstanceLabelsWithFeatureCode,
        },
    )
    for row in items:
        row["instanceLabel"] = row.pop("featureInstanceIdLabel", None)
        row["mandateLabel"] = row.pop("mandateIdLabel", None)
        row["featureCode"] = featureCodeMap.get(row.get("featureInstanceId"))
    totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
    totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
    else:
        result = _getWorkflowsJoinedPaginated(
            db, recordFilter if recordFilter else {}, paginationParams,
        )
        pageItems = result.get("items", [])
        totalItems = result.get("totalItems", 0)
        totalPages = result.get("totalPages", 0)
        items = []
        for row in pageItems:
            wMandateId = row.get("mandateId")
            wfId = row.get("id")
            activeRunId = row.get("activeRunId")
            if row.get("runCount") is not None:
                row["runCount"] = int(row["runCount"])
            row["isRunning"] = bool(activeRunId)
            if context.isPlatformAdmin:
                row["canEdit"] = True
                row["canDelete"] = True
                row["canExecute"] = True
            elif wMandateId and wMandateId in adminMandateIds:
                row["canEdit"] = True
                row["canDelete"] = True
                row["canExecute"] = True
            else:
                row["canEdit"] = False
                row["canDelete"] = False
                row["canExecute"] = False
            row.pop("graph", None)
            items.append(row)
        enrichRowsWithFkLabels(
            items,
            labelResolvers={
                "mandateId": resolveMandateLabels,
                "featureInstanceId": _resolveInstanceLabelsWithFeatureCode,
            },
        )
        for row in items:
            row["instanceLabel"] = row.pop("featureInstanceIdLabel", None)
            row["mandateLabel"] = row.pop("mandateIdLabel", None)
            row["featureCode"] = featureCodeMap.get(row.get("featureInstanceId"))

    return {
        "items": items,
@ -593,6 +991,23 @@ def delete_system_workflow(
# Filter-values endpoints (for FormGeneratorTable column filters)
# ---------------------------------------------------------------------------

_SYNTHETIC_TIMESTAMP_FIELDS = {"lastStartedAt"}


def _isTimestampColumn(modelClass, column: str) -> bool:
    """Check if a column is a timestamp field (PeriodPicker, no discrete values needed)."""
    if column in _SYNTHETIC_TIMESTAMP_FIELDS:
        return True
    fields = getattr(modelClass, "model_fields", {})
    fieldInfo = fields.get(column)
    if not fieldInfo:
        return False
    extra = getattr(fieldInfo, "json_schema_extra", None)
    if isinstance(extra, dict):
        return extra.get("frontend_type") == "timestamp"
    return False


def _enrichedFilterValues(
    db, context: RequestContext, modelClass, scopeFilter, column: str,
):
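A quick illustration of what `_isTimestampColumn` classifies as a timestamp column. The model below is invented for the example and assumes it runs in the same module as the helper.

from typing import Optional
from pydantic import BaseModel, Field

class DemoModel(BaseModel):  # hypothetical, for illustration only
    # declared as a frontend timestamp via json_schema_extra -> True
    startedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
    # plain text column without json_schema_extra -> False
    name: str = Field(default="")

assert _isTimestampColumn(DemoModel, "startedAt") is True
assert _isTimestampColumn(DemoModel, "name") is False
# synthetic join-computed column, not on the model at all -> still True
assert _isTimestampColumn(DemoModel, "lastStartedAt") is True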
@ -602,6 +1017,9 @@ def _enrichedFilterValues(
    objects so the frontend can display human-readable labels in the dropdown
    without a separate source fk fetch. Non-FK columns return ``string | null``.

    Timestamp columns (sysCreatedAt, lastStartedAt) return an empty list because
    the frontend uses a PeriodPicker (range selector) — no discrete values needed.

    ``null`` is included when rows with NULL/empty values exist (enables the
    "(Leer)" filter option).

@ -610,6 +1028,9 @@ def _enrichedFilterValues(
    from fastapi.responses import JSONResponse
    from modules.routes.routeHelpers import resolveMandateLabels, resolveInstanceLabels

    if _isTimestampColumn(modelClass, column):
        return JSONResponse(content=[])

    if column in ("mandateLabel", "mandateId"):
        baseFilter = scopeFilter(context)
        recordFilter = dict(baseFilter) if baseFilter else {}
@ -828,7 +1249,10 @@ def stop_workflow_run(
        currentStatus = run.get("status", "")
        if currentStatus in ("completed", "failed", "stopped"):
            return {"status": currentStatus, "runId": runId, "message": "Run already finished"}
        db.recordModify(AutoRun, runId, {"status": "stopped"})
        stopUpdates = {"status": "stopped"}
        if not run.get("completedAt"):
            stopUpdates["completedAt"] = time.time()
        db.recordModify(AutoRun, runId, stopUpdates)
        return {"status": "stopped", "runId": runId, "message": "Run not active in memory, marked as stopped"}

    return {"status": "stopping", "runId": runId, "message": "Stop signal sent"}
@ -0,0 +1,145 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Inline emoji-font fallback for the ReportLab-based PDF renderer.

The default ReportLab core fonts (Helvetica, Times, Courier) only cover
WinAnsi (Latin-1 + a handful of typographic glyphs). Codepoints from the
Unicode Symbols / Pictographs / Emoji blocks render as a missing-glyph
square ("tofu") or are dropped entirely.

This module bundles a single TrueType emoji font (Noto Emoji, monochrome,
SIL Open Font License) and exposes `wrapEmojiSpansInXml` which rewrites
already-built ReportLab mini-XML so any character that the emoji font can
draw is wrapped in `<font name="NotoEmoji">...</font>`. ReportLab's
Paragraph parser supports nested <font> tags, so emoji spans nest cleanly
inside <b>, <i>, and <font name="Courier"> markup produced elsewhere.

ReportLab does not natively color emoji (CBDT/COLR/SBIX glyph tables are
not honoured by its TTF backend) — Noto Emoji is intentionally a
monochrome outline font, which is the only flavour that will render at all.
"""

from __future__ import annotations

import logging
import os
import re
from typing import FrozenSet, Optional

logger = logging.getLogger(__name__)

EMOJI_FONT_NAME = "NotoEmoji"

_RENDERER_DIR = os.path.dirname(os.path.abspath(__file__))
_GATEWAY_ROOT = os.path.abspath(os.path.join(_RENDERER_DIR, "..", "..", "..", "..", ".."))
_FONT_PATH = os.path.join(_GATEWAY_ROOT, "assets", "fonts", "NotoEmoji-Regular.ttf")

# Below 0x2000 the WinAnsi-style core fonts already cover Latin-1, common
# diacritics and basic punctuation. We only swap to the emoji font for
# higher codepoints so umlauts, copyright, NBSP, etc. stay visually
# consistent with surrounding body text.
_EMOJI_RANGE_START = 0x2000

_supportedCodepoints: Optional[FrozenSet[int]] = None
_initAttempted = False


def _initialize() -> bool:
    """Register the emoji TTF with ReportLab and capture its cmap.

    Lazy + idempotent: the renderer may instantiate before reportlab is
    imported in the worker process, and tests that don't generate PDFs
    must not pay the registration cost.
    """
    global _initAttempted, _supportedCodepoints
    if _initAttempted:
        return _supportedCodepoints is not None
    _initAttempted = True

    if not os.path.exists(_FONT_PATH):
        logger.warning(
            "Emoji font not found at %s — emoji codepoints in PDFs will render as tofu",
            _FONT_PATH,
        )
        return False

    try:
        from reportlab.pdfbase import pdfmetrics
        from reportlab.pdfbase.ttfonts import TTFont
    except ImportError:
        logger.warning("reportlab not installed; cannot register emoji font")
        return False
    try:
        font = TTFont(EMOJI_FONT_NAME, _FONT_PATH)
        pdfmetrics.registerFont(font)
        # `face.charToGlyph` is only populated once ReportLab first uses the
        # font; if it is still empty here, fall back to fontTools' cmap so
        # the coverage check below always has a mapping.
        cmap = getattr(font.face, "charToGlyph", None) or {}
        if not cmap:
            from fontTools.ttLib import TTFont as FtTTFont
            cmap = FtTTFont(_FONT_PATH).getBestCmap()
        _supportedCodepoints = frozenset(
            cp for cp in cmap.keys() if cp >= _EMOJI_RANGE_START
        )
        logger.info(
            "Registered emoji font '%s' with %d renderable codepoints (>= U+%04X)",
            EMOJI_FONT_NAME,
            len(_supportedCodepoints),
            _EMOJI_RANGE_START,
        )
        return True
    except Exception as exc:
        logger.warning("Failed to register emoji font: %s", exc)
        _supportedCodepoints = None
        return False


_TAG_RE = re.compile(r"<[^>]+>")


def wrapEmojiSpansInXml(xml: str) -> str:
    """Wrap consecutive emoji codepoints with <font name="NotoEmoji">…</font>.

    Operates on already-XML-escaped ReportLab markup. Tag markers
    (`<...>`) are skipped so we never insert a font tag inside another
    tag's attribute list. Codepoints that the emoji font cannot draw
    pass through unchanged so the default body font still gets a chance
    (e.g. U+200D zero-width-joiner has no glyph in Noto Emoji and would
    otherwise render as tofu inside a forced <font> span).
    """
    if not xml:
        return xml
    if not _initialize() or not _supportedCodepoints:
        return xml

    cps = _supportedCodepoints
    out: list[str] = []
    i = 0
    n = len(xml)
    while i < n:
        # Skip past any markup tag verbatim — emojis inside attribute
        # values would be unusual but harmless; the simpler invariant
        # "we never split a `<...>` token" keeps the rewrite safe.
        if xml[i] == "<":
            tagEnd = xml.find(">", i)
            if tagEnd == -1:
                out.append(xml[i:])
                break
            out.append(xml[i : tagEnd + 1])
            i = tagEnd + 1
            continue

        if ord(xml[i]) in cps:
            j = i
            while j < n and xml[j] != "<" and ord(xml[j]) in cps:
                j += 1
            out.append(f'<font name="{EMOJI_FONT_NAME}">')
            out.append(xml[i:j])
            out.append("</font>")
            i = j
            continue

        out.append(xml[i])
        i += 1
    return "".join(out)
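Example of the rewrite the helper performs, assuming the bundled font registered and both codepoints are in its cmap (the input string is invented for illustration):

from modules.serviceCenter.services.serviceGeneration.renderers._pdfFontFallback import wrapEmojiSpansInXml

# already-escaped ReportLab mini-XML; the <b> tag is passed through untouched
src = "Status: <b>OK</b> \u2705 done \U0001F389"
print(wrapEmojiSpansInXml(src))
# Expected shape of the output:
# Status: <b>OK</b> <font name="NotoEmoji">✅</font> done <font name="NotoEmoji">🎉</font>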
@ -27,6 +27,8 @@ except ImportError:

import re as _re_pdf

from ._pdfFontFallback import wrapEmojiSpansInXml as _wrapEmojiSpansInXml

# A4 width in pt; margins must match SimpleDocTemplate(leftMargin/rightMargin)
_PDF_MARGIN_LR_PT = 72.0
_PDF_A4_WIDTH_PT = 595.27

@ -622,6 +624,8 @@ class RendererPdf(BaseRenderer):
        """Turn common markdown inline (**bold**, *italic*, `code`) into ReportLab XML.
        Backtick spans are extracted first so paths like `.../<Slug>/...` are not corrupted by
        markdown patterns and XML escaping stays well-formed inside <font name=\"Courier\">.
        Emoji codepoints are wrapped in <font name="NotoEmoji">...</font> so they render
        as monochrome glyphs instead of missing-glyph squares from the WinAnsi core fonts.
        """
        if not text:
            return ""

@ -635,7 +639,7 @@ class RendererPdf(BaseRenderer):
            out.append(f'<font name="Courier">{self._escapeReportlabXml(code)}</font>')
            pos = m.end()
        out.append(self._applyInlineMarkdownToEscapedPlain(text[pos:]))
        return "".join(out)
        return _wrapEmojiSpansInXml("".join(out))

    def _paragraphFromInlineMarkdown(self, text: str, style: ParagraphStyle) -> Paragraph:
        return Paragraph(self._markdownInlineToReportlabXml(text), style)
@ -311,6 +311,61 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag

        attributes.append(attr_def)

    # Pydantic v2 computed fields (@computed_field). These are read-only properties
    # serialized into ``model_dump()`` and exposed to the frontend as ordinary
    # attributes so resolveColumnTypes() can pick them up (label / type / format
    # labels / options). No DB persistence — they are derived from regular fields.
    computedFields = getattr(modelClass, "model_computed_fields", {}) or {}
    for name, computedInfo in computedFields.items():
        jsonExtra = getattr(computedInfo, "json_schema_extra", None) or {}
        if callable(jsonExtra):
            jsonExtra = {}

        frontendType = jsonExtra.get("frontend_type") if isinstance(jsonExtra, dict) else None
        if not frontendType:
            returnType = getattr(computedInfo, "return_type", None)
            typeName = getattr(returnType, "__name__", None) or str(returnType)
            frontendType = "checkbox" if typeName == "bool" else "text"

        frontendVisible = (
            jsonExtra.get("frontend_visible", True) if isinstance(jsonExtra, dict) else True
        )
        frontendFormat = (
            jsonExtra.get("frontend_format") if isinstance(jsonExtra, dict) else None
        )
        frontendFormatLabels = (
            jsonExtra.get("frontend_format_labels") if isinstance(jsonExtra, dict) else None
        )
        frontendOptions = (
            jsonExtra.get("frontend_options") if isinstance(jsonExtra, dict) else None
        )

        attrDef = {
            "name": name,
            "type": frontendType,
            "required": False,
            "description": str(getattr(computedInfo, "description", "") or ""),
            "label": labels.get(name, name),
            "placeholder": "",
            "editable": False,
            "visible": frontendVisible,
            "order": len(attributes),
            "readonly": True,
            "options": _resolveOptionLabels(frontendOptions),
            "default": None,
        }

        if frontendFormat:
            attrDef["frontendFormat"] = frontendFormat
        if frontendFormatLabels and isinstance(frontendFormatLabels, list):
            from modules.shared.i18nRegistry import resolveText
            attrDef["frontendFormatLabels"] = [
                resolveText(lbl) if isinstance(lbl, (str, dict)) else str(lbl)
                for lbl in frontendFormatLabels
            ]

        attributes.append(attrDef)

    return {"model": model_label, "attributes": attributes}
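For reference, the kind of declaration this branch targets. The model below is a hedged sketch, not one of the repository's models; the json_schema_extra keys mirror exactly the ones the loop above reads.

from typing import Optional
from pydantic import BaseModel, computed_field

class DemoRecord(BaseModel):  # hypothetical model, for illustration only
    startedAt: Optional[float] = None
    finishedAt: Optional[float] = None

    @computed_field(
        return_type=bool,
        json_schema_extra={
            "label": "Abgeschlossen",
            "frontend_type": "checkbox",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    @property
    def isFinished(self) -> bool:
        # read-only: serialized by model_dump(), never persisted to the DB
        return self.finishedAt is not None

# getModelAttributeDefinitions(DemoRecord) would then emit an "isFinished"
# attribute with type "checkbox", readonly=True and the three format labels.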
@ -82,6 +82,11 @@ class FkRelationship:
    targetDb: str
    targetTable: str
    targetColumn: str
    # Soft references hold sentinel / lineage values that are intentionally
    # not backed by a DB row (e.g. AutoWorkflow.templateSourceId can store an
    # in-code template ID like "trustee-receipt-import"). The orphan scanner
    # MUST skip these to avoid deleting valid records.
    softFk: bool = False


def _buildTableToDbMap() -> Dict[str, str]:

@ -192,6 +197,7 @@ def _discoverFkRelationships() -> List[FkRelationship]:
            targetDb = fkTarget.get("db", "")
            targetTable = fkTarget.get("table", "")
            targetColumn = fkTarget.get("column", "id")
            softFk = bool(fkTarget.get("softFk", False))

            if not targetDb or not targetTable:
                continue

@ -204,6 +210,7 @@ def _discoverFkRelationships() -> List[FkRelationship]:
                    targetDb=targetDb,
                    targetTable=targetTable,
                    targetColumn=targetColumn,
                    softFk=softFk,
                )
            )
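How a model field would opt into the soft-FK treatment, as a sketch: the inner keys (`db`, `table`, `column`, `softFk`) are exactly what `_discoverFkRelationships` reads above, but the outer json_schema_extra key name, the db name and the model itself are assumptions for illustration.

from typing import Optional
from pydantic import BaseModel, Field

class AutoWorkflowSketch(BaseModel):  # illustration only, not the real model
    templateSourceId: Optional[str] = Field(
        default=None,
        description="Template lineage; may hold an in-code template ID",
        json_schema_extra={
            # outer key name assumed; whatever discovery reads as `fkTarget`
            "fkTarget": {
                "db": "app",
                "table": "AutoWorkflow",
                "column": "id",
                "softFk": True,  # sentinel/lineage value: orphan scanner skips it
            },
        },
    )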
@ -226,6 +226,30 @@ def i18nModel(modelLabel: str, aiContext: str = ""):
                    if isinstance(token, str) and token.strip():
                        t(token, fmtCtx, "")

        # Pydantic v2 computed fields (@computed_field) — same handling as
        # regular model_fields so labels and frontend_format_labels are
        # registered for i18n and appear in MODEL_LABELS.
        computedFields = getattr(cls, "model_computed_fields", {}) or {}
        for fieldName, computedInfo in computedFields.items():
            extra = getattr(computedInfo, "json_schema_extra", None)
            if callable(extra) or not isinstance(extra, dict):
                attributes.setdefault(fieldName, fieldName)
                continue
            label = extra.get("label")
            if label:
                desc = getattr(computedInfo, "description", "") or ""
                t(label, f"table.{className}.{fieldName}", desc)
                attributes[fieldName] = label
            else:
                attributes.setdefault(fieldName, fieldName)

            formatLabels = extra.get("frontend_format_labels")
            if isinstance(formatLabels, list):
                fmtCtx = f"table.{className}.{fieldName}.format"
                for token in formatLabels:
                    if isinstance(token, str) and token.strip():
                        t(token, fmtCtx, "")

        MODEL_LABELS[className] = {
            "model": modelLabel,
            "attributes": attributes,

@ -610,6 +634,7 @@ def _registerDatamodelOptionLabels():
        "modules.datamodels.datamodelDataSource",
        "modules.datamodels.datamodelFeatureDataSource",
        "modules.datamodels.datamodelUiLanguage",
        "modules.datamodels.datamodelViews",
        "modules.features.trustee.datamodelFeatureTrustee",
        "modules.features.neutralization.datamodelFeatureNeutralizer",
    )
@ -339,6 +339,14 @@ def _scanOrphans(dbFilter: Optional[str] = None) -> List[dict]:
    try:
        for rel in relationships:
            try:
                if rel.softFk:
                    logger.debug(
                        "Skipping soft FK %s.%s.%s -> %s.%s.%s",
                        rel.sourceDb, rel.sourceTable, rel.sourceColumn,
                        rel.targetDb, rel.targetTable, rel.targetColumn,
                    )
                    continue

                sourceTables = _existingTables(rel.sourceDb)
                if rel.sourceTable not in sourceTables:
                    continue

@ -458,6 +466,12 @@ def _cleanOrphans(db: str, table: str, column: str, force: bool = False) -> int:
    )
    if rel is None:
        raise ValueError(f"No FK relationship found for {db}.{table}.{column}")
    if rel.softFk:
        raise OrphanCleanupRefused(
            f"Refusing cleanup: {rel.sourceDb}.{rel.sourceTable}.{rel.sourceColumn} is "
            f"declared as a soft FK (sentinel / lineage reference) and is intentionally "
            f"excluded from orphan deletion."
        )

    conn = _getConnection(rel.sourceDb)
    targetConn = None

@ -571,18 +585,38 @@ def _cleanOrphans(db: str, table: str, column: str, force: bool = False) -> int:
    return deleted


def _cleanAllOrphans(force: bool = False) -> List[dict]:
def _isUserIdFk(targetTable: str, targetColumn: str) -> bool:
    """Match the UserInDB.id reference exactly (case-insensitive on table name).

    Orphans pointing at deleted users are a distinct category: they accumulate
    naturally on every audit / billing / membership row when a user is deleted,
    and the SysAdmin typically wants to handle them separately from "real" FK
    drift. The orphan UI exposes a checkbox `excludeUserFks` that hides them
    from the list and skips them in `clean-all`; this helper keeps the rule
    in one place so frontend + clean-all + scan stay in lock-step.
    """
    return targetTable.lower() == "userindb" and targetColumn == "id"


def _cleanAllOrphans(force: bool = False, excludeUserFks: bool = False) -> List[dict]:
    """Clean all detected orphans. Returns list of {db, table, column, deleted, [error|skipped]}.

    Safety: each individual cleanup re-validates target row counts at delete-time
    to avoid cascading wipes (e.g. one delete emptying a target table that the
    next iteration depends on). Without force=True, dangerous cleanups are skipped.

    When ``excludeUserFks=True``, FK relationships pointing at ``UserInDB.id``
    are skipped entirely — those orphans (deleted-user remnants in audit /
    billing / membership tables) are typically handled by a dedicated user
    purge workflow, not by generic FK cleanup.
    """
    orphans = _scanOrphans()
    results = []
    for orphan in orphans:
        if orphan.get("orphanCount", 0) <= 0:
            continue
        if excludeUserFks and _isUserIdFk(orphan.get("targetTable", ""), orphan.get("targetColumn", "")):
            continue
        try:
            deleted = _cleanOrphans(
                orphan["sourceDb"],

@ -651,6 +685,8 @@ def _listOrphans(
    )
    if rel is None:
        raise ValueError(f"No FK relationship found for {db}.{table}.{column}")
    if rel.softFk:
        return []

    safeLimit = max(1, min(int(limit), 10000))
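Typical call pattern from the admin side, as a sketch; the surrounding route and permission handling is omitted here.

# Scan first: read-only overview, nothing deleted.
orphans = _scanOrphans()

# Cleanup that leaves deleted-user remnants alone and lets the per-relationship
# safety re-validation skip anything dangerous (no force).
results = _cleanAllOrphans(force=False, excludeUserFks=True)
for entry in results:
    # each entry carries the source coordinates plus either a deleted
    # count, a skip marker or an error message (see docstring above)
    print(entry)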
@ -251,3 +251,28 @@ def test_inline_code_angle_brackets_escaped_in_font_span():
    xml = renderer._markdownInlineToReportlabXml("unter `Eingabe/<Slug>/` speichern")
    assert 'name="Courier"' in xml
    assert "<Slug>" in xml


def test_emoji_codepoints_wrapped_in_emoji_font_span():
    """Emoji codepoints must be wrapped in <font name="NotoEmoji">…</font> so
    ReportLab swaps to the Noto Emoji TTF instead of producing missing-glyph squares."""
    if not REPORTLAB_AVAILABLE:
        pytest.skip("reportlab is not installed")
    renderer = RendererPdf(services=_fakeServices())
    xml = renderer._markdownInlineToReportlabXml("Status: \U0001F600 done \U0001F389")
    # Either the font registered (preferred) and wrapped, or font missing and
    # text passes through unchanged. Both branches must keep the body readable.
    from modules.serviceCenter.services.serviceGeneration.renderers._pdfFontFallback import (
        _initialize as _emojiInit,
    )
    if _emojiInit():
        assert 'name="NotoEmoji"' in xml
        assert "\U0001F600" in xml
        assert "\U0001F389" in xml
    else:
        assert "\U0001F600" in xml
    # Bold + emoji must produce nested font tag inside <b>...</b>
    xmlBold = renderer._markdownInlineToReportlabXml("**OK \U00002705**")
    assert "<b>" in xmlBold and "</b>" in xmlBold
    if _emojiInit():
        assert 'name="NotoEmoji"' in xmlBold