Compare commits
8 commits
5455e09367
...
d4c96876ae
| Author | SHA1 | Date | |
|---|---|---|---|
| d4c96876ae | |||
| c698413eaa | |||
|
|
218c58709e | ||
|
|
a6806dd04b | ||
| 51dfb007f6 | |||
| 0659d0d21a | |||
| 9115d9eec8 | |||
| da974190ea |
77 changed files with 4352 additions and 1399 deletions
3
app.py
3
app.py
|
|
@ -600,6 +600,9 @@ app.include_router(promptRouter)
|
|||
from modules.routes.routeDataConnections import router as connectionsRouter
|
||||
app.include_router(connectionsRouter)
|
||||
|
||||
from modules.routes.routeTableViews import router as tableViewsRouter
|
||||
app.include_router(tableViewsRouter)
|
||||
|
||||
from modules.routes.routeSecurityLocal import router as localRouter
|
||||
app.include_router(localRouter)
|
||||
|
||||
|
|
|
|||
|
|
@ -351,6 +351,7 @@ class AiAnthropic(BaseConnectorAi):
|
|||
|
||||
# Parse response
|
||||
anthropicResponse = response.json()
|
||||
stop_reason = anthropicResponse.get("stop_reason")
|
||||
|
||||
# Extract content and tool_use blocks from response
|
||||
content = ""
|
||||
|
|
@ -374,9 +375,25 @@ class AiAnthropic(BaseConnectorAi):
|
|||
|
||||
if not content and not toolCalls:
|
||||
logger.warning(f"Anthropic API returned empty content. Full response: {anthropicResponse}")
|
||||
content = "[Anthropic API returned empty response]"
|
||||
err = (
|
||||
"Anthropic refused the request (content policy) — try another model or adjust the prompt."
|
||||
if stop_reason == "refusal"
|
||||
else f"Anthropic returned no assistant text (stop_reason={stop_reason or 'unknown'})."
|
||||
)
|
||||
return AiModelResponse(
|
||||
content="",
|
||||
success=False,
|
||||
error=err,
|
||||
modelId=model.name,
|
||||
metadata={
|
||||
"response_id": anthropicResponse.get("id", ""),
|
||||
"stop_reason": stop_reason,
|
||||
},
|
||||
)
|
||||
|
||||
metadata = {"response_id": anthropicResponse.get("id", "")}
|
||||
if stop_reason:
|
||||
metadata["stop_reason"] = stop_reason
|
||||
if toolCalls:
|
||||
metadata["toolCalls"] = toolCalls
|
||||
|
||||
|
|
@ -492,6 +509,19 @@ class AiAnthropic(BaseConnectorAi):
|
|||
f"Anthropic stream returned empty response: model={model.name}, "
|
||||
f"stopReason={stopReason}"
|
||||
)
|
||||
err = (
|
||||
"Anthropic refused the request (content policy) — try another model or adjust the prompt."
|
||||
if stopReason == "refusal"
|
||||
else f"Anthropic returned no assistant text (stop_reason={stopReason or 'unknown'})."
|
||||
)
|
||||
yield AiModelResponse(
|
||||
content="",
|
||||
success=False,
|
||||
error=err,
|
||||
modelId=model.name,
|
||||
metadata={"stopReason": stopReason} if stopReason else {},
|
||||
)
|
||||
return
|
||||
|
||||
metadata: Dict[str, Any] = {}
|
||||
if stopReason:
|
||||
|
|
|
|||
|
|
@ -834,7 +834,10 @@ class DatabaseConnector:
|
|||
createdTs = record.get("sysCreatedAt")
|
||||
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
||||
record["sysCreatedAt"] = currentTime
|
||||
if effective_user_id:
|
||||
# Do not wipe caller-provided sysCreatedBy (e.g. FileItem from createFile with
|
||||
# real user). ContextVar can be "system" for the DB pool while the business
|
||||
# user is set on the record from model_dump().
|
||||
if effective_user_id and not record.get("sysCreatedBy"):
|
||||
record["sysCreatedBy"] = effective_user_id
|
||||
elif not record.get("sysCreatedBy"):
|
||||
if effective_user_id:
|
||||
|
|
@ -1531,7 +1534,7 @@ class DatabaseConnector:
|
|||
createdTs = rec.get("sysCreatedAt")
|
||||
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
||||
rec["sysCreatedAt"] = currentTime
|
||||
if effectiveUserId:
|
||||
if effectiveUserId and not rec.get("sysCreatedBy"):
|
||||
rec["sysCreatedBy"] = effectiveUserId
|
||||
elif not rec.get("sysCreatedBy") and effectiveUserId:
|
||||
rec["sysCreatedBy"] = effectiveUserId
|
||||
|
|
|
|||
|
|
@ -210,6 +210,9 @@ class ClickupListsAdapter(ServiceAdapter):
|
|||
data = await self._svc.getTask(task_id)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
return json.dumps(data).encode("utf-8")
|
||||
returnedId = data.get("id", "") if isinstance(data, dict) else ""
|
||||
if returnedId and returnedId != task_id:
|
||||
logger.warning(f"ClickUp download: requested task_id={task_id} but API returned id={returnedId}")
|
||||
payload = json.dumps(data, indent=2).encode("utf-8")
|
||||
return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")
|
||||
|
||||
|
|
|
|||
|
|
@ -155,9 +155,12 @@ def coerceDocumentReferenceList(value: Any) -> DocumentReferenceList:
|
|||
return coerceDocumentReferenceList(value[innerKey])
|
||||
docId = value.get("documentId") or value.get("id")
|
||||
if docId:
|
||||
docIdStr = str(docId)
|
||||
if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
|
||||
return DocumentReferenceList.from_string_list([docIdStr])
|
||||
return DocumentReferenceList(references=[
|
||||
DocumentItemReference(
|
||||
documentId=str(docId),
|
||||
documentId=docIdStr,
|
||||
fileName=value.get("fileName") or value.get("name"),
|
||||
)
|
||||
])
|
||||
|
|
@ -180,8 +183,13 @@ def coerceDocumentReferenceList(value: Any) -> DocumentReferenceList:
|
|||
continue
|
||||
docId = item.get("documentId") or item.get("id")
|
||||
if docId:
|
||||
docIdStr = str(docId)
|
||||
if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
|
||||
parsed = DocumentReferenceList.from_string_list([docIdStr])
|
||||
references.extend(parsed.references)
|
||||
else:
|
||||
references.append(DocumentItemReference(
|
||||
documentId=str(docId),
|
||||
documentId=docIdStr,
|
||||
fileName=item.get("fileName") or item.get("name"),
|
||||
))
|
||||
elif item.get("label"):
|
||||
|
|
|
|||
|
|
@ -10,6 +10,69 @@ import uuid
|
|||
import base64
|
||||
|
||||
|
||||
@i18nModel("Ordner")
|
||||
class FileFolder(PowerOnModel):
|
||||
"""Persistenter Datei-Ordner im Management-DB-Kontext (RBAC wie FileItem)."""
|
||||
|
||||
id: str = Field(
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
description="Primary key",
|
||||
json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||
)
|
||||
name: str = Field(
|
||||
description="Display name of the folder",
|
||||
json_schema_extra={"label": "Name", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
|
||||
)
|
||||
parentId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Parent folder id; empty or None for root",
|
||||
json_schema_extra={
|
||||
"label": "Uebergeordneter Ordner",
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default="",
|
||||
description="ID of the mandate this folder belongs to",
|
||||
json_schema_extra={
|
||||
"label": "Mandant",
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
default="",
|
||||
description="ID of the feature instance this folder belongs to",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
scope: str = Field(
|
||||
default="personal",
|
||||
description="Data visibility scope: personal, featureInstance, mandate, global",
|
||||
json_schema_extra={"label": "Sichtbarkeit", "frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
|
||||
{"value": "personal", "label": "Persönlich"},
|
||||
{"value": "featureInstance", "label": "Feature-Instanz"},
|
||||
{"value": "mandate", "label": "Mandant"},
|
||||
{"value": "global", "label": "Global"},
|
||||
]},
|
||||
)
|
||||
neutralize: bool = Field(
|
||||
default=False,
|
||||
description="Whether files in this folder should be neutralized before AI processing",
|
||||
json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
|
||||
)
|
||||
|
||||
|
||||
@i18nModel("Datei")
|
||||
class FileItem(PowerOnModel):
|
||||
"""Metadaten einer gespeicherten Datei."""
|
||||
|
|
@ -44,6 +107,17 @@ class FileItem(PowerOnModel):
|
|||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
folderId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="ID of the folder containing this file (if any)",
|
||||
json_schema_extra={
|
||||
"label": "Ordner",
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||
},
|
||||
)
|
||||
mimeType: str = Field(
|
||||
description="MIME type of the file",
|
||||
json_schema_extra={"label": "MIME-Typ", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||
|
|
|
|||
|
|
@ -9,50 +9,95 @@ All models use camelStyle naming convention for consistency with frontend.
|
|||
from typing import List, Dict, Any, Optional, Generic, TypeVar
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
import math
|
||||
import uuid
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Table Grouping models
|
||||
# Group layout models (Strategy B — derived from Views, purely presentational)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TableGroupNode(BaseModel):
|
||||
class GroupByLevel(BaseModel):
|
||||
"""One level of a multi-level grouping definition, stored inside a TableListView config."""
|
||||
field: str = Field(..., description="Field key to group by")
|
||||
nullLabel: str = Field(default="—", description="Display label for null/empty values")
|
||||
direction: str = Field(
|
||||
default="asc",
|
||||
description="Order of group bands at this level: 'asc' or 'desc'",
|
||||
)
|
||||
|
||||
|
||||
class GroupBand(BaseModel):
|
||||
"""
|
||||
A single node in a user-defined group tree for a FormGeneratorTable.
|
||||
A contiguous block of rows that share the same group path, intersecting the current page.
|
||||
|
||||
Items belong to exactly one group (no multi-membership).
|
||||
Groups can be nested to arbitrary depth via subGroups.
|
||||
startRowIndex and rowCount are 0-based indices relative to the current page's items[].
|
||||
"""
|
||||
id: str
|
||||
name: str
|
||||
itemIds: List[str] = Field(default_factory=list)
|
||||
subGroups: List['TableGroupNode'] = Field(default_factory=list)
|
||||
order: int = 0
|
||||
isExpanded: bool = True
|
||||
|
||||
TableGroupNode.model_rebuild()
|
||||
path: List[str] = Field(..., description="Hierarchical group key (one entry per level)")
|
||||
label: str = Field(..., description="Display label for this band (last path element)")
|
||||
startRowIndex: int = Field(..., description="0-based start index within items[] on this page")
|
||||
rowCount: int = Field(..., description="Number of items in this band on this page")
|
||||
|
||||
|
||||
class TableGrouping(BaseModel):
|
||||
class GroupLayout(BaseModel):
|
||||
"""
|
||||
Persisted grouping configuration for one (user, contextKey) pair.
|
||||
Stored in table_groupings in poweron_app (auto-created).
|
||||
Grouping structure for the current response page.
|
||||
Included only when the effective view has groupByLevels configured.
|
||||
The frontend renders group header rows by iterating bands and inserting
|
||||
headers before each startRowIndex.
|
||||
"""
|
||||
levels: List[str] = Field(..., description="Ordered field keys that define the grouping hierarchy")
|
||||
bands: List[GroupBand] = Field(..., description="Bands intersecting the current page, in order")
|
||||
|
||||
|
||||
class AppliedViewMeta(BaseModel):
|
||||
"""Minimal metadata about the view that was applied to this response."""
|
||||
viewKey: Optional[str] = None
|
||||
displayName: Optional[str] = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Persisted view model
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TableListView(BaseModel):
|
||||
"""
|
||||
A saved table view for one (userId, contextKey) pair.
|
||||
|
||||
config schema (schemaVersion=1):
|
||||
{
|
||||
"schemaVersion": 1,
|
||||
"filters": {}, # same structure as PaginationParams.filters
|
||||
"sort": [], # same structure as PaginationParams.sort
|
||||
"groupByLevels": [ # ordered grouping levels
|
||||
{"field": "scope", "nullLabel": "—", "direction": "asc"}
|
||||
],
|
||||
"collapsedSectionKeys": [], # optional: section UI (stable group keys)
|
||||
"collapsedGroupKeys": [], # optional: inline group bands (path.join('///'))
|
||||
}
|
||||
|
||||
contextKey convention: API path without /api/ prefix and without trailing slash.
|
||||
Examples: "connections", "prompts", "admin/users", "trustee/{instanceId}/documents"
|
||||
Examples: "connections", "prompts", "admin/users", "files/list"
|
||||
|
||||
viewKey is a user-defined slug, unique per (userId, mandateId, contextKey).
|
||||
"""
|
||||
id: str
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
userId: str
|
||||
mandateId: Optional[str] = None
|
||||
contextKey: str
|
||||
rootGroups: List[TableGroupNode] = Field(default_factory=list)
|
||||
viewKey: str
|
||||
displayName: str
|
||||
config: Dict[str, Any] = Field(default_factory=dict)
|
||||
updatedAt: Optional[float] = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sort and pagination models
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class SortField(BaseModel):
|
||||
"""
|
||||
Single sort field configuration.
|
||||
"""
|
||||
"""Single sort field configuration."""
|
||||
field: str = Field(..., description="Field name to sort by")
|
||||
direction: str = Field(..., description="Sort direction: 'asc' or 'desc'")
|
||||
|
||||
|
|
@ -61,16 +106,13 @@ class PaginationParams(BaseModel):
|
|||
"""
|
||||
Complete pagination state including page, sorting, and filters.
|
||||
|
||||
Grouping extensions (both optional — omit when not using grouping):
|
||||
groupId — Scope the request to items belonging to this group.
|
||||
The backend resolves it to an itemIds IN-filter before
|
||||
applying normal pagination/search/filter logic.
|
||||
Also applied for mode=ids and mode=filterValues so that
|
||||
bulk-select and filter-dropdowns respect the group scope.
|
||||
saveGroupTree — If present the backend persists this tree for the current
|
||||
(user, contextKey) pair *before* fetching, then returns
|
||||
the confirmed tree in the response groupTree field.
|
||||
Omit on every request that does not change the group tree.
|
||||
View extension (optional):
|
||||
viewKey — Slug of a saved TableListView for this (user, contextKey) pair.
|
||||
The server loads the view, merges its filters/sort/groupByLevels
|
||||
into the effective query (request fields take priority over view
|
||||
defaults for explicitly provided fields), and returns groupLayout
|
||||
in the response when groupByLevels is non-empty.
|
||||
Omit or set to None for the default (ungrouped) view.
|
||||
"""
|
||||
page: int = Field(ge=1, description="Current page number (1-based)")
|
||||
pageSize: int = Field(ge=1, le=1000, description="Number of items per page")
|
||||
|
|
@ -85,13 +127,16 @@ class PaginationParams(BaseModel):
|
|||
- Supported operators: equals/eq, contains, startsWith, endsWith, gt, gte, lt, lte, in, notIn
|
||||
- Multiple filters are combined with AND logic"""
|
||||
)
|
||||
groupId: Optional[str] = Field(
|
||||
viewKey: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Scope request to items of this group (resolved server-side to itemIds IN-filter)",
|
||||
description="Slug of a saved view to load; server merges view config into effective query",
|
||||
)
|
||||
saveGroupTree: Optional[List[Dict[str, Any]]] = Field(
|
||||
groupByLevels: Optional[List[GroupByLevel]] = Field(
|
||||
default=None,
|
||||
description="If set, persist this group tree before fetching (optimistic save)",
|
||||
description=(
|
||||
"When set (including an empty list), replaces the saved view's groupByLevels for this request. "
|
||||
"Omit entirely to use grouping from the view only."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -130,16 +175,22 @@ class PaginatedResponse(BaseModel, Generic[T]):
|
|||
"""
|
||||
Response containing paginated data and metadata.
|
||||
|
||||
groupTree is included when the endpoint supports table grouping and the
|
||||
current user has a saved group tree for the requested contextKey.
|
||||
It is None when grouping is not configured for the endpoint or the user
|
||||
has not created any groups yet. Frontend must treat None as an empty tree.
|
||||
groupLayout is included when the effective view has groupByLevels configured.
|
||||
It describes how to render group header rows in the current page's items[].
|
||||
Omitted (None) when no grouping is active.
|
||||
|
||||
appliedView describes which saved view was merged into this response,
|
||||
allowing the frontend to synchronise its view selector.
|
||||
"""
|
||||
items: List[T] = Field(..., description="Array of items for current page")
|
||||
pagination: Optional[PaginationMetadata] = Field(..., description="Pagination metadata (None if pagination not applied)")
|
||||
groupTree: Optional[List[TableGroupNode]] = Field(
|
||||
groupLayout: Optional[GroupLayout] = Field(
|
||||
default=None,
|
||||
description="Current group tree for this (user, contextKey) pair — None if no grouping configured",
|
||||
description="Group band structure for this page (None if no grouping active)",
|
||||
)
|
||||
appliedView: Optional[AppliedViewMeta] = Field(
|
||||
default=None,
|
||||
description="Metadata about the view applied to this response",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||
|
|
@ -148,34 +199,30 @@ class PaginatedResponse(BaseModel, Generic[T]):
|
|||
def normalize_pagination_dict(pagination_dict: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Normalize pagination dictionary to handle frontend variations.
|
||||
Moves top-level "search" field into filters if present.
|
||||
Grouping fields (groupId, saveGroupTree) are passed through as-is.
|
||||
|
||||
Args:
|
||||
pagination_dict: Raw pagination dictionary from frontend
|
||||
|
||||
Returns:
|
||||
Normalized pagination dictionary ready for PaginationParams parsing
|
||||
- Moves top-level "search" field into filters if present.
|
||||
- Silently drops legacy fields (groupId, saveGroupTree) that were part of the
|
||||
old tree-grouping implementation so old clients do not cause validation errors.
|
||||
- Passes viewKey through unchanged.
|
||||
"""
|
||||
if not pagination_dict:
|
||||
return pagination_dict
|
||||
|
||||
# Create a copy to avoid modifying the original
|
||||
normalized = dict(pagination_dict)
|
||||
|
||||
# Ensure required fields have sensible defaults
|
||||
if "page" not in normalized:
|
||||
normalized["page"] = 1
|
||||
if "pageSize" not in normalized:
|
||||
normalized["pageSize"] = 25
|
||||
|
||||
# Move top-level "search" into filters if present
|
||||
# Move top-level "search" into filters
|
||||
if "search" in normalized:
|
||||
if "filters" not in normalized or normalized["filters"] is None:
|
||||
normalized["filters"] = {}
|
||||
normalized["filters"]["search"] = normalized.pop("search")
|
||||
|
||||
# groupId / saveGroupTree are valid PaginationParams fields — pass through unchanged.
|
||||
# No transformation needed; Pydantic will validate them.
|
||||
# Drop legacy tree-grouping fields — harmless if already absent
|
||||
normalized.pop("groupId", None)
|
||||
normalized.pop("saveGroupTree", None)
|
||||
|
||||
return normalized
|
||||
|
|
|
|||
|
|
@ -12,17 +12,30 @@ import uuid
|
|||
from typing import Dict, Any, List, Optional
|
||||
|
||||
|
||||
def _make_json_serializable(obj: Any) -> Any:
|
||||
_INTERNAL_SKIP_KEYS = frozenset({"_context", "_orderedNodes"})
|
||||
|
||||
|
||||
def _make_json_serializable(obj: Any, _depth: int = 0) -> Any:
|
||||
"""
|
||||
Recursively convert bytes to base64 strings so structures can be JSON-serialized
|
||||
for storage in JSONB columns.
|
||||
|
||||
Internal runtime keys (_context, _orderedNodes) are skipped — they hold live
|
||||
Python objects (including back-references to nodeOutputs) and must never be
|
||||
stored. A depth guard prevents runaway recursion on unexpected circular refs.
|
||||
"""
|
||||
if _depth > 50:
|
||||
return None
|
||||
if isinstance(obj, bytes):
|
||||
return base64.b64encode(obj).decode("ascii")
|
||||
if isinstance(obj, dict):
|
||||
return {k: _make_json_serializable(v) for k, v in obj.items()}
|
||||
return {
|
||||
k: _make_json_serializable(v, _depth + 1)
|
||||
for k, v in obj.items()
|
||||
if k not in _INTERNAL_SKIP_KEYS
|
||||
}
|
||||
if isinstance(obj, list):
|
||||
return [_make_json_serializable(v) for v in obj]
|
||||
return [_make_json_serializable(v, _depth + 1) for v in obj]
|
||||
return obj
|
||||
|
||||
from modules.datamodels.datamodelUam import User
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
from modules.shared.i18nRegistry import t
|
||||
|
||||
_AI_COMMON_PARAMS = [
|
||||
{"name": "requireNeutralization", "type": "boolean", "required": False,
|
||||
{"name": "requireNeutralization", "type": "bool", "required": False,
|
||||
"frontendType": "checkbox", "default": False,
|
||||
"description": t("Eingaben fuer diesen Call neutralisieren")},
|
||||
{"name": "allowedModels", "type": "array", "required": False,
|
||||
|
|
@ -19,25 +19,25 @@ AI_NODES = [
|
|||
"label": t("Prompt"),
|
||||
"description": t("Prompt eingeben und KI führt aus"),
|
||||
"parameters": [
|
||||
{"name": "aiPrompt", "type": "string", "required": True, "frontendType": "templateTextarea",
|
||||
{"name": "aiPrompt", "type": "str", "required": True, "frontendType": "templateTextarea",
|
||||
"description": t("KI-Prompt")},
|
||||
{"name": "resultType", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "resultType", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
|
||||
"description": t("Ausgabeformat"), "default": "txt"},
|
||||
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "dataRef",
|
||||
"description": t("Dokumentenliste (Upstream-Output binden)"), "default": ""},
|
||||
{"name": "context", "type": "string", "required": False, "frontendType": "dataRef",
|
||||
"description": t("Kontextdaten fuer den Prompt (Upstream-Output binden)"), "default": ""},
|
||||
{"name": "documentTheme", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||
"description": t("Dokumente aus vorherigen Schritten"), "default": ""},
|
||||
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||
"description": t("Daten aus vorherigen Schritten"), "default": ""},
|
||||
{"name": "documentTheme", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["general", "finance", "legal", "technical", "hr"]},
|
||||
"description": t("Dokument-Thema (Style-Hinweis fuer den Renderer)"), "default": "general"},
|
||||
{"name": "simpleMode", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
{"name": "simpleMode", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Einfacher Modus"), "default": True},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": [
|
||||
"DocumentList", "AiResult", "TextResult", "Transit", "LoopItem", "ActionResult",
|
||||
"FormPayload", "DocumentList", "AiResult", "TextResult", "Transit", "LoopItem", "ActionResult",
|
||||
]}},
|
||||
"outputPorts": {0: {"schema": "AiResult"}},
|
||||
"meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
|
||||
|
|
@ -50,12 +50,18 @@ AI_NODES = [
|
|||
"label": t("Web-Recherche"),
|
||||
"description": t("Recherche im Web"),
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
||||
{"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
|
||||
"description": t("Recherche-Anfrage")},
|
||||
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||
"description": t("Daten aus vorherigen Schritten"), "default": ""},
|
||||
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||
"description": t("Dokumente aus vorherigen Schritten"), "default": ""},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": [
|
||||
"FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
|
||||
]}},
|
||||
"outputPorts": {0: {"schema": "AiResult"}},
|
||||
"meta": {"icon": "mdi-magnify", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -68,14 +74,14 @@ AI_NODES = [
|
|||
"description": t("Dokumentinhalt zusammenfassen"),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
|
||||
"description": t("Dokumentenliste (Upstream-Output binden)"), "default": ""},
|
||||
{"name": "summaryLength", "type": "string", "required": False, "frontendType": "select",
|
||||
"description": t("Dokumente aus vorherigen Schritten")},
|
||||
{"name": "summaryLength", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["brief", "medium", "detailed"]},
|
||||
"description": t("Kurz, mittel oder ausführlich"), "default": "medium"},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||
"outputPorts": {0: {"schema": "AiResult"}},
|
||||
"meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -88,13 +94,13 @@ AI_NODES = [
|
|||
"description": t("Dokument in Zielsprache übersetzen"),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
|
||||
"description": t("Dokumentenliste (Upstream-Output binden)"), "default": ""},
|
||||
{"name": "targetLanguage", "type": "string", "required": True, "frontendType": "text",
|
||||
"description": t("Dokumente aus vorherigen Schritten")},
|
||||
{"name": "targetLanguage", "type": "str", "required": True, "frontendType": "text",
|
||||
"description": t("Zielsprache (z.B. de, en, French)")},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||
"outputPorts": {0: {"schema": "AiResult"}},
|
||||
"meta": {"icon": "mdi-translate", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -107,14 +113,14 @@ AI_NODES = [
|
|||
"description": t("Dokument in anderes Format konvertieren"),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
|
||||
"description": t("Dokumentenliste (Upstream-Output binden)"), "default": ""},
|
||||
{"name": "targetFormat", "type": "string", "required": True, "frontendType": "select",
|
||||
"description": t("Dokumente aus vorherigen Schritten")},
|
||||
{"name": "targetFormat", "type": "str", "required": True, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["docx", "pdf", "xlsx", "csv", "txt", "html", "json", "md"]},
|
||||
"description": t("Zielformat")},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
||||
"meta": {"icon": "mdi-file-convert", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -126,12 +132,26 @@ AI_NODES = [
|
|||
"label": t("Dokument generieren"),
|
||||
"description": t("Dokument aus Prompt generieren"),
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
||||
{"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
|
||||
"description": t("Generierungs-Prompt")},
|
||||
{"name": "outputFormat", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
|
||||
"description": t("Ausgabeformat"), "default": "docx"},
|
||||
{"name": "title", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Dokumenttitel (Metadaten / Dateiname)"), "default": ""},
|
||||
{"name": "documentType", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["letter", "memo", "proposal", "contract", "report", "email"]},
|
||||
"description": t("Dokumentart (Inhaltshinweis fuer die KI)"), "default": "proposal"},
|
||||
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||
"description": t("Daten aus vorherigen Schritten"), "default": ""},
|
||||
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||
"description": t("Dokumente aus vorherigen Schritten"), "default": ""},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": [
|
||||
"FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
|
||||
]}},
|
||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
||||
"meta": {"icon": "mdi-file-plus", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -143,15 +163,21 @@ AI_NODES = [
|
|||
"label": t("Code generieren"),
|
||||
"description": t("Code aus Beschreibung generieren"),
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
||||
{"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
|
||||
"description": t("Code-Generierungs-Prompt")},
|
||||
{"name": "resultType", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "resultType", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["py", "js", "ts", "html", "java", "cpp", "txt", "json", "csv", "xml"]},
|
||||
"description": t("Datei-Endung der erzeugten Code-Datei"), "default": "py"},
|
||||
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||
"description": t("Daten aus vorherigen Schritten"), "default": ""},
|
||||
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||
"description": t("Dokumente aus vorherigen Schritten"), "default": ""},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": [
|
||||
"FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
|
||||
]}},
|
||||
"outputPorts": {0: {"schema": "AiResult"}},
|
||||
"meta": {"icon": "mdi-code-tags", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -163,10 +189,10 @@ AI_NODES = [
|
|||
"label": t("KI-Konsolidierung"),
|
||||
"description": t("Gesammelte Ergebnisse mit KI zusammenfassen, klassifizieren oder semantisch zusammenführen"),
|
||||
"parameters": [
|
||||
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["summarize", "classify", "semanticMerge"]},
|
||||
"description": t("Konsolidierungsmodus"), "default": "summarize"},
|
||||
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
|
||||
{"name": "prompt", "type": "str", "required": False, "frontendType": "textarea",
|
||||
"description": t("Optionaler Prompt für die Konsolidierung"), "default": ""},
|
||||
] + _AI_COMMON_PARAMS,
|
||||
"inputs": 1,
|
||||
|
|
|
|||
|
|
@ -11,23 +11,23 @@ CLICKUP_NODES = [
|
|||
"label": t("Aufgaben suchen"),
|
||||
"description": t("Aufgaben in einem Workspace suchen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "teamId", "type": "string", "required": True, "frontendType": "text",
|
||||
{"name": "teamId", "type": "str", "required": True, "frontendType": "text",
|
||||
"description": t("Team-/Workspace-ID")},
|
||||
{"name": "query", "type": "string", "required": True, "frontendType": "text",
|
||||
{"name": "query", "type": "str", "required": True, "frontendType": "text",
|
||||
"description": t("Suchbegriff")},
|
||||
{"name": "page", "type": "number", "required": False, "frontendType": "number",
|
||||
{"name": "page", "type": "int", "required": False, "frontendType": "number",
|
||||
"description": t("Seite"), "default": 0},
|
||||
{"name": "listId", "type": "string", "required": False, "frontendType": "clickupList",
|
||||
{"name": "listId", "type": "str", "required": False, "frontendType": "clickupList",
|
||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||
"description": t("In dieser Liste suchen")},
|
||||
{"name": "includeClosed", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
{"name": "includeClosed", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Erledigte einbeziehen"), "default": False},
|
||||
{"name": "fullTaskData", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
{"name": "fullTaskData", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Vollständige Daten"), "default": False},
|
||||
{"name": "matchNameOnly", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
{"name": "matchNameOnly", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Nur Titel"), "default": True},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -44,15 +44,15 @@ CLICKUP_NODES = [
|
|||
"label": t("Aufgaben auflisten"),
|
||||
"description": t("Aufgaben einer Liste auflisten"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "clickupList",
|
||||
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "clickupList",
|
||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||
"description": t("Pfad zur Liste")},
|
||||
{"name": "page", "type": "number", "required": False, "frontendType": "number",
|
||||
{"name": "page", "type": "int", "required": False, "frontendType": "number",
|
||||
"description": t("Seite"), "default": 0},
|
||||
{"name": "includeClosed", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
{"name": "includeClosed", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Erledigte einbeziehen"), "default": False},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -69,12 +69,12 @@ CLICKUP_NODES = [
|
|||
"label": t("Aufgabe abrufen"),
|
||||
"description": t("Eine Aufgabe abrufen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Task-ID")},
|
||||
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "pathQuery", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Oder Pfad")},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -91,34 +91,34 @@ CLICKUP_NODES = [
|
|||
"label": t("Aufgabe erstellen"),
|
||||
"description": t("Aufgabe erstellen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "clickupList",
|
||||
{"name": "pathQuery", "type": "str", "required": False, "frontendType": "clickupList",
|
||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||
"description": t("Pfad zur Liste")},
|
||||
{"name": "listId", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "listId", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Listen-ID")},
|
||||
{"name": "name", "type": "string", "required": True, "frontendType": "text",
|
||||
{"name": "name", "type": "str", "required": True, "frontendType": "text",
|
||||
"description": t("Name")},
|
||||
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
|
||||
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
|
||||
"description": t("Beschreibung")},
|
||||
{"name": "taskStatus", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskStatus", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Status")},
|
||||
{"name": "taskPriority", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "taskPriority", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["1", "2", "3", "4"]},
|
||||
"description": t("Priorität 1-4")},
|
||||
{"name": "taskDueDateMs", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskDueDateMs", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Fälligkeit (ms)")},
|
||||
{"name": "taskAssigneeIds", "type": "object", "required": False, "frontendType": "json",
|
||||
"description": t("Zugewiesene")},
|
||||
{"name": "taskTimeEstimateMs", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskTimeEstimateMs", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Zeitschätzung (ms)")},
|
||||
{"name": "taskTimeEstimateHours", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskTimeEstimateHours", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Zeitschätzung (h)")},
|
||||
{"name": "customFieldValues", "type": "object", "required": False, "frontendType": "json",
|
||||
"description": t("Benutzerdefinierte Felder")},
|
||||
{"name": "taskFields", "type": "string", "required": False, "frontendType": "json",
|
||||
{"name": "taskFields", "type": "str", "required": False, "frontendType": "json",
|
||||
"description": t("Zusätzliches JSON")},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -135,14 +135,14 @@ CLICKUP_NODES = [
|
|||
"label": t("Aufgabe aktualisieren"),
|
||||
"description": t("Felder der Aufgabe ändern"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Task-ID")},
|
||||
{"name": "path", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "path", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Oder Pfad")},
|
||||
{"name": "taskUpdate", "type": "string", "required": False, "frontendType": "json",
|
||||
{"name": "taskUpdate", "type": "str", "required": False, "frontendType": "json",
|
||||
"description": t("JSON-Body für PUT /task/{id}, z.B. {\"name\":\"...\",\"status\":\"...\"}")},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -159,16 +159,16 @@ CLICKUP_NODES = [
|
|||
"label": t("Anhang hochladen"),
|
||||
"description": t("Datei an Task anhängen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Task-ID")},
|
||||
{"name": "path", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "path", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Oder Pfad")},
|
||||
{"name": "fileName", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "fileName", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Dateiname")},
|
||||
{"name": "content", "type": "string", "required": True, "frontendType": "hidden",
|
||||
{"name": "content", "type": "str", "required": True, "frontendType": "hidden",
|
||||
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ CONTEXT_NODES = [
|
|||
"label": t("Inhalt extrahieren"),
|
||||
"description": t("Dokumentstruktur extrahieren ohne KI (Seiten, Abschnitte, Bilder, Tabellen)"),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "string", "required": True, "frontendType": "hidden",
|
||||
{"name": "documentList", "type": "str", "required": True, "frontendType": "hidden",
|
||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "extractionOptions", "type": "object", "required": False, "frontendType": "json",
|
||||
"description": t(
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ DATA_NODES = [
|
|||
"label": t("Sammeln"),
|
||||
"description": t("Ergebnisse aus Schleifen-Iterationen sammeln"),
|
||||
"parameters": [
|
||||
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["collect", "concat", "sum", "count"]},
|
||||
"description": t("Aggregationsmodus"), "default": "collect"},
|
||||
],
|
||||
|
|
@ -27,9 +27,9 @@ DATA_NODES = [
|
|||
"label": t("Filtern"),
|
||||
"description": t("Elemente nach Bedingung filtern"),
|
||||
"parameters": [
|
||||
{"name": "condition", "type": "string", "required": True, "frontendType": "filterExpression",
|
||||
{"name": "condition", "type": "str", "required": True, "frontendType": "filterExpression",
|
||||
"description": t("Filterbedingung")},
|
||||
{"name": "udmContentType", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "udmContentType", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["", "text", "image", "table", "code", "media", "link", "formula"]},
|
||||
"description": t("UDM-ContentType-Filter (optional, leer = kein UDM-Filter)"), "default": ""},
|
||||
],
|
||||
|
|
@ -46,10 +46,10 @@ DATA_NODES = [
|
|||
"label": t("Konsolidieren"),
|
||||
"description": t("Gesammelte Ergebnisse deterministisch zusammenführen (Tabelle, CSV, Merge)"),
|
||||
"parameters": [
|
||||
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["table", "concat", "merge", "csvJoin"]},
|
||||
"description": t("Konsolidierungsmodus"), "default": "table"},
|
||||
{"name": "separator", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "separator", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Trennzeichen (für concat/csvJoin)"), "default": "\n"},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
|
|||
|
|
@ -10,14 +10,14 @@ EMAIL_NODES = [
|
|||
"label": t("E-Mail prüfen"),
|
||||
"description": t("Neue E-Mails prüfen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "msft"},
|
||||
"description": t("E-Mail-Konto Verbindung")},
|
||||
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "folder", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Ordner"), "default": "Inbox"},
|
||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
||||
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
|
||||
"description": t("Max E-Mails"), "default": 100},
|
||||
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "filter", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Filter-Ausdruck (z.B. 'from:max@example.com hasAttachment:true betreff')"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -34,14 +34,14 @@ EMAIL_NODES = [
|
|||
"label": t("E-Mail suchen"),
|
||||
"description": t("E-Mails suchen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "msft"},
|
||||
"description": t("E-Mail-Konto Verbindung")},
|
||||
{"name": "query", "type": "string", "required": True, "frontendType": "text",
|
||||
{"name": "query", "type": "str", "required": True, "frontendType": "text",
|
||||
"description": t("Suchausdruck (z.B. 'from:max@example.com hasAttachments:true Rechnung')")},
|
||||
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "folder", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Ordner"), "default": "All"},
|
||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
||||
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
|
||||
"description": t("Max E-Mails"), "default": 100},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -59,19 +59,19 @@ EMAIL_NODES = [
|
|||
"description": t(
|
||||
"AI-gestützt einen E-Mail-Entwurf aus Kontext und optionalen Dokumenten erstellen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "msft"},
|
||||
"description": t("E-Mail-Konto")},
|
||||
{"name": "context", "type": "string", "required": False, "frontendType": "templateTextarea",
|
||||
"description": t("Kontext / Brief-Beschreibung für die KI-Komposition"), "default": ""},
|
||||
{"name": "to", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "context", "type": "Any", "required": False, "frontendType": "templateTextarea",
|
||||
"description": t("Daten aus vorherigen Schritten (oder direkte Beschreibung)"), "default": ""},
|
||||
{"name": "to", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Empfänger (komma-separiert, optional für Entwurf)"), "default": ""},
|
||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
||||
{"name": "documentList", "type": "str", "required": False, "frontendType": "hidden",
|
||||
"description": t("Anhang-Dokumente (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "emailContent", "type": "string", "required": False, "frontendType": "hidden",
|
||||
{"name": "emailContent", "type": "str", "required": False, "frontendType": "hidden",
|
||||
"description": t("Direkt vorbereiteter Inhalt {subject, body, to} (via Wire — überspringt KI)"),
|
||||
"default": ""},
|
||||
{"name": "emailStyle", "type": "string", "required": False, "frontendType": "select",
|
||||
{"name": "emailStyle", "type": "str", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["formal", "casual", "business"]},
|
||||
"description": t("Stil"), "default": "business"},
|
||||
],
|
||||
|
|
|
|||
|
|
@ -10,25 +10,21 @@ FILE_NODES = [
|
|||
"label": t("Datei erstellen"),
|
||||
"description": t("Erstellt eine Datei aus Kontext (Text/Markdown von KI)."),
|
||||
"parameters": [
|
||||
{"name": "contentSources", "type": "json", "required": False, "frontendType": "json",
|
||||
"description": t("Kontext-Quellen"), "default": []},
|
||||
{"name": "outputFormat", "type": "string", "required": True, "frontendType": "select",
|
||||
{"name": "outputFormat", "type": "str", "required": True, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
|
||||
"description": t("Ausgabeformat"), "default": "docx"},
|
||||
{"name": "title", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "title", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Dokumenttitel")},
|
||||
{"name": "templateName", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["default", "corporate", "minimal"]},
|
||||
"description": t("Stil-Vorlage")},
|
||||
{"name": "language", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["de", "en", "fr"]},
|
||||
"description": t("Sprache"), "default": "de"},
|
||||
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Inhalt (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||
"description": t("Daten aus vorherigen Schritten"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit"]}},
|
||||
<<<<<<< HEAD
|
||||
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit", "FormPayload", "LoopItem", "ActionResult"]}},
|
||||
=======
|
||||
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit", "FormPayload"]}},
|
||||
>>>>>>> 875f8252 (ValueOn Lead to Offer durchgespielt, bugfixes in Dateigenerierung und ai nodes)
|
||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
||||
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3", "usesAi": False},
|
||||
"_method": "file",
|
||||
|
|
|
|||
|
|
@ -3,25 +3,46 @@
|
|||
|
||||
from modules.shared.i18nRegistry import t
|
||||
|
||||
# Ports, die typische Schritt-Ausgaben durchreichen (nicht nur leerer Transit).
|
||||
_FLOW_INPUT_SCHEMAS = [
|
||||
"Transit",
|
||||
"FormPayload",
|
||||
"AiResult",
|
||||
"TextResult",
|
||||
"ActionResult",
|
||||
"DocumentList",
|
||||
"FileList",
|
||||
"EmailList",
|
||||
"TaskList",
|
||||
"QueryResult",
|
||||
"MergeResult",
|
||||
"LoopItem",
|
||||
"BoolResult",
|
||||
"UdmDocument",
|
||||
]
|
||||
|
||||
FLOW_NODES = [
|
||||
{
|
||||
"id": "flow.ifElse",
|
||||
"category": "flow",
|
||||
"label": t("Wenn / Sonst"),
|
||||
"description": t("Verzweigung nach Bedingung"),
|
||||
"description": t(
|
||||
"Verzweigt anhand einer Bedingung auf ein vorheriges Feld oder einen Ausdruck. "
|
||||
"Die Daten vom Eingangskanal werden an den gewählten Ausgang durchgereicht."
|
||||
),
|
||||
"parameters": [
|
||||
{
|
||||
"name": "condition",
|
||||
"type": "string",
|
||||
"type": "json",
|
||||
"required": True,
|
||||
"frontendType": "condition",
|
||||
"description": t("Bedingung"),
|
||||
"description": t("Bedingung: Feld aus einem vorherigen Schritt und Vergleich"),
|
||||
},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 2,
|
||||
"outputLabels": [t("Ja"), t("Nein")],
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": list(_FLOW_INPUT_SCHEMAS)}},
|
||||
"outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}},
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-source-branch", "color": "#FF9800", "usesAi": False},
|
||||
|
|
@ -30,26 +51,29 @@ FLOW_NODES = [
|
|||
"id": "flow.switch",
|
||||
"category": "flow",
|
||||
"label": t("Switch"),
|
||||
"description": t("Mehrere Zweige nach Wert"),
|
||||
"description": t(
|
||||
"Mehrere Zweige nach einem Wert aus einem vorherigen Schritt (Data Picker). "
|
||||
"Definiere Fälle mit Vergleichsoperator; der Eingang wird an den ersten passenden Zweig durchgereicht."
|
||||
),
|
||||
"parameters": [
|
||||
{
|
||||
"name": "value",
|
||||
"type": "string",
|
||||
"type": "Any",
|
||||
"required": True,
|
||||
"frontendType": "text",
|
||||
"description": t("Zu vergleichender Wert"),
|
||||
"frontendType": "dataRef",
|
||||
"description": t("Wert zum Vergleichen (Feld aus einem vorherigen Schritt)"),
|
||||
},
|
||||
{
|
||||
"name": "cases",
|
||||
"type": "array",
|
||||
"required": False,
|
||||
"frontendType": "caseList",
|
||||
"description": t("Fälle"),
|
||||
"description": t("Fälle: Operator und Vergleichswert"),
|
||||
},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": list(_FLOW_INPUT_SCHEMAS)}},
|
||||
"outputPorts": {0: {"schema": "Transit"}},
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800", "usesAi": False},
|
||||
|
|
@ -57,39 +81,43 @@ FLOW_NODES = [
|
|||
{
|
||||
"id": "flow.loop",
|
||||
"category": "flow",
|
||||
"label": t("Schleife / Für Jedes"),
|
||||
"description": t("Über Array-Elemente oder UDM-Strukturebenen iterieren"),
|
||||
"label": t("Schleife / Für jedes"),
|
||||
"description": t(
|
||||
"Iteriert über ein Array aus einem vorherigen Schritt (z. B. documente, Zeilen, Listeneinträge). "
|
||||
"Optional: UDM-Ebene für strukturierte Dokumente."
|
||||
),
        "parameters": [
            {
                "name": "items",
                "type": "string",
                "type": "Any",
                "required": True,
                "frontendType": "text",
                "description": t("Pfad zum Array"),
                "frontendType": "dataRef",
                "description": t("Liste oder Sammlung zum Durchlaufen (im Data Picker wählen)"),
            },
            {
                "name": "level",
                "type": "string",
                "type": "str",
                "required": False,
                "frontendType": "select",
                "frontendOptions": {"options": ["auto", "documents", "structuralNodes", "contentBlocks"]},
                "description": t("UDM-Iterationsebene"),
                "description": t("Nur bei UDM-Daten: welche Strukturebene als Elemente verwendet wird"),
                "default": "auto",
            },
            {
                "name": "concurrency",
                "type": "number",
                "type": "int",
                "required": False,
                "frontendType": "number",
                "frontendOptions": {"min": 1, "max": 20},
                "description": t("Parallele Iterationen (1 = sequentiell)"),
                "description": t("Parallele Durchläufe (1 = nacheinander)"),
                "default": 1,
            },
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": [
            "Transit", "UdmDocument", "EmailList", "DocumentList", "FileList", "TaskList", "ActionResult",
            "Transit", "UdmDocument", "EmailList", "DocumentList", "FileList", "TaskList",
            "ActionResult", "AiResult", "QueryResult", "FormPayload",
        ]}},
        "outputPorts": {0: {"schema": "LoopItem"}},
        "executor": "flow",

@@ -99,30 +127,36 @@ FLOW_NODES = [
        "id": "flow.merge",
        "category": "flow",
        "label": t("Zusammenführen"),
        "description": t("Mehrere Zweige zusammenführen (2-5 Eingänge)"),
        "description": t(
            "Führt 2–5 Zweige zusammen, wenn alle verbunden sind. "
            "Modus legt fest, wie die Eingabeobjekte im Ergebnis kombiniert werden."
        ),
        "parameters": [
            {
                "name": "mode",
                "type": "string",
                "type": "str",
                "required": False,
                "frontendType": "select",
                "frontendOptions": {"options": ["first", "all", "append"]},
                "description": t("Zusammenführungsmodus"),
                "description": t("first: erster Zweig; all: Dict-Felder zusammenführen; append: Listen anhängen"),
                "default": "first",
            },
            {
                "name": "inputCount",
                "type": "number",
                "type": "int",
                "required": False,
                "frontendType": "number",
                "frontendOptions": {"min": 2, "max": 5},
                "description": t("Anzahl Eingänge"),
                "description": t("Anzahl Eingänge dieses Nodes (2–5)"),
                "default": 2,
            },
        ],
        "inputs": 2,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}, 1: {"accepts": ["Transit"]}},
        "inputPorts": {
            0: {"accepts": list(_FLOW_INPUT_SCHEMAS)},
            1: {"accepts": list(_FLOW_INPUT_SCHEMAS)},
        },
        "outputPorts": {0: {"schema": "MergeResult"}},
        "executor": "flow",
        "meta": {"icon": "mdi-call-merge", "color": "#FF9800", "usesAi": False},

@@ -3,6 +3,18 @@

from modules.shared.i18nRegistry import t

# Canonical form field types — single source of truth.
# portType maps to the PORT_TYPE_CATALOG primitive used by DataPicker / validateGraph.
FORM_FIELD_TYPES = [
    {"id": "text", "label": "Text (einzeilig)", "portType": "str"},
    {"id": "textarea", "label": "Text (mehrzeilig)", "portType": "str"},
    {"id": "number", "label": "Zahl", "portType": "int"},
    {"id": "boolean", "label": "Ja/Nein", "portType": "bool"},
    {"id": "date", "label": "Datum", "portType": "str"},
    {"id": "email", "label": "E-Mail", "portType": "str"},
    {"id": "select", "label": "Auswahl", "portType": "str"},
]
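
The mapping implied by `portType` is used verbatim further down in this diff (see `deriveFormPayloadSchemaFromParam`):

_FORM_TYPE_TO_PORT = {f["id"]: f["portType"] for f in FORM_FIELD_TYPES}
_FORM_TYPE_TO_PORT["number"]    # -> "int"
_FORM_TYPE_TO_PORT.get("date")  # -> "str" (dates apparently travel as ISO strings)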

INPUT_NODES = [
    {
        "id": "input.form",

@@ -32,11 +44,11 @@ INPUT_NODES = [
        "label": t("Genehmigung"),
        "description": t("Benutzer genehmigt oder lehnt ab"),
        "parameters": [
            {"name": "title", "type": "string", "required": True, "frontendType": "text",
            {"name": "title", "type": "str", "required": True, "frontendType": "text",
             "description": t("Genehmigungstitel")},
            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "description", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Was genehmigt werden soll")},
            {"name": "approvalType", "type": "string", "required": False, "frontendType": "select",
            {"name": "approvalType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["generic", "document"]},
             "description": t("Typ: document oder generic"), "default": "generic"},
        ],

@@ -53,14 +65,14 @@ INPUT_NODES = [
        "label": t("Upload"),
        "description": t("Benutzer lädt Datei(en) hoch"),
        "parameters": [
            {"name": "accept", "type": "string", "required": False, "frontendType": "text",
            {"name": "accept", "type": "str", "required": False, "frontendType": "text",
             "description": t("Accept-String"), "default": ""},
            {"name": "allowedTypes", "type": "json", "required": False, "frontendType": "multiselect",
             "frontendOptions": {"options": ["pdf", "docx", "xlsx", "pptx", "txt", "csv", "jpg", "png", "gif"]},
             "description": t("Ausgewählte Dateitypen"), "default": []},
            {"name": "maxSize", "type": "number", "required": False, "frontendType": "number",
            {"name": "maxSize", "type": "int", "required": False, "frontendType": "number",
             "description": t("Max. Dateigröße in MB"), "default": 10},
            {"name": "multiple", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "multiple", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Mehrere Dateien erlauben"), "default": False},
        ],
        "inputs": 1,

@@ -76,9 +88,9 @@ INPUT_NODES = [
        "label": t("Kommentar"),
        "description": t("Benutzer fügt einen Kommentar hinzu"),
        "parameters": [
            {"name": "placeholder", "type": "string", "required": False, "frontendType": "text",
            {"name": "placeholder", "type": "str", "required": False, "frontendType": "text",
             "description": t("Platzhalter"), "default": ""},
            {"name": "required", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "required", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Kommentar erforderlich"), "default": True},
        ],
        "inputs": 1,

@@ -94,9 +106,9 @@ INPUT_NODES = [
        "label": t("Prüfung"),
        "description": t("Benutzer prüft Inhalt"),
        "parameters": [
            {"name": "contentRef", "type": "string", "required": True, "frontendType": "text",
            {"name": "contentRef", "type": "str", "required": True, "frontendType": "text",
             "description": t("Referenz auf Inhalt")},
            {"name": "reviewType", "type": "string", "required": False, "frontendType": "select",
            {"name": "reviewType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["generic", "document"]},
             "description": t("Art der Prüfung"), "default": "generic"},
        ],

@@ -115,7 +127,7 @@ INPUT_NODES = [
        "parameters": [
            {"name": "options", "type": "json", "required": True, "frontendType": "keyValueRows",
             "description": t("Optionen"), "default": []},
            {"name": "multiple", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "multiple", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Mehrfachauswahl erlauben"), "default": False},
        ],
        "inputs": 1,

@@ -131,11 +143,11 @@ INPUT_NODES = [
        "label": t("Bestätigung"),
        "description": t("Benutzer bestätigt Ja/Nein"),
        "parameters": [
            {"name": "question", "type": "string", "required": True, "frontendType": "text",
            {"name": "question", "type": "str", "required": True, "frontendType": "text",
             "description": t("Zu bestätigende Frage")},
            {"name": "confirmLabel", "type": "string", "required": False, "frontendType": "text",
            {"name": "confirmLabel", "type": "str", "required": False, "frontendType": "text",
             "description": t("Label für Bestätigen-Button"), "default": "Confirm"},
            {"name": "rejectLabel", "type": "string", "required": False, "frontendType": "text",
            {"name": "rejectLabel", "type": "str", "required": False, "frontendType": "text",
             "description": t("Label für Ablehnen-Button"), "default": "Reject"},
        ],
        "inputs": 1,

@@ -25,7 +25,7 @@ REDMINE_NODES = [
        "description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
        "parameters": [
            dict(_REDMINE_INSTANCE_PARAM),
            {"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
            {"name": "ticketId", "type": "int", "required": True, "frontendType": "number",
             "description": t("Redmine-Ticket-ID")},
        ],
        "inputs": 1,

@@ -43,17 +43,17 @@ REDMINE_NODES = [
        "description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
        "parameters": [
            dict(_REDMINE_INSTANCE_PARAM),
            {"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
            {"name": "trackerIds", "type": "str", "required": False, "frontendType": "text",
             "description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
            {"name": "status", "type": "string", "required": False, "frontendType": "text",
            {"name": "status", "type": "str", "required": False, "frontendType": "text",
             "description": t("Status-Filter: open | closed | *"), "default": "*"},
            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
            {"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
             "description": t("Zeitraum ab (ISO-Datum)"), "default": ""},
            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
            {"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
             "description": t("Zeitraum bis (ISO-Datum)"), "default": ""},
            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
            {"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Nur Tickets dieses Benutzers (ID)")},
            {"name": "limit", "type": "number", "required": False, "frontendType": "number",
            {"name": "limit", "type": "int", "required": False, "frontendType": "number",
             "description": t("Max. Anzahl Tickets (1-500)"), "default": 100},
        ],
        "inputs": 1,

@@ -71,21 +71,21 @@ REDMINE_NODES = [
        "description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
        "parameters": [
            dict(_REDMINE_INSTANCE_PARAM),
            {"name": "subject", "type": "string", "required": True, "frontendType": "text",
            {"name": "subject", "type": "str", "required": True, "frontendType": "text",
             "description": t("Ticket-Titel")},
            {"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
            {"name": "trackerId", "type": "int", "required": True, "frontendType": "number",
             "description": t("Tracker-ID (Userstory, Feature, Task, ...)")},
            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "description", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Ticket-Beschreibung"), "default": ""},
            {"name": "statusId", "type": "number", "required": False, "frontendType": "number",
            {"name": "statusId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Status-ID (optional)")},
            {"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
            {"name": "priorityId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Prioritaet-ID (optional)")},
            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
            {"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Zugewiesene Benutzer-ID (optional)")},
            {"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
            {"name": "parentIssueId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Uebergeordnetes Ticket (optional)")},
            {"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "customFields", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Custom Fields als JSON {id: value}"), "default": ""},
        ],
        "inputs": 1,

@@ -103,25 +103,25 @@ REDMINE_NODES = [
        "description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
        "parameters": [
            dict(_REDMINE_INSTANCE_PARAM),
            {"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
            {"name": "ticketId", "type": "int", "required": True, "frontendType": "number",
             "description": t("Ticket-ID")},
            {"name": "subject", "type": "string", "required": False, "frontendType": "text",
            {"name": "subject", "type": "str", "required": False, "frontendType": "text",
             "description": t("Neuer Titel")},
            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "description", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Neue Beschreibung")},
            {"name": "trackerId", "type": "number", "required": False, "frontendType": "number",
            {"name": "trackerId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Neuer Tracker")},
            {"name": "statusId", "type": "number", "required": False, "frontendType": "number",
            {"name": "statusId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Neuer Status")},
            {"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
            {"name": "priorityId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Neue Prioritaet")},
            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
            {"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Neue Zuweisung")},
            {"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
            {"name": "parentIssueId", "type": "int", "required": False, "frontendType": "number",
             "description": t("Neues Parent-Ticket")},
            {"name": "notes", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "notes", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Kommentar (Journal-Eintrag)"), "default": ""},
            {"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "customFields", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Custom Fields als JSON {id: value}"), "default": ""},
        ],
        "inputs": 1,

@@ -139,13 +139,13 @@ REDMINE_NODES = [
        "description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
        "parameters": [
            dict(_REDMINE_INSTANCE_PARAM),
            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
            {"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
             "description": t("Zeitraum ab")},
            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
            {"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
             "description": t("Zeitraum bis")},
            {"name": "bucket", "type": "string", "required": False, "frontendType": "text",
            {"name": "bucket", "type": "str", "required": False, "frontendType": "text",
             "description": t("Bucket: day | week | month"), "default": "week"},
            {"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
            {"name": "trackerIds", "type": "str", "required": False, "frontendType": "text",
             "description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
        ],
        "inputs": 1,

@@ -163,7 +163,7 @@ REDMINE_NODES = [
        "description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
        "parameters": [
            dict(_REDMINE_INSTANCE_PARAM),
            {"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "force", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
        ],
        "inputs": 1,

@@ -10,14 +10,14 @@ SHAREPOINT_NODES = [
        "label": t("Datei finden"),
        "description": t("Datei nach Pfad oder Suche finden"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung")},
            {"name": "searchQuery", "type": "string", "required": True, "frontendType": "text",
            {"name": "searchQuery", "type": "str", "required": True, "frontendType": "text",
             "description": t("Suchanfrage oder Pfad")},
            {"name": "site", "type": "string", "required": False, "frontendType": "text",
            {"name": "site", "type": "str", "required": False, "frontendType": "text",
             "description": t("Optionaler Site-Hinweis"), "default": ""},
            {"name": "maxResults", "type": "number", "required": False, "frontendType": "number",
            {"name": "maxResults", "type": "int", "required": False, "frontendType": "number",
             "description": t("Max Ergebnisse"), "default": 1000},
        ],
        "inputs": 1,

@@ -34,10 +34,10 @@ SHAREPOINT_NODES = [
        "label": t("Datei lesen"),
        "description": t("Inhalt aus Datei extrahieren"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung")},
            {"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
            {"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFile",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Dateipfad")},
        ],

@@ -55,13 +55,13 @@ SHAREPOINT_NODES = [
        "label": t("Datei hochladen"),
        "description": t("Datei zu SharePoint hochladen"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung")},
            {"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFolder",
            {"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFolder",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Zielordner-Pfad")},
            {"name": "content", "type": "string", "required": True, "frontendType": "hidden",
            {"name": "content", "type": "str", "required": True, "frontendType": "hidden",
             "description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
        ],
        "inputs": 1,

@@ -78,10 +78,10 @@ SHAREPOINT_NODES = [
        "label": t("Dateien auflisten"),
        "description": t("Dateien in Ordner auflisten"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung")},
            {"name": "pathQuery", "type": "string", "required": False, "frontendType": "sharepointFolder",
            {"name": "pathQuery", "type": "str", "required": False, "frontendType": "sharepointFolder",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Ordnerpfad"), "default": "/"},
        ],

@@ -99,10 +99,10 @@ SHAREPOINT_NODES = [
        "label": t("Datei herunterladen"),
        "description": t("Datei vom Pfad herunterladen"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung")},
            {"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
            {"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFile",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Vollständiger Dateipfad")},
        ],

@@ -120,13 +120,13 @@ SHAREPOINT_NODES = [
        "label": t("Datei kopieren"),
        "description": t("Datei an Ziel kopieren"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung")},
            {"name": "sourcePath", "type": "string", "required": True, "frontendType": "sharepointFile",
            {"name": "sourcePath", "type": "str", "required": True, "frontendType": "sharepointFile",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Quelldatei-Pfad")},
            {"name": "destPath", "type": "string", "required": True, "frontendType": "sharepointFolder",
            {"name": "destPath", "type": "str", "required": True, "frontendType": "sharepointFolder",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Zielordner")},
        ],

@@ -46,7 +46,7 @@ TRIGGER_NODES = [
        "parameters": [
            {
                "name": "cron",
                "type": "string",
                "type": "str",
                "required": False,
                "frontendType": "cron",
                "description": t("Cron-Ausdruck"),

@@ -25,11 +25,11 @@ TRUSTEE_NODES = [
        "description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
        "parameters": [
            dict(_TRUSTEE_INSTANCE_PARAM),
            {"name": "forceRefresh", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "forceRefresh", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Import erzwingen"), "default": False},
            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
            {"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
             "description": t("Startdatum"), "default": ""},
            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
            {"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
             "description": t("Enddatum"), "default": ""},
        ],
        "inputs": 1,

@@ -46,14 +46,14 @@ TRUSTEE_NODES = [
        "label": t("Dokumente extrahieren"),
        "description": t("Dokumenttyp und Daten aus PDF/JPG per AI extrahieren."),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": False, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": False, "frontendType": "userConnection",
             "frontendOptions": {"authority": "msft"},
             "description": t("SharePoint-Verbindung"), "default": ""},
            {"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
            {"name": "sharepointFolder", "type": "str", "required": False, "frontendType": "sharepointFolder",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("SharePoint-Ordnerpfad"), "default": ""},
            dict(_TRUSTEE_INSTANCE_PARAM),
            {"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "prompt", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("AI-Prompt für Extraktion"), "default": ""},
        ],
        "inputs": 1,

@@ -77,7 +77,7 @@ TRUSTEE_NODES = [
            # is List[ActionDocument] (see datamodelChat.ActionResult). The
            # DataPicker uses this string to filter compatible upstream paths.
            {"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
             "description": t("Dokumentenliste — gebunden via DataRef.")},
             "description": t("Dokumente aus vorherigen Schritten")},
            dict(_TRUSTEE_INSTANCE_PARAM),
        ],
        "inputs": 1,

@@ -95,7 +95,7 @@ TRUSTEE_NODES = [
        "description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
        "parameters": [
            {"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
             "description": t("Verarbeitete Dokumentenliste — gebunden via DataRef.")},
             "description": t("Dokumente aus vorherigen Schritten")},
            dict(_TRUSTEE_INSTANCE_PARAM),
        ],
        "inputs": 1,

@@ -113,25 +113,25 @@ TRUSTEE_NODES = [
        "description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
        "parameters": [
            dict(_TRUSTEE_INSTANCE_PARAM),
            {"name": "mode", "type": "string", "required": True, "frontendType": "select",
            {"name": "mode", "type": "str", "required": True, "frontendType": "select",
             "frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
             "description": t("Abfragemodus"), "default": "lookup"},
            {"name": "entity", "type": "string", "required": True, "frontendType": "select",
            {"name": "entity", "type": "str", "required": True, "frontendType": "select",
             "frontendOptions": {"options": ["tenantWithRent", "contact", "journalLines", "accounts", "balances"]},
             "description": t("Entität, die gelesen werden soll"), "default": "tenantWithRent"},
            {"name": "tenantNameRef", "type": "string", "required": False, "frontendType": "text",
            {"name": "tenantNameRef", "type": "str", "required": False, "frontendType": "text",
             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
             "description": t("Mietername (oder {{wire.feld}} aus Upstream)"), "default": ""},
            {"name": "tenantAddressRef", "type": "string", "required": False, "frontendType": "text",
            {"name": "tenantAddressRef", "type": "str", "required": False, "frontendType": "text",
             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
             "description": t("Mieteradresse (Toleranz für Tippfehler)"), "default": ""},
            {"name": "period", "type": "string", "required": False, "frontendType": "text",
            {"name": "period", "type": "str", "required": False, "frontendType": "text",
             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "journalLines", "balances"]},
             "description": t("Zeitraum (YYYY oder YYYY-MM-DD/YYYY-MM-DD)"), "default": ""},
            {"name": "rentAccountPattern", "type": "string", "required": False, "frontendType": "text",
            {"name": "rentAccountPattern", "type": "str", "required": False, "frontendType": "text",
             "frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent"]},
             "description": t("Konto-Filter für Mietzins (z.B. '6000-6099' oder '6*')"), "default": ""},
            {"name": "filterJson", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "filterJson", "type": "str", "required": False, "frontendType": "textarea",
             "frontendOptions": {"dependsOn": "mode", "showWhen": ["raw", "aggregate"]},
             "description": t("Optionaler JSON-Filter für mode=raw/aggregate"), "default": ""},
        ],

@@ -9,6 +9,7 @@ import logging
from typing import Dict, List, Any, Optional

from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
from modules.features.graphicalEditor.nodeDefinitions.input import FORM_FIELD_TYPES
from modules.features.graphicalEditor.nodeAdapter import bindsActionFromLegacy
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText

@@ -119,6 +120,7 @@ def getNodeTypesForApi(
        "categories": categories,
        "portTypeCatalog": catalogSerialized,
        "systemVariables": SYSTEM_VARIABLES,
        "formFieldTypes": FORM_FIELD_TYPES,
    }

@@ -34,6 +34,8 @@ class PortField(BaseModel):
    # FeatureInstanceRef.featureCode). Pickers/validators use it to filter compatible
    # producers by sub-type. Type must be "str" when discriminator is True.
    discriminator: bool = False
    # Surfaces this field at the top of the DataPicker list as the most common pick.
    recommended: bool = False


class PortSchema(BaseModel):

@@ -153,7 +155,7 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
    ]),
    "DocumentList": PortSchema(name="DocumentList", fields=[
        PortField(name="documents", type="List[Document]",
                  description="Dokumentenliste"),
                  description="Dokumente aus vorherigen Schritten", recommended=True),
        PortField(name="connection", type="ConnectionRef", required=False,
                  description="Verbindung, mit der die Liste erzeugt wurde"),
        PortField(name="source", type="SharePointFolderRef", required=False,

@@ -219,9 +221,9 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
        PortField(name="prompt", type="str",
                  description="Prompt"),
        PortField(name="response", type="str",
                  description="Antworttext"),
                  description="Antworttext", recommended=True),
        PortField(name="responseData", type="Dict", required=False,
                  description="Strukturierte Antwort"),
                  description="Strukturierte Antwort (nur bei JSON-Ausgabe)"),
        PortField(name="context", type="str",
                  description="Kontext"),
        PortField(name="documents", type="List[Document]",

@@ -642,6 +644,69 @@ def resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
# Output normalizers
# ---------------------------------------------------------------------------

def _file_record_to_document(f: Any) -> Optional[Dict[str, Any]]:
    """Map API / task-upload file dicts onto PortSchema ``Document`` fields."""
    if f is None:
        return None
    if isinstance(f, str) and f.strip():
        return {"id": f.strip()}
    if not isinstance(f, dict):
        return None
    inner = f.get("file") if isinstance(f.get("file"), dict) else None
    src = inner or f
    out: Dict[str, Any] = {}
    fid = src.get("id") or f.get("id")
    if fid is not None and str(fid).strip():
        out["id"] = str(fid).strip()
    name = (
        src.get("name")
        or src.get("fileName")
        or f.get("fileName")
        or f.get("name")
    )
    if name is not None and str(name).strip():
        out["name"] = str(name).strip()
    mime = src.get("mimeType") or src.get("mime") or f.get("mimeType")
    if mime is not None and str(mime).strip():
        out["mimeType"] = str(mime).strip()
    for k in ("sizeBytes", "downloadUrl", "filePath"):
        v = src.get(k) if k in src else f.get(k)
        if v is not None and v != "":
            out[k] = v
    return out if out else None
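
Illustrative input/output pairs for the mapper above (payload shapes inferred from its lookups, not from separate documentation):

# Nested task-upload record: the inner "file" dict wins, aliases are normalized.
_file_record_to_document({"file": {"id": "f1", "fileName": "offer.pdf", "mime": "application/pdf"}})
# -> {"id": "f1", "name": "offer.pdf", "mimeType": "application/pdf"}

_file_record_to_document("  f2 ")  # bare string is treated as a file id -> {"id": "f2"}
_file_record_to_document(None)     # -> None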

def _coerce_document_list_upload_fields(result: Dict[str, Any]) -> None:
    """
    Human task ``input.upload`` completes with ``file`` / ``files`` / ``fileIds``.
    DocumentList expects ``documents``. Without this, resume adds ``documents: []`` and drops the real files.
    """
    docs = result.get("documents")
    if isinstance(docs, list) and len(docs) > 0:
        return
    collected: List[Dict[str, Any]] = []
    files = result.get("files")
    if isinstance(files, list):
        for item in files:
            d = _file_record_to_document(item)
            if d:
                collected.append(d)
    if not collected:
        single = result.get("file")
        d = _file_record_to_document(single)
        if d:
            collected.append(d)
    if not collected and isinstance(result.get("fileIds"), list):
        for fid in result["fileIds"]:
            if fid is not None and str(fid).strip():
                collected.append({"id": str(fid).strip()})
    if not collected:
        return
    result["documents"] = collected
    if not result.get("count"):
        result["count"] = len(collected)

def normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
    """
    Normalize raw executor output to match the declared port schema.

@@ -658,8 +723,17 @@ def normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
    if not schema or schemaName == "Transit":
        return result

    if schemaName == "DocumentList":
        _coerce_document_list_upload_fields(result)

    # Only default **required** fields. Optional fields stay absent so DataRefs / context
    # resolution never pick a synthetic `{}` or `[]` (e.g. AiResult.responseData when the
    # model returned plain text only).
    for field in schema.fields:
        if field.name not in result:
        if field.name not in result and field.required:
            result[field.name] = _defaultForType(field.type)

    return result
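
Assuming raw dict output passes through as `result` (the elided top of the function), the behaviour change looks like this for AiResult, whose `responseData` is optional per the catalog hunk above:

# Before: a text-only AI answer gained a synthetic responseData={} that DataRefs
# could pick by mistake. Now only required fields are defaulted.
out = normalizeToSchema({"response": "Hello"}, "AiResult")
# "responseData" not in out  -> optional fields stay absent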

@@ -740,6 +814,9 @@ def _resolveTransitChain(

def deriveFormPayloadSchemaFromParam(node: Dict[str, Any], param_key: str) -> Optional[PortSchema]:
    """Derive output schema from a field-builder JSON list (``fields``, ``formFields``, …)."""
    from modules.features.graphicalEditor.nodeDefinitions.input import FORM_FIELD_TYPES
    _FORM_TYPE_TO_PORT: Dict[str, str] = {f["id"]: f["portType"] for f in FORM_FIELD_TYPES}

    fields_param = (node.get("parameters") or {}).get(param_key)
    if not fields_param or not isinstance(fields_param, list):
        return None

@@ -749,9 +826,11 @@ def deriveFormPayloadSchemaFromParam(node: Dict[str, Any], param_key: str) -> Op
        _desc = resolveText(lab) if lab is not None else fname
        if not str(_desc).strip():
            _desc = fname
        raw_type = str(ftype) if ftype is not None else "str"
        port_type = _FORM_TYPE_TO_PORT.get(raw_type, raw_type)
        portFields.append(PortField(
            name=fname,
            type=str(ftype) if ftype is not None else "str",
            type=port_type,
            description=_desc,
            required=required,
        ))
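
A sketch of the derivation, with the builder-row keys ("name", "label", "type", "required") assumed from the elided loop header:

node = {"parameters": {"formFields": [
    {"name": "amount", "label": "Betrag", "type": "number", "required": True},
    {"name": "note", "label": "Notiz", "type": "text"},
]}}
schema = deriveFormPayloadSchemaFromParam(node, "formFields")
# schema.fields[0].type == "int"  ("number" mapped via _FORM_TYPE_TO_PORT)
# schema.fields[1].type == "str"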

@@ -4028,58 +4028,92 @@ class AppObjects:
            raise

    # -------------------------------------------------------------------------
    # Table Grouping (user-defined groups for FormGeneratorTable instances)
    # Table List Views (saved display presets: filters, sort, groupByLevels)
    # -------------------------------------------------------------------------

    def getTableGrouping(self, contextKey: str):
        """
        Load the group tree for the current user and the given contextKey.

        Returns a TableGrouping instance or None if no grouping has been saved yet.
        contextKey identifies the table instance, e.g. "connections", "prompts",
        "admin/users", "trustee/{instanceId}/documents".
        """
        from modules.datamodels.datamodelPagination import TableGrouping
    def getTableListViews(self, contextKey: str) -> list:
        """Return all saved views for the current user and contextKey."""
        from modules.datamodels.datamodelPagination import TableListView
        try:
            records = self.db.getRecordset(
                TableGrouping,
            rows = self.db.getRecordset(
                TableListView,
                recordFilter={"userId": str(self.userId), "contextKey": contextKey},
            )
            if not records:
                return None
            row = records[0]
            return TableGrouping.model_validate(row) if isinstance(row, dict) else row
        except Exception as e:
            logger.error(f"getTableGrouping failed for user={self.userId} key={contextKey}: {e}")
            return None

    def upsertTableGrouping(self, contextKey: str, rootGroups: list):
        """
        Create or replace the group tree for the current user and contextKey.

        rootGroups is a list of TableGroupNode-compatible dicts (the full tree).
        Returns the saved TableGrouping instance.
        """
        from modules.datamodels.datamodelPagination import TableGrouping
        from modules.shared.timeUtils import getUtcTimestamp
            result = []
            for row in (rows or []):
                try:
            existing = self.getTableGrouping(contextKey)
                    result.append(TableListView.model_validate(row) if isinstance(row, dict) else row)
                except Exception:
                    pass
            return result
        except Exception as e:
            logger.error(f"getTableListViews failed for user={self.userId} context={contextKey}: {e}")
            return []

    def getTableListView(self, contextKey: str, viewKey: str):
        """Return one view by viewKey or None if not found."""
        from modules.datamodels.datamodelPagination import TableListView
        try:
            rows = self.db.getRecordset(
                TableListView,
                recordFilter={"userId": str(self.userId), "contextKey": contextKey, "viewKey": viewKey},
            )
            if not rows:
                return None
            row = rows[0]
            return TableListView.model_validate(row) if isinstance(row, dict) else row
        except Exception as e:
            logger.error(f"getTableListView failed for user={self.userId} key={viewKey}: {e}")
            return None

    def createTableListView(self, contextKey: str, viewKey: str, displayName: str, config: dict):
        """Create a new view. Raises ValueError if viewKey already exists for this context."""
        from modules.datamodels.datamodelPagination import TableListView
        from modules.shared.timeUtils import getUtcTimestamp
        if self.getTableListView(contextKey=contextKey, viewKey=viewKey) is not None:
            raise ValueError(f"View '{viewKey}' already exists for context '{contextKey}'")
        data = {
            "id": existing.id if existing else str(uuid.uuid4()),
            "id": str(uuid.uuid4()),
            "userId": str(self.userId),
            "contextKey": contextKey,
            "rootGroups": rootGroups,
            "viewKey": viewKey,
            "displayName": displayName,
            "config": config,
            "updatedAt": getUtcTimestamp(),
        }
        if existing:
            self.db.recordModify(TableGrouping, existing.id, data)
        else:
            self.db.recordCreate(TableGrouping, data)
        return TableGrouping.model_validate(data)
        try:
            self.db.recordCreate(TableListView, data)
            return TableListView.model_validate(data)
        except Exception as e:
            logger.error(f"upsertTableGrouping failed for user={self.userId} key={contextKey}: {e}")
            logger.error(f"createTableListView failed: {e}")
            raise

    def updateTableListView(self, viewId: str, updates: dict):
        """Update an existing view by its primary key id."""
        from modules.datamodels.datamodelPagination import TableListView
        from modules.shared.timeUtils import getUtcTimestamp
        try:
            updates = {**updates, "updatedAt": getUtcTimestamp()}
            self.db.recordModify(TableListView, viewId, updates)
            rows = self.db.getRecordset(TableListView, recordFilter={"id": viewId})
            if rows:
                row = rows[0]
                return TableListView.model_validate(row) if isinstance(row, dict) else row
            return None
        except Exception as e:
            logger.error(f"updateTableListView failed for id={viewId}: {e}")
            raise

    def deleteTableListView(self, viewId: str) -> bool:
        """Delete a view by primary key id. Returns True on success."""
        from modules.datamodels.datamodelPagination import TableListView
        try:
            self.db.recordDelete(TableListView, viewId)
            return True
        except Exception as e:
            logger.error(f"deleteTableListView failed for id={viewId}: {e}")
            return False
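
A sketch of a round-trip through the new view API (the `app` instance and the config shape are assumptions; the header comment above only names filters, sort and groupByLevels):

view = app.createTableListView(
    contextKey="files/list", viewKey="myOpenDocs", displayName="My open documents",
    config={"filters": [], "sort": "name", "groupByLevels": []},
)
app.updateTableListView(view.id, {"displayName": "Open docs"})
app.getTableListViews("files/list")  # -> [TableListView, ...]
app.deleteTableListView(view.id)     # -> True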

    # Public Methods

@@ -19,7 +19,7 @@ from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetP
from modules.security.rbac import RbacClass
from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import AccessLevel
from modules.datamodels.datamodelFiles import FilePreview, FileItem, FileData
from modules.datamodels.datamodelFiles import FilePreview, FileItem, FileData, FileFolder
from modules.datamodels.datamodelUtils import Prompt
from modules.datamodels.datamodelMessaging import (
    MessagingSubscription,

@@ -115,6 +115,14 @@ class ComponentObjects:
        # Update database context
        self.db.updateContext(self.userId)

    def _effective_user_id(self) -> Optional[str]:
        """User id for audit + FileData writes; singleton hub may unset userId but keep currentUser."""
        if self.userId:
            return self.userId
        if self.currentUser is not None:
            return getattr(self.currentUser, "id", None)
        return None

    def __del__(self):
        """Cleanup method to close database connection."""
        if hasattr(self, 'db') and self.db is not None:

@@ -1068,6 +1076,241 @@ class ComponentObjects:
            logger.error(f"Error converting file record: {str(e)}")
            return None

    # ── Folder methods ─────────────────────────────────────────────────────────

    def getOwnFolderTree(self) -> List[Dict[str, Any]]:
        """Folders owned by the current user, filtered via RBAC."""
        return getRecordsetWithRBAC(
            self.db, FileFolder, self.currentUser,
            recordFilter={"sysCreatedBy": self.userId},
            mandateId=self.mandateId,
            featureInstanceId=self.featureInstanceId,
        )

    def getSharedFolderTree(self) -> List[Dict[str, Any]]:
        """Folders visible via scope but NOT owned by the current user.
        Adds contextOrphan=True when a folder's parentId is not in the result set."""
        allFolders = getRecordsetWithRBAC(
            self.db, FileFolder, self.currentUser,
            mandateId=self.mandateId,
            featureInstanceId=self.featureInstanceId,
        )
        shared = [f for f in allFolders if f.get("sysCreatedBy") != self.userId]
        sharedIds = {f["id"] for f in shared}
        for f in shared:
            f["contextOrphan"] = bool(f.get("parentId") and f["parentId"] not in sharedIds)
        return shared

    def getFolder(self, folderId: str) -> Optional[Dict[str, Any]]:
        """Return a single folder dict or None."""
        results = getRecordsetWithRBAC(
            self.db, FileFolder, self.currentUser,
            recordFilter={"id": folderId},
            mandateId=self.mandateId,
            featureInstanceId=self.featureInstanceId,
        )
        return results[0] if results else None

    def _isFolderOwner(self, folder) -> bool:
        createdBy = (
            getattr(folder, "sysCreatedBy", None)
            or (folder.get("sysCreatedBy") if isinstance(folder, dict) else None)
        )
        return createdBy == self.userId

    def _requireFolderWriteAccess(self, folder, folderId: str, operation: str = "update"):
        """Raise PermissionError if the user cannot mutate this folder.
        Owners always can. Non-owners need RBAC ALL level."""
        if self._isFolderOwner(folder):
            return
        from modules.interfaces.interfaceRbac import buildDataObjectKey
        objectKey = buildDataObjectKey("FileFolder")
        permissions = self.rbac.getUserPermissions(
            self.currentUser, AccessRuleContext.DATA, objectKey,
            mandateId=self.mandateId, featureInstanceId=self.featureInstanceId,
        )
        level = getattr(permissions, operation, None)
        if level != AccessLevel.ALL:
            raise PermissionError(
                f"No permission to {operation} folder {folderId} (not owner, access level: {level})"
            )

    def createFolder(self, name: str, parentId: Optional[str] = None) -> Dict[str, Any]:
        if not self.checkRbacPermission(FileFolder, "create"):
            raise PermissionError("No permission to create folders")
        folder = FileFolder(
            name=name,
            parentId=parentId,
            mandateId=self.mandateId or "",
            featureInstanceId=self.featureInstanceId or "",
            scope="personal",
            neutralize=False,
        )
        self.db.recordCreate(FileFolder, folder)
        return folder.model_dump()

    def renameFolder(self, folderId: str, newName: str) -> Dict[str, Any]:
        folder = self.getFolder(folderId)
        if not folder:
            raise FileNotFoundError(f"Folder {folderId} not found")
        self._requireFolderWriteAccess(folder, folderId, "update")
        self.db.recordModify(FileFolder, folderId, {"name": newName})
        folder["name"] = newName
        return folder

    def moveFolder(self, folderId: str, newParentId: Optional[str] = None) -> Dict[str, Any]:
        folder = self.getFolder(folderId)
        if not folder:
            raise FileNotFoundError(f"Folder {folderId} not found")
        self._requireFolderWriteAccess(folder, folderId, "update")

        if newParentId:
            parent = self.getFolder(newParentId)
            if not parent:
                raise FileNotFoundError(f"Target parent folder {newParentId} not found")
            self._requireFolderWriteAccess(parent, newParentId, "update")
            # Circular-reference guard: newParentId must not be a descendant of folderId
            if self._isDescendant(newParentId, folderId):
                raise ValueError("Cannot move folder into its own subtree (circular reference)")

        self.db.recordModify(FileFolder, folderId, {"parentId": newParentId})
        folder["parentId"] = newParentId
        return folder

    def _isDescendant(self, candidateId: str, ancestorId: str) -> bool:
        """Return True if candidateId is a descendant of (or equal to) ancestorId."""
        visited = set()
        current = candidateId
        while current:
            if current == ancestorId:
                return True
            if current in visited:
                break
            visited.add(current)
            f = self.getFolder(current)
            current = f.get("parentId") if f else None
        return False
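
Worked example of the guard in moveFolder, assuming a chain a -> b -> c (c.parentId == "b", b.parentId == "a"):

# self._isDescendant("c", "a") -> True: the walk c -> b -> a hits the ancestor,
# so moveFolder("a", newParentId="c") raises instead of creating a cycle.
# self._isDescendant("a", "c") -> False: the walk a -> root never reaches "c".
# The visited set terminates the walk even if stored data already contains a cycle.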
|
||||
|
||||
def deleteFolderCascade(self, folderId: str) -> Dict[str, Any]:
|
||||
"""Delete a folder and all owned sub-folders + their files."""
|
||||
folder = self.getFolder(folderId)
|
||||
if not folder:
|
||||
raise FileNotFoundError(f"Folder {folderId} not found")
|
||||
self._requireFolderWriteAccess(folder, folderId, "delete")
|
||||
|
||||
folderIds = self._collectChildFolderIds(folderId)
|
||||
|
||||
# Verify all child folders are owned
|
||||
for fid in folderIds:
|
||||
if fid == folderId:
|
||||
continue
|
||||
child = self.getFolder(fid)
|
||||
if child and not self._isFolderOwner(child):
|
||||
raise PermissionError(f"Cannot delete folder tree: sub-folder {fid} is not owned by you")
|
||||
|
||||
# Collect files in those folders
|
||||
fileRows = []
|
||||
for fid in folderIds:
|
||||
items = self.db.getRecordset(FileItem, recordFilter={"folderId": fid})
|
||||
fileRows.extend(items)
|
||||
|
||||
for item in fileRows:
|
||||
itemOwner = item.get("sysCreatedBy") if isinstance(item, dict) else getattr(item, "sysCreatedBy", None)
|
||||
if itemOwner != self.userId:
|
||||
itemId = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
|
||||
raise PermissionError(f"Cannot delete folder tree: file {itemId} is not owned by you")
|
||||
|
||||
fileIds = [
|
||||
(item.get("id") if isinstance(item, dict) else getattr(item, "id", None))
|
||||
for item in fileRows
|
||||
]
|
||||
|
||||
# Single transaction: delete FileData, FileItem, then FileFolder (children first)
|
||||
self.db._ensure_connection()
|
||||
try:
|
||||
with self.db.connection.cursor() as cursor:
|
||||
if fileIds:
|
||||
cursor.execute('DELETE FROM "FileData" WHERE "id" = ANY(%s)', (fileIds,))
|
||||
cursor.execute('DELETE FROM "FileItem" WHERE "id" = ANY(%s)', (fileIds,))
|
||||
orderedIds = list(folderIds)
|
||||
orderedIds.remove(folderId)
|
||||
orderedIds.append(folderId)
|
||||
if orderedIds:
|
||||
cursor.execute('DELETE FROM "FileFolder" WHERE "id" = ANY(%s)', (orderedIds,))
|
||||
self.db.connection.commit()
|
||||
except Exception:
|
||||
self.db.connection.rollback()
|
||||
raise
|
||||
|
||||
return {"deletedFolders": len(folderIds), "deletedFiles": len(fileIds)}
|
||||
|
||||
def _collectChildFolderIds(self, folderId: str) -> List[str]:
|
||||
"""BFS to collect folderId + all descendant folder IDs owned by user."""
|
||||
result = [folderId]
|
||||
queue = [folderId]
|
||||
while queue:
|
||||
parentId = queue.pop(0)
|
||||
children = self.db.getRecordset(FileFolder, recordFilter={"parentId": parentId})
|
||||
for child in children:
|
||||
cid = child.get("id") if isinstance(child, dict) else getattr(child, "id", None)
|
||||
if cid and cid not in result:
|
||||
result.append(cid)
|
||||
queue.append(cid)
|
||||
return result
|
||||
|
||||
def patchFolderScope(self, folderId: str, scope: str, cascadeToFiles: bool = False) -> Dict[str, Any]:
|
||||
validScopes = {"personal", "featureInstance", "mandate", "global"}
|
||||
if scope not in validScopes:
|
||||
raise ValueError(f"Invalid scope: {scope}. Must be one of {validScopes}")
|
||||
|
||||
folder = self.getFolder(folderId)
|
||||
if not folder:
|
||||
raise FileNotFoundError(f"Folder {folderId} not found")
|
||||
self._requireFolderWriteAccess(folder, folderId, "update")
|
||||
|
||||
if scope == "global":
|
||||
from modules.interfaces.interfaceRbac import buildDataObjectKey
|
||||
objectKey = buildDataObjectKey("FileFolder")
|
||||
permissions = self.rbac.getUserPermissions(
|
||||
self.currentUser, AccessRuleContext.DATA, objectKey,
|
||||
mandateId=self.mandateId, featureInstanceId=self.featureInstanceId,
|
||||
)
|
||||
if getattr(permissions, "update", None) != AccessLevel.ALL:
|
||||
raise PermissionError("Setting global scope requires ALL permission")
|
||||
|
||||
self.db.recordModify(FileFolder, folderId, {"scope": scope})
|
||||
|
||||
filesUpdated = 0
|
||||
if cascadeToFiles:
|
||||
items = self.db.getRecordset(FileItem, recordFilter={"folderId": folderId})
|
||||
for item in items:
|
||||
owner = item.get("sysCreatedBy") if isinstance(item, dict) else getattr(item, "sysCreatedBy", None)
|
||||
if owner == self.userId:
|
||||
iid = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
|
||||
self.db.recordModify(FileItem, iid, {"scope": scope})
|
||||
filesUpdated += 1
|
||||
|
||||
return {"folderId": folderId, "scope": scope, "filesUpdated": filesUpdated}
|
||||
|
||||
def patchFolderNeutralize(self, folderId: str, neutralize: bool) -> Dict[str, Any]:
|
||||
folder = self.getFolder(folderId)
|
||||
if not folder:
|
||||
raise FileNotFoundError(f"Folder {folderId} not found")
|
||||
self._requireFolderWriteAccess(folder, folderId, "update")
|
||||
|
||||
self.db.recordModify(FileFolder, folderId, {"neutralize": neutralize})
|
||||
|
||||
items = self.db.getRecordset(FileItem, recordFilter={"folderId": folderId})
|
||||
filesUpdated = 0
|
||||
for item in items:
|
||||
owner = item.get("sysCreatedBy") if isinstance(item, dict) else getattr(item, "sysCreatedBy", None)
|
||||
if owner == self.userId:
|
||||
iid = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
|
||||
self.db.recordModify(FileItem, iid, {"neutralize": neutralize})
|
||||
filesUpdated += 1
|
||||
|
||||
return {"folderId": folderId, "neutralize": neutralize, "filesUpdated": filesUpdated}
|
||||
|
||||
def _isfileNameUnique(self, fileName: str, excludeFileId: Optional[str] = None) -> bool:
|
||||
"""Checks if a fileName is unique for the current user."""
|
||||
# Get all files filtered by RBAC (will be filtered by user's access level)
|
||||
|
|
@ -1144,9 +1387,30 @@ class ComponentObjects:
|
|||
fileSize=fileSize,
|
||||
fileHash=fileHash,
|
||||
)
|
||||
# Ensure audit user is always stored: workflow/singleton contexts sometimes leave
|
||||
# the connector without _current_user_id, so _saveRecord skips sysCreatedBy →
|
||||
# getFile/createFileData RBAC then breaks (None != self.userId).
|
||||
uid = self._effective_user_id()
|
||||
if uid:
|
||||
fileItem = fileItem.model_copy(update={"sysCreatedBy": str(uid)})
|
||||
|
||||
# Store in database
|
||||
self.db.recordCreate(FileItem, fileItem)
|
||||
verify = self.db.getRecordset(FileItem, recordFilter={"id": fileItem.id})
|
||||
verify_creator = (verify[0].get("sysCreatedBy") if verify else None)
|
||||
logger.info(
|
||||
"createFile: id=%s name=%s scope=%s model_sysCreatedBy=%r db_sysCreatedBy=%r mandateId=%r featureInstanceId=%r "
|
||||
"verify_rows=%s db=%s",
|
||||
fileItem.id,
|
||||
uniqueName,
|
||||
fileItem.scope,
|
||||
getattr(fileItem, "sysCreatedBy", None),
|
||||
verify_creator,
|
||||
mandateId or None,
|
||||
featureInstanceId if featureInstanceId else None,
|
||||
len(verify) if verify else 0,
|
||||
getattr(self.db, "dbDatabase", "?"),
|
||||
)
|
||||
|
||||
return fileItem
|
||||
|
||||
|

@@ -1268,43 +1532,7 @@ class ComponentObjects:
            raise FileDeletionError(f"Error deleting files in batch: {str(e)}")

    def _ensureFeatureInstanceGroup(self, featureInstanceId: str, contextKey: str = "files/list") -> Optional[str]:
        """Return the groupId of the default group for a feature instance.

        Creates the group if it doesn't exist yet."""
        try:
            import modules.interfaces.interfaceDbApp as _appIface
            appInterface = _appIface.getInterface(self._currentUser)
            existing = appInterface.getTableGrouping(contextKey)
            nodes = [n.model_dump() if hasattr(n, 'model_dump') else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
            # Look for group with name matching featureInstanceId
            def _find(nds):
                for nd in nds:
                    nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
                    nmeta = nd.get("meta", {}) if isinstance(nd, dict) else getattr(nd, "meta", {})
                    if (nmeta or {}).get("featureInstanceId") == featureInstanceId:
                        return nid
                    subs = nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", [])
                    result = _find(subs)
                    if result:
                        return result
                return None
            found = _find(nodes)
            if found:
                return found
            # Create new group
            import uuid
            newId = str(uuid.uuid4())
            newGroup = {
                "id": newId,
                "name": featureInstanceId,
                "itemIds": [],
                "subGroups": [],
                "meta": {"featureInstanceId": featureInstanceId},
            }
            nodes.append(newGroup)
            appInterface.upsertTableGrouping(contextKey, nodes)
            return newId
        except Exception as e:
            logger.error(f"_ensureFeatureInstanceGroup failed: {e}")
        """Stub — file group tree removed. Returns None."""
        return None

    def copyFile(self, sourceFileId: str, newFileName: Optional[str] = None) -> FileItem:

@@ -1345,13 +1573,133 @@ class ComponentObjects:

    # FileData methods - data operations

    def _getFileItemForDataWrite(self, fileId: str) -> Optional[FileItem]:
        """Resolve FileItem for storing FileData: RBAC-aware getFile, then same-user row fallback.

        createFile() can insert a row that getFile() still hides (e.g. scope NULL vs GROUP rules,
        or connector / context edge cases). The creator must still be allowed to attach blob data.
        """
        logger.info(
            "[FileData] resolve start fileId=%s iface_userId=%r effective_uid=%r mandateId=%r featureInstanceId=%r db=%s",
            fileId,
            self.userId,
            self._effective_user_id(),
            self.mandateId,
            self.featureInstanceId,
            getattr(self.db, "dbDatabase", "?"),
        )
        file = self.getFile(fileId)
        if file:
            logger.info("[FileData] getFile OK fileId=%s", fileId)
            return file
        uid = self._effective_user_id()
        if not uid:
            logger.error(
                "[FileData] FAIL no user id fileId=%s userId=%r hasCurrentUser=%s",
                fileId,
                self.userId,
                self.currentUser is not None,
            )
            return None
        uid_s = str(uid)
        rows = self.db.getRecordset(FileItem, recordFilter={"id": fileId})
        if not rows:
            logger.error(
                "[FileData] FAIL no FileItem row fileId=%s (createFile committed to same db? db=%s)",
                fileId,
                getattr(self.db, "dbDatabase", "?"),
            )
            return None
        row = dict(rows[0])
        creator = row.get("sysCreatedBy")
        creator_s = str(creator) if creator is not None else None
        if creator_s != uid_s:
            if not creator_s:
                try:
                    self.db.recordModify(FileItem, fileId, {"sysCreatedBy": uid_s})
                    row["sysCreatedBy"] = uid_s
                    logger.warning(
                        "[FileData] patched NULL sysCreatedBy fileId=%s -> %s",
                        fileId,
                        uid_s,
                    )
                except Exception as e:
                    logger.error(
                        "[FileData] FAIL patch sysCreatedBy fileId=%s: %s",
                        fileId,
                        e,
                        exc_info=True,
                    )
                    return None
            else:
                # _saveRecord used to overwrite explicit creators with contextvar "system"
                if creator_s == "system":
                    try:
                        self.db.recordModify(FileItem, fileId, {"sysCreatedBy": uid_s})
                        row["sysCreatedBy"] = uid_s
                        logger.warning(
                            "[FileData] patched sysCreatedBy system→user fileId=%s -> %s",
                            fileId,
                            uid_s,
                        )
                    except Exception as e:
                        logger.error(
                            "[FileData] FAIL patch system sysCreatedBy fileId=%s: %s",
                            fileId,
                            e,
                            exc_info=True,
                        )
                        return None
                else:
                    logger.error(
                        "[FileData] FAIL creator mismatch fileId=%s row.sysCreatedBy=%r (%s) effective_uid=%r (%s) scope=%r",
                        fileId,
                        creator,
                        type(creator).__name__,
                        uid,
                        type(uid).__name__,
                        row.get("scope"),
                    )
                    return None
        logger.info(
            "[FileData] RBAC miss, owner fallback OK fileId=%s scope=%r sysCreatedBy=%r",
            fileId,
            row.get("scope"),
            row.get("sysCreatedBy"),
        )
        try:
            if row.get("sysCreatedAt") is None or row.get("sysCreatedAt") in (0, 0.0):
                row["sysCreatedAt"] = getUtcTimestamp()
            if row.get("scope") is None:
                row["scope"] = "personal"
            if row.get("neutralize") is None:
                row["neutralize"] = False
            return FileItem(**row)
        except Exception as e:
            logger.error(
                "[FileData] FAIL FileItem(**row) fileId=%s keys=%s err=%s",
                fileId,
                list(row.keys()),
                e,
                exc_info=True,
            )
            return None
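
The fallback logic above reduces to a small decision table. A condensed sketch (a hypothetical helper, not present in the codebase):

```python
# Illustrative condensation of the fallback rules in _getFileItemForDataWrite.
def may_attach_blob(creator: str | None, caller: str) -> bool:
    if creator in (None, "", "system"):   # missing or contextvar-"system" creator is patched to caller
        return True
    return creator == caller              # owner fallback; any other creator is a real RBAC violation
```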
    def createFileData(self, fileId: str, data: bytes) -> bool:
        """Stores the binary data of a file in the database."""
        try:
            logger.info(
                "[FileData] createFileData enter fileId=%s bytes=%s",
                fileId,
                len(data) if data is not None else 0,
            )
            # Check file access
            file = self.getFile(fileId)
            file = self._getFileItemForDataWrite(fileId)
            if not file:
                logger.error(f"File with ID {fileId} not found when storing data")
                logger.error(
                    "[FileData] FAIL _getFileItemForDataWrite returned None fileId=%s",
                    fileId,
                )
                return False

            # Determine if this is a text-based format

@@ -1396,12 +1744,10 @@ class ComponentObjects:
            self.db.recordCreate(FileData, fileDataObj)

            # Clear cache to ensure fresh data

            logger.debug(f"Successfully stored data for file {fileId} (base64Encoded: {base64Encoded})")
            logger.info("[FileData] recordCreate OK fileId=%s base64Encoded=%s", fileId, base64Encoded)
            return True
        except Exception as e:
            logger.error(f"Error storing data for file {fileId}: {str(e)}")
            logger.error("Error storing data for file %s: %s", fileId, e, exc_info=True)
            return False

    def getFileData(self, fileId: str) -> Optional[bytes]:
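
For orientation, the happy path that all of this logging traces, as a rough sketch (the `createFile` arguments shown are abbreviated assumptions, not the verified signature):

```python
# Rough sketch of the intended write sequence; argument names are illustrative.
mgmt = interfaceDbManagement.getInterface(user, mandateId=mandateId)
fileItem = mgmt.createFile("report.pdf", mimeType="application/pdf")  # FileItem row, sysCreatedBy = user
assert mgmt.createFileData(fileItem.id, pdfBytes)                     # resolves the row, writes FileData
assert mgmt.getFileData(fileItem.id) == pdfBytes                      # the creator can always read back
```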

@@ -204,6 +204,7 @@ TABLE_NAMESPACE = {
    # Files - user-owned
    "FileItem": "files",
    "FileData": "files",
    "FileFolder": "files",
    # Automation - user-owned
    "AutomationDefinition": "automation",
    "AutomationTemplate": "automation",

@@ -746,6 +747,7 @@ def buildFilesScopeWhereClause(
    Only own files: sysCreatedBy = currentUser

    WITH instance context (instance pages):
    - scope = 'personal' AND sysCreatedBy = me (creator's personal files; e.g. workflow outputs)
    - sysCreatedBy = me AND featureInstanceId = X (own personal files of this instance)
    - scope = 'featureInstance' AND featureInstanceId = X
    - scope = 'mandate' AND mandateId = M (M = mandate of the instance)

@@ -779,6 +781,15 @@ def buildFilesScopeWhereClause(
    scopeParts: List[str] = []
    scopeValues: List = []

    # Personal files created by this user must remain visible even when the request
    # carries mandate/instance context (GROUP reads use this clause). Otherwise
    # createFile → createFileData → getFile fails and workflow outputs vanish from /files.
    # Also treat scope IS NULL as legacy/personal for the owner (column default not applied).
    scopeParts.append(
        '(("scope" = \'personal\' OR "scope" IS NULL) AND "sysCreatedBy" = %s)'
    )
    scopeValues.append(currentUser.id)

    if featureInstanceId:
        # 1) Own personal files of this specific instance
        scopeParts.append('("sysCreatedBy" = %s AND "featureInstanceId" = %s)')
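
These parts are presumably OR-joined into one parameterized WHERE clause further down; a sketch of that assembly (the join itself is outside this hunk, so treat the exact shape as an assumption):

```python
# Assumed assembly step; the hunk only shows the parts being collected.
whereClause = "(" + " OR ".join(scopeParts) + ")"
# -> (("scope" = 'personal' OR "scope" IS NULL) AND "sysCreatedBy" = %s)
#    OR ("sysCreatedBy" = %s AND "featureInstanceId" = %s) OR ...
cur.execute(f'SELECT * FROM "FileItem" WHERE {whereClause}', scopeValues)
```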
11
modules/migrations/_archive/README.md
Normal file

@@ -0,0 +1,11 @@
# Archived one-off migrations

`migrate_folders_to_groups.py` copies `FileFolder` + `FileItem.folderId` into `TableGrouping` (`files/list`). It was used during an experimental UI path; **product choice** is to keep physical folders (`FileFolder`, `folderId`) and recover `FormGeneratorTree` (see `wiki/c-work/1-plan/2026-05-formgenerator-tree-and-folder-recovery.md`).

Run only if you need a historical data rescue:

```bash
cd gateway
python -m modules.migrations._archive.migrate_folders_to_groups --verbose
python -m modules.migrations._archive.migrate_folders_to_groups --execute --verbose
```

1
modules/migrations/_archive/__init__.py
Normal file

@@ -0,0 +1 @@
# Subpackage for archived one-off migration scripts (not part of normal app startup).

@@ -1,11 +1,16 @@
"""
One-time migration: Convert FileFolder tree + FileItem.folderId → table_groupings.
One-time migration: Convert FileFolder tree + FileItem.folderId to table_groupings.

Archived per wiki plan 2026-05-formgenerator-tree-and-folder-recovery (Stage 1.A).
Product direction: keep FileFolder + folderId; do not run DROP migrations.
This script remains for audit / one-off data rescue only.

Run this BEFORE dropping the physical FileFolder table and FileItem.folderId column
from the database (those are separate Alembic/SQL steps).
from the database (those would be separate Alembic/SQL steps -- not part of current product path).

Usage:
    python -m modules.migrations.migrate_folders_to_groups [--dry-run] [--verbose]
Usage (from gateway working directory):
    python -m modules.migrations._archive.migrate_folders_to_groups [--dry-run] [--verbose]
    python -m modules.migrations._archive.migrate_folders_to_groups --execute --verbose

Steps:
1. For each distinct (userId, mandateId) combination that has FileFolder records:

@@ -30,6 +35,14 @@ from typing import Optional
logger = logging.getLogger(__name__)


def _scalarRow(row):
    if row is None:
        return None
    if isinstance(row, dict):
        return next(iter(row.values()))
    return row[0]
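
`_scalarRow` exists because the cursor's row factory may return dicts or tuples depending on connector configuration; a quick illustration of the assumed behavior:

```python
assert _scalarRow({"ok": True}) is True     # dict row factory (e.g. a RealDictCursor-style cursor)
assert _scalarRow((True,)) is True          # default tuple row
assert _scalarRow(None) is None             # no row fetched
```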

# ── Helpers ──────────────────────────────────────────────────────────────────

def _build_tree(folders: list, parent_id: Optional[str]) -> list:

@@ -76,11 +89,19 @@ def _now_ts() -> str:
def run_migration(dry_run: bool = True, verbose: bool = False):
    """Main migration entry point."""
    logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
    logger.info(f"Starting folder→group migration (dry_run={dry_run})")
    logger.info(f"Starting folder to group migration (dry_run={dry_run})")

    from modules.connectors.connectorDbPostgre import getCachedConnector
    from modules.shared.configuration import APP_CONFIG

    connector = getCachedConnector()
    connector = getCachedConnector(
        dbHost=APP_CONFIG.get("DB_HOST", "_no_config_default_data"),
        dbDatabase="poweron_management",
        dbUser=APP_CONFIG.get("DB_USER"),
        dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
        dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
        userId=None,
    )
    if not connector or not connector.connection:
        logger.error("Could not obtain a DB connection. Aborting.")
        return

@@ -93,17 +114,17 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
        SELECT EXISTS (
            SELECT 1 FROM information_schema.tables
            WHERE table_name = 'FileFolder'
        )
        ) AS ok
    """)
    folder_table_exists = cur.fetchone()[0]
    folder_table_exists = bool(_scalarRow(cur.fetchone()))

    cur.execute("""
        SELECT EXISTS (
            SELECT 1 FROM information_schema.columns
            WHERE table_name = 'FileItem' AND column_name = 'folderId'
        )
        ) AS ok
    """)
    folder_column_exists = cur.fetchone()[0]
    folder_column_exists = bool(_scalarRow(cur.fetchone()))

    if not folder_table_exists and not folder_column_exists:
        logger.info("FileFolder table and FileItem.folderId column not found — migration already applied or not needed.")

@@ -126,7 +147,7 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
        })
    logger.info(f"Loaded folders for {len(folders_by_user)} (user, mandate) combinations")

    # ── 3. Load file→folder assignments ──────────────────────────────────────
    # ── 3. Load file to folder assignments ────────────────────────────────────
    files_by_key: dict = {}
    if folder_column_exists:
        cur.execute(

@@ -139,7 +160,7 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
    total_files = sum(
        sum(len(v) for v in d.values()) for d in files_by_key.values()
    )
    logger.info(f"Found {total_files} file→folder assignments across {len(files_by_key)} (user, mandate) combos")
    logger.info(f"Found {total_files} file to folder assignments across {len(files_by_key)} (user, mandate) combos")

    # ── 4. Combine and upsert groupings ──────────────────────────────────────
    all_keys = set(folders_by_user.keys()) | set(files_by_key.keys())

@@ -231,7 +252,7 @@ def run_migration(dry_run: bool = True, verbose: bool = False):


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Migrate FileFolder tree to table_groupings")
    parser = argparse.ArgumentParser(description="Migrate FileFolder tree to table_groupings (archived script)")
    parser.add_argument("--dry-run", action="store_true", default=True, help="Preview only, no DB writes (default)")
    parser.add_argument("--execute", action="store_true", help="Actually write to DB (disables dry-run)")
    parser.add_argument("--verbose", action="store_true", help="Show per-user details")

@@ -9,9 +9,9 @@ Features:
- Admin endpoints: Manage settings, add credits, view all accounts
"""

from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response, Query, Header
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response, Query, Header, status
from fastapi.responses import JSONResponse
from typing import List, Dict, Any, Optional
from fastapi import status
import logging
from datetime import date, datetime, timezone
from pydantic import BaseModel, Field

@@ -24,7 +24,13 @@ from modules.interfaces.interfaceDbBilling import getInterface as getBillingInte
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import getService as getBillingService
import json
import math
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.datamodels.datamodelPagination import (
    PaginationParams,
    PaginatedResponse,
    PaginationMetadata,
    normalize_pagination_dict,
    AppliedViewMeta,
)
from modules.datamodels.datamodelBilling import (
    BillingAccount,
    BillingTransaction,

@@ -478,31 +484,172 @@ def getBalanceForMandate(
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/transactions", response_model=List[TransactionResponse])
def _normalize_billing_tx_dict(t: Dict[str, Any]) -> Dict[str, Any]:
    """Make billing transaction rows JSON/grouping-safe (datetimes → str, enums → str)."""
    from datetime import date as date_cls, datetime as dt_cls

    r = dict(t)
    for k, v in list(r.items()):
        if isinstance(v, dt_cls):
            r[k] = v.isoformat()
        elif isinstance(v, date_cls):
            r[k] = v.isoformat()
    for ek in ("transactionType", "referenceType"):
        if ek in r and r[ek] is not None and not isinstance(r[ek], str):
            ev = r[ek]
            r[ek] = getattr(ev, "value", None) or str(ev)
    return r
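
A quick illustration of what the normalizer does (the enum type below is a stand-in; any `Enum` with a `.value` behaves the same):

```python
from datetime import datetime
from enum import Enum

class TxType(Enum):          # stand-in for the real TransactionTypeEnum
    DEBIT = "DEBIT"

row = {"sysCreatedAt": datetime(2026, 5, 1, 12, 0), "transactionType": TxType.DEBIT}
assert _normalize_billing_tx_dict(row) == {
    "sysCreatedAt": "2026-05-01T12:00:00",
    "transactionType": "DEBIT",
}
```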

def _load_billing_user_transactions_normalized(billingService) -> List[Dict[str, Any]]:
    raw = billingService.getTransactionHistory(limit=5000)
    return [_normalize_billing_tx_dict(t) for t in raw]


def _view_user_transactions_filtered_list(
    billing_interface,
    load_mandate_ids: Optional[List[str]],
    effective_scope: str,
    personal_user_id: Optional[str],
    pagination_params: PaginationParams,
    ctx_user,
) -> List[Dict[str, Any]]:
    """Up to 5000 rows: SQL window + in-memory filters/sort (incl. enriched columns)."""
    from modules.interfaces.interfaceDbManagement import ComponentObjects

    bulk_params = pagination_params.model_copy(deep=True)
    bulk_params.page = 1
    bulk_params.pageSize = 5000
    bulk_result = billing_interface.getTransactionsForMandatesPaginated(
        mandateIds=load_mandate_ids,
        pagination=bulk_params,
        scope=effective_scope,
        userId=personal_user_id,
    )
    all_items = [_normalize_billing_tx_dict(dict(x)) for x in bulk_result.items]
    comp = ComponentObjects()
    comp.setUserContext(ctx_user)
    if pagination_params.filters:
        all_items = comp._applyFilters(all_items, pagination_params.filters)
    if pagination_params.sort:
        all_items = comp._applySorting(all_items, pagination_params.sort)
    return all_items
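
The pattern here is to fetch one oversized SQL window, then filter and sort in memory so enriched columns (e.g. `mandateName`) are filterable too. A generic sketch of that shape (hypothetical predicate and key, not the project helpers):

```python
def windowed_in_memory(fetch_page, predicate, sort_key, cap=5000):
    """Fetch up to `cap` rows once, then filter and sort in Python."""
    rows = fetch_page(page=1, page_size=cap)      # single SQL round trip
    rows = [r for r in rows if predicate(r)]      # filters may touch enriched columns
    return sorted(rows, key=sort_key)
```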

@router.get("/transactions")
@limiter.limit("30/minute")
def getTransactions(
    request: Request,
    limit: int = Query(default=50, ge=1, le=500),
    offset: int = Query(default=0, ge=0),
    ctx: RequestContext = Depends(getRequestContext)
    pagination: Optional[str] = Query(
        None,
        description="JSON PaginationParams for table UI (filters, sort, viewKey, groupByLevels).",
    ),
    mode: Optional[str] = Query(None, description="'filterValues' | 'ids' with pagination"),
    column: Optional[str] = Query(None, description="Column for mode=filterValues"),
    ctx: RequestContext = Depends(getRequestContext),
):
    """
    Get transaction history across all mandates the user belongs to.

    Without ``pagination`` query: legacy behaviour — returns a JSON array of
    transactions (`limit`/`offset` window).

    With ``pagination`` JSON: returns ``{ items, pagination, groupLayout?, appliedView? }``.
    Table list views use contextKey ``billing/transactions``.
    """
    try:
        billingService = getBillingService(
            ctx.user,
            ctx.mandateId,
            featureCode="billing"
            featureCode="billing",
        )

        # Fetch enough transactions for pagination
        transactions = billingService.getTransactionHistory(limit=offset + limit)
        if pagination:
            from modules.routes.routeHelpers import (
                applyViewToParams,
                buildGroupLayout,
                effective_group_by_levels,
                handleFilterValuesInMemory,
                handleIdsInMemory,
                resolveView,
            )
            from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
            from modules.interfaces.interfaceDbManagement import ComponentObjects

        # Convert to response model
        result = []
        for t in transactions[offset:offset + limit]:
            result.append(TransactionResponse(
            CONTEXT_KEY = "billing/transactions"

            try:
                paginationDict = json.loads(pagination)
                if not paginationDict:
                    raise ValueError("empty pagination")
                paginationDict = normalize_pagination_dict(paginationDict)
                paginationParams = PaginationParams(**paginationDict)
            except (json.JSONDecodeError, ValueError, TypeError) as e:
                raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")

            appInterface = getAppInterface(ctx.user)
            viewKey = paginationParams.viewKey
            viewConfig, viewDisplayName = resolveView(appInterface, CONTEXT_KEY, viewKey)
            viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
            paginationParams = applyViewToParams(paginationParams, viewConfig)
            groupByLevels = effective_group_by_levels(paginationParams, viewConfig)

            all_items = _load_billing_user_transactions_normalized(billingService)

            if mode == "filterValues":
                if not column:
                    raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
                return handleFilterValuesInMemory(all_items, column, pagination)

            if mode == "ids":
                return handleIdsInMemory(all_items, pagination)

            comp = ComponentObjects()
            comp.setUserContext(ctx.user)
            if paginationParams.filters:
                all_items = comp._applyFilters(all_items, paginationParams.filters)
            if paginationParams.sort:
                all_items = comp._applySorting(all_items, paginationParams.sort)

            totalItems = len(all_items)
            totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0

            if not groupByLevels:
                pstart = (paginationParams.page - 1) * paginationParams.pageSize
                page_items = all_items[pstart : pstart + paginationParams.pageSize]
                group_layout = None
            else:
                page_items, group_layout = buildGroupLayout(
                    all_items,
                    groupByLevels,
                    paginationParams.page,
                    paginationParams.pageSize,
                )

            resp: Dict[str, Any] = {
                "items": page_items,
                "pagination": PaginationMetadata(
                    currentPage=paginationParams.page,
                    pageSize=paginationParams.pageSize,
                    totalItems=totalItems,
                    totalPages=totalPages,
                    sort=paginationParams.sort,
                    filters=paginationParams.filters,
                ).model_dump(),
            }
            if group_layout:
                resp["groupLayout"] = group_layout.model_dump()
            if viewMeta:
                resp["appliedView"] = viewMeta.model_dump()
            return JSONResponse(content=resp)
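
For illustration, a plausible request/response pair for the paginated branch (shapes inferred from the code above; all values are made up):

```python
# GET /billing/transactions?pagination={"page":1,"pageSize":25,
#     "sort":[{"field":"sysCreatedAt","direction":"desc"}],
#     "groupByLevels":[{"field":"mandateName"}]}
#
# -> {
#      "items": [...25 grouped rows...],
#      "pagination": {"currentPage": 1, "pageSize": 25, "totalItems": 312, "totalPages": 13, ...},
#      "groupLayout": {...},          # only when grouping is active
#      "appliedView": {...}           # only when a viewKey resolved
#    }
```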

        transactions = billingService.getTransactionHistory(limit=offset + limit)
        result: List[TransactionResponse] = []
        for t in transactions[offset : offset + limit]:
            result.append(
                TransactionResponse(
                    id=t.get("id"),
                    accountId=t.get("accountId"),
                    transactionType=TransactionTypeEnum(t.get("transactionType", "DEBIT")),

@@ -517,11 +664,13 @@ def getTransactions(
                    createdByUserId=t.get("createdByUserId"),
                    sysCreatedAt=t.get("sysCreatedAt"),
                    mandateId=t.get("mandateId"),
                    mandateName=t.get("mandateName")
                ))

                    mandateName=t.get("mandateName"),
                )
            )
        return result

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting billing transactions: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@@ -1757,7 +1906,7 @@ def getUserViewStatistics(


@router.get("/view/users/transactions", response_model=PaginatedResponse[UserTransactionResponse])
@limiter.limit("30/minute")
@limiter.limit("120/minute")
def getUserViewTransactions(
    request: Request,
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),

@@ -1808,7 +1957,6 @@ def getUserViewTransactions(
    if mode == "filterValues":
        if not column:
            raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
        from fastapi.responses import JSONResponse
        crossFilterParams = parseCrossFilterPagination(column, pagination)
        values = billingInterface.getTransactionDistinctValues(
            mandateIds=loadMandateIds,

@@ -1820,7 +1968,6 @@ def getUserViewTransactions(
        return JSONResponse(content=values)

    if mode == "ids":
        from fastapi.responses import JSONResponse
        paginationParams = None
        if pagination:
            import json as _json

@@ -1835,6 +1982,66 @@ def getUserViewTransactions(
        ) if hasattr(billingInterface, 'getTransactionIds') else []
        return JSONResponse(content=ids)

    if mode == "groupSummary":
        if not pagination:
            raise HTTPException(status_code=400, detail="pagination required for groupSummary")
        import json as _json
        from collections import defaultdict
        from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
        from modules.routes.routeHelpers import (
            applyViewToParams,
            effective_group_by_levels,
            resolveView,
        )

        pagination_dict = _json.loads(pagination)
        pagination_dict = normalize_pagination_dict(pagination_dict)
        summary_params = PaginationParams(**pagination_dict)
        CONTEXT_KEY = "billing/view/users/transactions"
        app_interface = getAppInterface(ctx.user)
        summary_vk = summary_params.viewKey
        summary_view_cfg, _ = resolveView(app_interface, CONTEXT_KEY, summary_vk)
        summary_params = applyViewToParams(summary_params, summary_view_cfg)
        levels = effective_group_by_levels(summary_params, summary_view_cfg)
        if not levels or not levels[0].get("field"):
            raise HTTPException(
                status_code=400,
                detail="groupByLevels[0].field required for groupSummary",
            )
        field = levels[0]["field"]
        null_label = str(levels[0].get("nullLabel") or "—")
        all_rows = _view_user_transactions_filtered_list(
            billingInterface,
            loadMandateIds,
            scope,
            personalUserId,
            summary_params,
            ctx.user,
        )
        counts: Dict[str, int] = defaultdict(int)
        labels: Dict[str, str] = {}
        null_key = "\x00NULL"
        for item in all_rows:
            raw = item.get(field)
            if raw is None or raw == "":
                nk = null_key
                labels[nk] = null_label
            else:
                nk = str(raw)
                if nk not in labels:
                    labels[nk] = nk
            counts[nk] += 1
        groups_out: List[Dict[str, Any]] = []
        for nk in sorted(counts.keys(), key=lambda x: (x == null_key, labels.get(x, x).lower())):
            groups_out.append(
                {
                    "value": None if nk == null_key else nk,
                    "label": labels.get(nk, nk),
                    "totalCount": counts[nk],
                }
            )
        return JSONResponse(content={"groups": groups_out})
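
So a `mode=groupSummary` call grouped on `mandateName` would return something like this (illustrative values):

```python
# {"groups": [
#     {"value": "Acme GmbH", "label": "Acme GmbH", "totalCount": 41},
#     {"value": "Beta AG",   "label": "Beta AG",   "totalCount": 7},
#     {"value": None,        "label": "—",         "totalCount": 3}   # nullLabel bucket sorts last
# ]}
```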

    paginationParams = None
    if pagination:
        import json as _json

@@ -1847,15 +2054,21 @@ def getUserViewTransactions(
    if not paginationParams:
        paginationParams = PaginationParams(page=1, pageSize=50)

    result = billingInterface.getTransactionsForMandatesPaginated(
        mandateIds=loadMandateIds,
        pagination=paginationParams,
        scope=effectiveScope,
        userId=personalUserId,
    from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
    from modules.routes.routeHelpers import (
        applyViewToParams,
        buildGroupLayout,
        effective_group_by_levels,
        resolveView,
    )

    logger.debug(f"SQL-paginated {result.totalItems} transactions for user {ctx.user.id} "
                 f"(scope={scope}, mandateId={mandateId}, page={paginationParams.page})")
    CONTEXT_KEY = "billing/view/users/transactions"
    appInterface = getAppInterface(ctx.user)
    viewKey = paginationParams.viewKey
    viewConfig, viewDisplayName = resolveView(appInterface, CONTEXT_KEY, viewKey)
    viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
    paginationParams = applyViewToParams(paginationParams, viewConfig)
    groupByLevels = effective_group_by_levels(paginationParams, viewConfig)

    def _toResponse(d):
        return UserTransactionResponse(

@@ -1875,7 +2088,54 @@ def getUserViewTransactions(
            mandateId=d.get("mandateId"),
            mandateName=d.get("mandateName"),
            userId=d.get("userId"),
            userName=d.get("userName")
            userName=d.get("userName"),
        )

    if groupByLevels:
        all_items = _view_user_transactions_filtered_list(
            billingInterface,
            loadMandateIds,
            effectiveScope,
            personalUserId,
            paginationParams,
            ctx.user,
        )

        totalItems = len(all_items)
        totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
        page_items, group_layout = buildGroupLayout(
            all_items,
            groupByLevels,
            paginationParams.page,
            paginationParams.pageSize,
        )
        resp: Dict[str, Any] = {
            "items": [_toResponse(d).model_dump(mode="json") for d in page_items],
            "pagination": PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,
                totalItems=totalItems,
                totalPages=totalPages,
                sort=paginationParams.sort,
                filters=paginationParams.filters,
            ).model_dump(mode="json"),
        }
        if group_layout:
            resp["groupLayout"] = group_layout.model_dump(mode="json")
        if viewMeta:
            resp["appliedView"] = viewMeta.model_dump(mode="json")
        return JSONResponse(content=resp)

    result = billingInterface.getTransactionsForMandatesPaginated(
        mandateIds=loadMandateIds,
        pagination=paginationParams,
        scope=effectiveScope,
        userId=personalUserId,
    )

    logger.debug(
        f"SQL-paginated {result.totalItems} transactions for user {ctx.user.id} "
        f"(scope={scope}, mandateId={mandateId}, page={paginationParams.page})"
    )

    return PaginatedResponse(

@@ -1887,7 +2147,7 @@ def getUserViewTransactions(
            totalPages=result.totalPages,
            sort=paginationParams.sort,
            filters=paginationParams.filters,
        )
        ),
    )

    except Exception as e:

@@ -57,8 +57,8 @@ def _svc_for_connection(current_user: User, connection: UserConnection):
    services = getServices(current_user, None)
    if not services.clickup.setAccessTokenFromConnection(connection):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=routeApiMsg("Failed to set ClickUp access token"),
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=routeApiMsg("Failed to set ClickUp access token. Connection may be expired or invalid."),
        )
    return services.clickup

@@ -17,6 +17,7 @@ import logging
import json
import math
from urllib.parse import quote
from fastapi.responses import JSONResponse

from modules.datamodels.datamodelUam import User, UserConnection, AuthAuthority, ConnectionStatus
from modules.datamodels.datamodelSecurity import Token

@@ -154,12 +155,12 @@ async def get_connections(
    """
    from modules.routes.routeHelpers import (
        handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels,
        handleGroupingInRequest, applyGroupScopeFilter,
        resolveView, applyViewToParams, buildGroupLayout, effective_group_by_levels,
    )
    from modules.datamodels.datamodelPagination import AppliedViewMeta

    CONTEXT_KEY = "connections"

    # Parse pagination params early — needed for grouping in all modes
    paginationParams = None
    if pagination:
        try:

@@ -171,7 +172,13 @@ async def get_connections(
            raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")

    interface = getInterface(currentUser)
    groupCtx = handleGroupingInRequest(paginationParams, interface, CONTEXT_KEY)

    # Resolve view and merge config into params
    viewKey = paginationParams.viewKey if paginationParams else None
    viewConfig, viewDisplayName = resolveView(interface, CONTEXT_KEY, viewKey)
    viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
    paginationParams = applyViewToParams(paginationParams, viewConfig)
    groupByLevels = effective_group_by_levels(paginationParams, viewConfig)

    def _buildEnhancedItems():
        connections = interface.getUserConnections(currentUser.id)
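
This resolve-then-merge sequence repeats across routes; a condensed sketch of the apparent contract (signatures inferred from the call sites, so treat them as assumptions):

```python
# viewConfig: persisted per-context settings; values set on the request win.
viewConfig, displayName = resolveView(interface, "connections", viewKey)   # (config | None, name | None)
params = applyViewToParams(params, viewConfig)          # fill filters/sort the request left unset
levels = effective_group_by_levels(params, viewConfig)  # request groupByLevels, else the view's
```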

@@ -200,7 +207,6 @@ async def get_connections(
        try:
            items = _buildEnhancedItems()
            enrichRowsWithFkLabels(items, UserConnection)
            items = applyGroupScopeFilter(items, groupCtx.itemIds)
            return handleFilterValuesInMemory(items, column, pagination)
        except Exception as e:
            logger.error(f"Error getting filter values for connections: {str(e)}")

@@ -208,19 +214,60 @@ async def get_connections(

    if mode == "ids":
        try:
            items = applyGroupScopeFilter(_buildEnhancedItems(), groupCtx.itemIds)
            return handleIdsInMemory(items, pagination)
            return handleIdsInMemory(_buildEnhancedItems(), pagination)
        except Exception as e:
            logger.error(f"Error getting IDs for connections: {str(e)}")
            raise HTTPException(status_code=500, detail=str(e))

    if mode == "groupSummary":
        if not pagination:
            raise HTTPException(status_code=400, detail="pagination required for groupSummary")
        from modules.routes.routeHelpers import (
            apply_strategy_b_filters_and_sort,
            build_group_summary_groups,
        )
        if not groupByLevels or not groupByLevels[0].get("field"):
            raise HTTPException(
                status_code=400,
                detail="groupByLevels[0].field required for groupSummary",
            )
        field = groupByLevels[0]["field"]
        null_label = str(groupByLevels[0].get("nullLabel") or "—")
        connections = interface.getUserConnections(currentUser.id)
        try:
            # NOTE: Cannot use db.getRecordsetPaginated() here because each connection
            # is enriched with computed tokenStatus/tokenExpiresAt (requires per-row DB lookup).
            # Token refresh also may trigger re-fetch. Connections per user are typically < 10,
            # so in-memory pagination is acceptable.
            refresh_result = await token_refresh_service.refresh_expired_tokens(currentUser.id)
            if refresh_result.get("refreshed", 0) > 0:
                logger.info(
                    "Silently refreshed %s tokens for user %s (groupSummary)",
                    refresh_result["refreshed"],
                    currentUser.id,
                )
                connections = interface.getUserConnections(currentUser.id)
        except Exception as e:
            logger.warning(f"Silent token refresh failed for user {currentUser.id}: {str(e)}")
        enhanced_connections_dict = []
        for connection in connections:
            tokenStatus, tokenExpiresAt = getTokenStatusForConnection(interface, connection.id)
            enhanced_connections_dict.append({
                "id": connection.id,
                "userId": connection.userId,
                "authority": connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority),
                "externalId": connection.externalId,
                "externalUsername": connection.externalUsername or "",
                "externalEmail": connection.externalEmail,
                "status": connection.status.value if hasattr(connection.status, 'value') else str(connection.status),
                "connectedAt": connection.connectedAt,
                "lastChecked": connection.lastChecked,
                "expiresAt": connection.expiresAt,
                "tokenStatus": tokenStatus,
                "tokenExpiresAt": tokenExpiresAt
            })
        enrichRowsWithFkLabels(enhanced_connections_dict, UserConnection)
        filtered = apply_strategy_b_filters_and_sort(enhanced_connections_dict, paginationParams, currentUser)
        groups_out = build_group_summary_groups(filtered, field, null_label)
        return JSONResponse(content={"groups": groups_out})

    # SECURITY FIX: All users (including admins) can only see their own connections
    try:
        connections = interface.getUserConnections(currentUser.id)

        # Perform silent token refresh for expired OAuth connections

@@ -235,7 +282,7 @@ async def get_connections(
        enhanced_connections_dict = []
        for connection in connections:
            tokenStatus, tokenExpiresAt = getTokenStatusForConnection(interface, connection.id)
            connection_dict = {
            enhanced_connections_dict.append({
                "id": connection.id,
                "userId": connection.userId,
                "authority": connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority),

@@ -248,46 +295,31 @@ async def get_connections(
                "expiresAt": connection.expiresAt,
                "tokenStatus": tokenStatus,
                "tokenExpiresAt": tokenExpiresAt
            }
            enhanced_connections_dict.append(connection_dict)
            })

        enrichRowsWithFkLabels(enhanced_connections_dict, UserConnection)
        enhanced_connections_dict = applyGroupScopeFilter(enhanced_connections_dict, groupCtx.itemIds)

        if paginationParams is None:
            return {
                "items": enhanced_connections_dict,
                "pagination": None,
                "groupTree": groupCtx.groupTree,
            }
            return {"items": enhanced_connections_dict, "pagination": None}

        # Apply filtering if provided
        # Apply filtering and sorting over full list (Strategy B)
        component_interface = ComponentObjects()
        component_interface.setUserContext(currentUser)
        if paginationParams.filters:
            component_interface = ComponentObjects()
            component_interface.setUserContext(currentUser)
            enhanced_connections_dict = component_interface._applyFilters(
                enhanced_connections_dict,
                paginationParams.filters
            )

        # Apply sorting if provided
            enhanced_connections_dict = component_interface._applyFilters(enhanced_connections_dict, paginationParams.filters)
        if paginationParams.sort:
            component_interface = ComponentObjects()
            component_interface.setUserContext(currentUser)
            enhanced_connections_dict = component_interface._applySorting(
                enhanced_connections_dict,
                paginationParams.sort
            )
            enhanced_connections_dict = component_interface._applySorting(enhanced_connections_dict, paginationParams.sort)

        totalItems = len(enhanced_connections_dict)
        totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0

        startIdx = (paginationParams.page - 1) * paginationParams.pageSize
        endIdx = startIdx + paginationParams.pageSize
        paged_connections = enhanced_connections_dict[startIdx:endIdx]
        # Strategy B grouping: operates on full filtered+sorted list, then slices
        page_items, groupLayout = buildGroupLayout(
            enhanced_connections_dict, groupByLevels, paginationParams.page, paginationParams.pageSize
        )

        return {
            "items": paged_connections,
        response: dict = {
            "items": page_items,
            "pagination": PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,

@@ -296,8 +328,12 @@ async def get_connections(
                sort=paginationParams.sort,
                filters=paginationParams.filters
            ).model_dump(),
            "groupTree": groupCtx.groupTree,
        }
        if groupLayout:
            response["groupLayout"] = groupLayout.model_dump()
        if viewMeta:
            response["appliedView"] = viewMeta.model_dump()
        return response

    except HTTPException:
        raise
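
The Strategy B shape used here and in the files route is worth spelling out once: filter and sort the full in-memory list, then let the group layout do the page slicing. A generic sketch (a hypothetical standalone version of the slicing contract, not the project's `buildGroupLayout`):

```python
def strategy_b_page(items, filters, sort_key, page, page_size):
    """Filter + sort the whole list, then slice one page (grouping slices the same way)."""
    rows = [r for r in items if all(f(r) for f in filters)]
    rows.sort(key=sort_key)
    start = (page - 1) * page_size
    return rows[start:start + page_size], len(rows)
```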

@@ -5,13 +5,14 @@ from fastapi.responses import JSONResponse
from typing import List, Dict, Any, Optional
import logging
import json
import math

# Import auth module
from modules.auth import limiter, getCurrentUser, getRequestContext, RequestContext

# Import interfaces
import modules.interfaces.interfaceDbManagement as interfaceDbManagement
from modules.datamodels.datamodelFiles import FileItem, FilePreview
from modules.datamodels.datamodelFiles import FileItem, FilePreview, FileFolder
from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict

@@ -72,14 +73,18 @@ def _resolveFileWithScope(currentUser: User, context: RequestContext, fileId: st
    return scopedMgmt, fileItem


async def _autoIndexFile(fileId: str, fileName: str, mimeType: str, user):
async def _autoIndexFile(fileId: str, fileName: str, mimeType: str, user, *, mandateId: str = None, featureInstanceId: str = None):
    """Background task: pre-scan + extraction + knowledge indexing.
    Step 1: Structure Pre-Scan (AI-free) -> FileContentIndex (persisted)
    Step 2: Content extraction via runExtraction -> ContentParts
    Step 3: KnowledgeService.requestIngestion -> idempotent chunking + embedding -> Knowledge Store"""
    userId = user.id if hasattr(user, "id") else str(user)
    try:
        mgmtInterface = interfaceDbManagement.getInterface(user)
        mgmtInterface = interfaceDbManagement.getInterface(
            user,
            mandateId=mandateId or None,
            featureInstanceId=featureInstanceId or None,
        )
        mgmtInterface.updateFile(fileId, {"status": "processing"})

        rawBytes = mgmtInterface.getFileData(fileId)
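
The three stages in that docstring form a linear hand-off. A runnable schematic (the stage functions below are placeholders, not the real project APIs):

```python
# Schematic of the three stages; stage bodies are placeholders.
def prescan_structure(raw, mime):       # 1) AI-free structure pre-scan -> FileContentIndex
    return {"mime": mime, "size": len(raw)}

def run_extraction(raw, mime, index):   # 2) content extraction -> ContentParts
    return [raw.decode("utf-8", errors="ignore")]

def request_ingestion(file_id, parts):  # 3) idempotent chunking + embedding -> Knowledge Store
    return {"fileId": file_id, "chunks": len(parts)}

def auto_index(file_id, raw, mime):
    return request_ingestion(file_id, run_extraction(raw, mime, prescan_structure(raw, mime)))
```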

@@ -250,6 +255,213 @@ router = APIRouter(
    }
)


@router.get("/folders/tree")
@limiter.limit("120/minute")
def get_folder_tree(
    request: Request,
    owner: str = Query("me", description="'me' | 'shared'"),
    currentUser: User = Depends(getCurrentUser),
    context: RequestContext = Depends(getRequestContext),
):
    try:
        managementInterface = interfaceDbManagement.getInterface(
            currentUser,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        o = (owner or "me").strip().lower()
        if o == "me":
            return managementInterface.getOwnFolderTree()
        if o == "shared":
            return managementInterface.getSharedFolderTree()
        raise HTTPException(status_code=400, detail="owner must be 'me' or 'shared'")
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"get_folder_tree error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
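
A quick client-side illustration of the new folder endpoints (host and router prefix are hypothetical, auth headers omitted, response shapes assumed from the handlers):

```python
import requests  # illustrative client only

base = "https://gateway.example/files"   # hypothetical host + router prefix
tree = requests.get(f"{base}/folders/tree", params={"owner": "me"}).json()
new = requests.post(f"{base}/folders", json={"name": "Reports", "parentId": None}).json()
requests.patch(f"{base}/folders/{new['id']}", json={"name": "Reports 2026"})
requests.post(f"{base}/folders/{new['id']}/move", json={"parentId": tree[0]["id"]})
```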
@router.post("/folders", status_code=status.HTTP_201_CREATED)
|
||||
@limiter.limit("30/minute")
|
||||
def create_folder(
|
||||
request: Request,
|
||||
body: Dict[str, Any] = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
try:
|
||||
name = body.get("name")
|
||||
if not name or not str(name).strip():
|
||||
raise HTTPException(status_code=400, detail="name is required")
|
||||
parentId = body.get("parentId") or None
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
return managementInterface.createFolder(str(name).strip(), parentId)
|
||||
except PermissionError as e:
|
||||
raise HTTPException(status_code=403, detail=str(e))
|
||||
except interfaceDbManagement.FileNotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"create_folder error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch("/folders/{folderId}")
|
||||
@limiter.limit("30/minute")
|
||||
def rename_folder(
|
||||
request: Request,
|
||||
folderId: str = Path(...),
|
||||
body: Dict[str, Any] = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
try:
|
||||
name = body.get("name")
|
||||
if not name or not str(name).strip():
|
||||
raise HTTPException(status_code=400, detail="name is required")
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
return managementInterface.renameFolder(folderId, str(name).strip())
|
||||
except PermissionError as e:
|
||||
raise HTTPException(status_code=403, detail=str(e))
|
||||
except interfaceDbManagement.FileNotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"rename_folder error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/folders/{folderId}/move")
|
||||
@limiter.limit("30/minute")
|
||||
def move_folder(
|
||||
request: Request,
|
||||
folderId: str = Path(...),
|
||||
body: Dict[str, Any] = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
try:
|
||||
newParentId = body.get("parentId")
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
return managementInterface.moveFolder(folderId, newParentId or None)
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except PermissionError as e:
|
||||
raise HTTPException(status_code=403, detail=str(e))
|
||||
except interfaceDbManagement.FileNotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"move_folder error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||

@router.delete("/folders/{folderId}")
@limiter.limit("30/minute")
def delete_folder(
    request: Request,
    folderId: str = Path(...),
    cascade: bool = Query(True, description="Cascade delete sub-folders and files"),
    currentUser: User = Depends(getCurrentUser),
    context: RequestContext = Depends(getRequestContext),
):
    try:
        managementInterface = interfaceDbManagement.getInterface(
            currentUser,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        return managementInterface.deleteFolderCascade(folderId)
    except PermissionError as e:
        raise HTTPException(status_code=403, detail=str(e))
    except interfaceDbManagement.FileNotFoundError as e:
        raise HTTPException(status_code=404, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"delete_folder error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.patch("/folders/{folderId}/scope")
|
||||
@limiter.limit("30/minute")
|
||||
def patch_folder_scope(
|
||||
request: Request,
|
||||
folderId: str = Path(...),
|
||||
body: Dict[str, Any] = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
try:
|
||||
scope = body.get("scope")
|
||||
if not scope:
|
||||
raise HTTPException(status_code=400, detail="scope is required")
|
||||
cascadeToFiles = body.get("cascadeToFiles", False)
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
return managementInterface.patchFolderScope(folderId, scope, cascadeToFiles)
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except PermissionError as e:
|
||||
raise HTTPException(status_code=403, detail=str(e))
|
||||
except interfaceDbManagement.FileNotFoundError as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"patch_folder_scope error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||

@router.patch("/folders/{folderId}/neutralize")
@limiter.limit("30/minute")
def patch_folder_neutralize(
    request: Request,
    folderId: str = Path(...),
    body: Dict[str, Any] = Body(...),
    currentUser: User = Depends(getCurrentUser),
    context: RequestContext = Depends(getRequestContext),
):
    try:
        neutralize = body.get("neutralize")
        if neutralize is None:
            raise HTTPException(status_code=400, detail="neutralize is required")
        managementInterface = interfaceDbManagement.getInterface(
            currentUser,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        return managementInterface.patchFolderNeutralize(folderId, bool(neutralize))
    except PermissionError as e:
        raise HTTPException(status_code=403, detail=str(e))
    except interfaceDbManagement.FileNotFoundError as e:
        raise HTTPException(status_code=404, detail=str(e))
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"patch_folder_neutralize error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/list")
|
||||
@limiter.limit("120/minute")
|
||||
def get_files(
|
||||
|
|

@@ -289,9 +501,10 @@ def get_files(
        from modules.routes.routeHelpers import (
            handleIdsMode,
            handleFilterValuesInMemory,
            handleGroupingInRequest, applyGroupScopeFilter,
            resolveView, applyViewToParams, buildGroupLayout, effective_group_by_levels,
        )
        import modules.interfaces.interfaceDbApp as _appIface
        from modules.datamodels.datamodelPagination import AppliedViewMeta

        managementInterface = interfaceDbManagement.getInterface(
            currentUser,

@@ -299,11 +512,40 @@ def get_files(
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
        )
        appInterface = _appIface.getInterface(currentUser)
        groupCtx = handleGroupingInRequest(paginationParams, appInterface, "files/list")

        # Resolve view and merge config into params
        viewKey = paginationParams.viewKey if paginationParams else None
        viewConfig, viewDisplayName = resolveView(appInterface, "files/list", viewKey)
        viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
        paginationParams = applyViewToParams(paginationParams, viewConfig)
        groupByLevels = effective_group_by_levels(paginationParams, viewConfig)

        def _filesToDicts(fileItems):
            return [f.model_dump() if hasattr(f, "model_dump") else (dict(f) if not isinstance(f, dict) else f) for f in fileItems]

        if mode == "groupSummary":
            if not pagination:
                raise HTTPException(status_code=400, detail="pagination required for groupSummary")
            from modules.routes.routeHelpers import (
                apply_strategy_b_filters_and_sort,
                build_group_summary_groups,
            )
            if not groupByLevels or not groupByLevels[0].get("field"):
                raise HTTPException(
                    status_code=400,
                    detail="groupByLevels[0].field required for groupSummary",
                )
            field = groupByLevels[0]["field"]
            null_label = str(groupByLevels[0].get("nullLabel") or "—")
            allFiles = managementInterface.getAllFiles()
            allItems = enrichRowsWithFkLabels(
                _filesToDicts(allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])),
                FileItem,
            )
            filtered = apply_strategy_b_filters_and_sort(allItems, paginationParams, currentUser)
            groups_out = build_group_summary_groups(filtered, field, null_label)
            return JSONResponse(content={"groups": groups_out})

        if mode == "filterValues":
            if not column:
                raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")

@@ -311,18 +553,18 @@ def get_files(
            items = allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])
            itemDicts = _filesToDicts(items)
            enrichRowsWithFkLabels(itemDicts, FileItem)
            itemDicts = applyGroupScopeFilter(itemDicts, groupCtx.itemIds)
            return handleFilterValuesInMemory(itemDicts, column, pagination)

        if mode == "ids":
            recordFilter = {"sysCreatedBy": managementInterface.userId}
            return handleIdsMode(managementInterface.db, FileItem, pagination, recordFilter)

        if not groupByLevels:
            # No grouping: let DB handle pagination directly (fastest path)
            result = managementInterface.getAllFiles(pagination=paginationParams)

            if paginationParams:
                enriched = applyGroupScopeFilter(enrichRowsWithFkLabels(_filesToDicts(result.items), FileItem), groupCtx.itemIds)
                return {
            if paginationParams and hasattr(result, 'items'):
                enriched = enrichRowsWithFkLabels(_filesToDicts(result.items), FileItem)
                resp: dict = {
                    "items": enriched,
                    "pagination": PaginationMetadata(
                        currentPage=paginationParams.page,

@@ -332,12 +574,51 @@ def get_files(
                        sort=paginationParams.sort,
                        filters=paginationParams.filters
                    ).model_dump(),
                    "groupTree": groupCtx.groupTree,
                }
            else:
                items = result if isinstance(result, list) else (result.items if hasattr(result, "items") else [result])
                enriched = applyGroupScopeFilter(enrichRowsWithFkLabels(_filesToDicts(items), FileItem), groupCtx.itemIds)
                return {"items": enriched, "pagination": None, "groupTree": groupCtx.groupTree}
                resp = {"items": enrichRowsWithFkLabels(_filesToDicts(items), FileItem), "pagination": None}
            if viewMeta:
                resp["appliedView"] = viewMeta.model_dump()
            return resp

        # Strategy B grouping: load full list, group, then slice
        allFiles = managementInterface.getAllFiles()
        allItems = enrichRowsWithFkLabels(
            _filesToDicts(allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])),
            FileItem,
        )

        from modules.routes.routeHelpers import apply_strategy_b_filters_and_sort
        if paginationParams.filters or paginationParams.sort:
            allItems = apply_strategy_b_filters_and_sort(allItems, paginationParams, currentUser)

        if not paginationParams:
            resp = {"items": allItems, "pagination": None}
            if viewMeta:
                resp["appliedView"] = viewMeta.model_dump()
            return resp

        totalItems = len(allItems)
        totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
        page_items, groupLayout = buildGroupLayout(allItems, groupByLevels, paginationParams.page, paginationParams.pageSize)

        resp = {
            "items": page_items,
            "pagination": PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,
                totalItems=totalItems,
                totalPages=totalPages,
                sort=paginationParams.sort,
                filters=paginationParams.filters
            ).model_dump(),
        }
        if groupLayout:
            resp["groupLayout"] = groupLayout.model_dump()
        if viewMeta:
            resp["appliedView"] = viewMeta.model_dump()
        return resp
    except HTTPException:
        raise
    except Exception as e:

@@ -348,34 +629,11 @@ def get_files(
    )


def _addFileToGroup(appInterface, fileId: str, groupId: str, contextKey: str = "files/list"):
    """Add a file to a group in the persisted groupTree (upsert)."""
    from modules.routes.routeHelpers import _collectItemIds
    try:
        existing = appInterface.getTableGrouping(contextKey)
        if not existing:
            return
        nodes = [n.model_dump() if hasattr(n, 'model_dump') else n for n in existing.rootGroups]
        def _add(nds):
            for nd in nds:
                nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
                if nid == groupId:
                    itemIds = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
                    if fileId not in itemIds:
                        itemIds.append(fileId)
                    if isinstance(nd, dict):
                        nd["itemIds"] = itemIds
                    else:
                        nd.itemIds = itemIds
                    return True
                subs = nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", [])
                if _add(subs):
                    return True
            return False
        _add(nodes)
        appInterface.upsertTableGrouping(contextKey, nodes)
    except Exception as e:
        logger.warning(f"_addFileToGroup failed: {e}")
def _LEGACY_addFileToGroup_REMOVED():
    """Removed — file-group tree no longer exists. Use multi-select bulk operations."""
    pass


@router.post("/upload", status_code=status.HTTP_201_CREATED)
@ -385,7 +643,6 @@ async def upload_file(
|
|||
file: UploadFile = File(...),
|
||||
workflowId: Optional[str] = Form(None),
|
||||
featureInstanceId: Optional[str] = Form(None),
|
||||
groupId: Optional[str] = Form(None),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> JSONResponse:
|
||||
|
|
@ -419,12 +676,6 @@ async def upload_file(
|
|||
managementInterface.updateFile(fileItem.id, {"featureInstanceId": featureInstanceId})
|
||||
fileItem.featureInstanceId = featureInstanceId
|
||||
|
||||
# Add to group if groupId was provided
|
||||
if groupId:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
_addFileToGroup(appInterface, fileItem.id, groupId)
|
||||
|
||||
# Determine response message based on duplicate type
|
||||
if duplicateType == "exact_duplicate":
|
||||
message = f"File '{file.filename}' already exists with identical content. Reusing existing file."
|
||||
|
|
@ -462,6 +713,8 @@ async def upload_file(
|
|||
fileName=fileItem.fileName,
|
||||
mimeType=fileItem.mimeType,
|
||||
user=currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
))
|
||||
except Exception as indexErr:
|
||||
logger.warning(f"Auto-index trigger failed (non-blocking): {indexErr}")
|
||||
|
|
@ -526,82 +779,172 @@ def batch_delete_items(
|
|||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ── Group bulk endpoints ──────────────────────────────────────────────────────
|
||||
|
||||
def _get_group_item_ids(contextKey: str, groupId: str, appInterface) -> set:
|
||||
"""Collect all file IDs in a group and its sub-groups from the stored groupTree."""
|
||||
from modules.routes.routeHelpers import _collectItemIds
|
||||
try:
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
if not existing:
|
||||
return set()
|
||||
nodes = [n.model_dump() if hasattr(n, 'model_dump') else n for n in existing.rootGroups]
|
||||
result = _collectItemIds(nodes, groupId)
|
||||
return result or set()
|
||||
except Exception as e:
|
||||
logger.error(f"_get_group_item_ids failed for groupId={groupId}: {e}")
|
||||
return set()
|
||||
|
||||
|
||||
@router.patch("/groups/{groupId}/scope")
|
||||
@limiter.limit("60/minute")
|
||||
def patch_group_scope(
|
||||
@router.post("/batch-download")
|
||||
@limiter.limit("10/minute")
|
||||
def batchDownload(
|
||||
request: Request,
|
||||
body: Dict[str, Any] = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Download multiple files and/or folders as a single ZIP archive,
|
||||
preserving the folder hierarchy as ZIP paths."""
|
||||
import io, zipfile
|
||||
|
||||
fileIds = body.get("fileIds") or []
|
||||
folderIds = body.get("folderIds") or []
|
||||
|
||||
if not fileIds and not folderIds:
|
||||
raise HTTPException(status_code=400, detail="fileIds or folderIds required")
|
||||
|
||||
try:
|
||||
mgmt = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
|
||||
folderCache: dict[str, dict] = {}
|
||||
|
||||
def _getFolder(fid: str):
|
||||
if fid not in folderCache:
|
||||
f = mgmt.getFolder(fid)
|
||||
folderCache[fid] = f if f else {}
|
||||
return folderCache[fid]
|
||||
|
||||
def _folderPath(fid: str) -> str:
|
||||
"""Build the full path for a folder by walking up parentId."""
|
||||
parts: list[str] = []
|
||||
current = fid
|
||||
visited: set[str] = set()
|
||||
while current and current not in visited:
|
||||
visited.add(current)
|
||||
folder = _getFolder(current)
|
||||
if not folder:
|
||||
break
|
||||
parts.append(folder.get("name", current))
|
||||
current = folder.get("parentId")
|
||||
parts.reverse()
|
||||
return "/".join(parts)
|
||||
|
||||
# Collect files from requested folders (recursive)
|
||||
fileEntries: list[tuple[str, str]] = []
|
||||
seenFileIds: set[str] = set()
|
||||
|
||||
for fid in folderIds:
|
||||
childFolderIds = mgmt._collectChildFolderIds(fid)
|
||||
for cfid in childFolderIds:
|
||||
prefix = _folderPath(cfid)
|
||||
items = mgmt.db.getRecordset(FileItem, recordFilter={"folderId": cfid})
|
||||
for item in items:
|
||||
itemId = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
|
||||
if itemId and itemId not in seenFileIds:
|
||||
seenFileIds.add(itemId)
|
||||
fileEntries.append((itemId, prefix))
|
||||
|
||||
# Loose files (not via folder selection)
|
||||
for fid in fileIds:
|
||||
if fid in seenFileIds:
|
||||
continue
|
||||
seenFileIds.add(fid)
|
||||
fileMeta = mgmt.getFile(fid)
|
||||
if not fileMeta:
|
||||
continue
|
||||
fileFolderId = fileMeta.get("folderId") if isinstance(fileMeta, dict) else getattr(fileMeta, "folderId", None)
|
||||
prefix = _folderPath(fileFolderId) if fileFolderId else ""
|
||||
fileEntries.append((fid, prefix))
|
||||
|
||||
if not fileEntries:
|
||||
raise HTTPException(status_code=404, detail="No downloadable files found")
|
||||
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
|
||||
for fid, prefix in fileEntries:
|
||||
try:
|
||||
fileMeta = mgmt.getFile(fid)
|
||||
fileData = mgmt.getFileData(fid)
|
||||
if fileMeta and fileData:
|
||||
name = (fileMeta.get("fileName") if isinstance(fileMeta, dict) else getattr(fileMeta, "fileName", fid)) or fid
|
||||
zipPath = f"{prefix}/{name}" if prefix else name
|
||||
zf.writestr(zipPath, fileData)
|
||||
except Exception as fe:
|
||||
logger.warning(f"batch_download: skipping file {fid}: {fe}")
|
||||
buf.seek(0)
|
||||
from fastapi.responses import StreamingResponse
|
||||
return StreamingResponse(
|
||||
buf,
|
||||
media_type="application/zip",
|
||||
headers={"Content-Disposition": 'attachment; filename="download.zip"'},
|
||||
)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"batch_download error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
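A hypothetical client call against the new endpoint; the base URL and token are placeholders, and the /api/files router prefix is an assumption (the prefix is not shown in this diff):

    import io
    import zipfile

    import requests

    # Placeholder endpoint and credentials; adjust to the deployment.
    resp = requests.post(
        "http://localhost:8000/api/files/batch-download",
        json={"fileIds": ["file-1"], "folderIds": ["folder-1"]},
        headers={"Authorization": "Bearer <TOKEN>"},
        timeout=60,
    )
    resp.raise_for_status()
    with zipfile.ZipFile(io.BytesIO(resp.content)) as zf:
        # Folder hierarchy is preserved as ZIP paths, e.g. "Reports/2025/q1.pdf".
        print(zf.namelist())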

# ── Bulk file operations (replace former group-based bulk routes) ─────────────

@router.post("/bulk/scope")
@limiter.limit("30/minute")
def bulk_set_scope(
request: Request,
groupId: str = Path(..., description="Group ID"),
body: dict = Body(...),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext),
):
"""Set scope for all files in a group (recursive)."""
scope = body.get("scope")
if not scope:
raise HTTPException(status_code=400, detail="scope is required")
"""Set scope for a list of files by their IDs."""
fileIds: list = body.get("fileIds") or []
scope: str = body.get("scope") or ""
if not fileIds:
raise HTTPException(status_code=400, detail="fileIds is required")
validScopes = {"personal", "featureInstance", "mandate", "global"}
if scope not in validScopes:
raise HTTPException(status_code=400, detail=f"Invalid scope. Must be one of {validScopes}")
if scope == "global" and not context.isSysAdmin:
raise HTTPException(status_code=403, detail="Only sysadmins can set global scope")
try:
import modules.interfaces.interfaceDbApp as _appIface
managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
appInterface = _appIface.getInterface(currentUser)
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
updated = 0
for fid in fileIds:
try:
managementInterface.updateFile(fid, {"scope": scope})
updated += 1
except Exception as e:
logger.error(f"patch_group_scope: failed to update file {fid}: {e}")
return {"groupId": groupId, "scope": scope, "filesUpdated": updated}
logger.error(f"bulk_set_scope: failed for file {fid}: {e}")
return {"scope": scope, "filesUpdated": updated}
except HTTPException:
raise
except Exception as e:
logger.error(f"patch_group_scope error: {e}")
logger.error(f"bulk_set_scope error: {e}")
raise HTTPException(status_code=500, detail=str(e))
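A hypothetical request for the new bulk scope route (base URL, token, and the /api/files prefix are assumptions):

    import requests

    resp = requests.post(
        "http://localhost:8000/api/files/bulk/scope",
        json={"fileIds": ["file-1", "file-2"], "scope": "mandate"},
        headers={"Authorization": "Bearer <TOKEN>"},
        timeout=30,
    )
    print(resp.json())  # e.g. {"scope": "mandate", "filesUpdated": 2}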

@router.patch("/groups/{groupId}/neutralize")
@limiter.limit("60/minute")
def patch_group_neutralize(
@router.post("/bulk/neutralize")
@limiter.limit("30/minute")
def bulk_set_neutralize(
request: Request,
groupId: str = Path(..., description="Group ID"),
body: dict = Body(...),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext),
):
"""Toggle neutralize for all files in a group (recursive, incl. knowledge purge/reindex)."""
"""Set neutralize flag for a list of files by their IDs (incl. knowledge purge/reindex)."""
fileIds: list = body.get("fileIds") or []
neutralize = body.get("neutralize")
if not fileIds:
raise HTTPException(status_code=400, detail="fileIds is required")
if neutralize is None:
raise HTTPException(status_code=400, detail="neutralize is required")
try:
import modules.interfaces.interfaceDbApp as _appIface
managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
appInterface = _appIface.getInterface(currentUser)
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
updated = 0
for fid in fileIds:
try:

@@ -612,39 +955,37 @@ def patch_group_neutralize(

kIface = interfaceDbKnowledge.getInterface(currentUser)
kIface.purgeFileKnowledge(fid)
except Exception as ke:
logger.warning(f"patch_group_neutralize: knowledge purge failed for {fid}: {ke}")
logger.warning(f"bulk_set_neutralize: knowledge purge failed for {fid}: {ke}")
updated += 1
except Exception as e:
logger.error(f"patch_group_neutralize: failed for file {fid}: {e}")
return {"groupId": groupId, "neutralize": neutralize, "filesUpdated": updated}
logger.error(f"bulk_set_neutralize: failed for file {fid}: {e}")
return {"neutralize": neutralize, "filesUpdated": updated}
except HTTPException:
raise
except Exception as e:
logger.error(f"patch_group_neutralize error: {e}")
logger.error(f"bulk_set_neutralize error: {e}")
raise HTTPException(status_code=500, detail=str(e))

@router.get("/groups/{groupId}/download")
@limiter.limit("20/minute")
async def download_group_zip(
@router.post("/bulk/download-zip")
@limiter.limit("10/minute")
async def bulk_download_zip(
request: Request,
groupId: str = Path(..., description="Group ID"),
body: dict = Body(...),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext),
):
"""Download all files in a group as a ZIP archive."""
"""Download a list of files as a ZIP archive."""
import io, zipfile
fileIds: list = body.get("fileIds") or []
if not fileIds:
raise HTTPException(status_code=400, detail="fileIds is required")
try:
import modules.interfaces.interfaceDbApp as _appIface
managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
appInterface = _appIface.getInterface(currentUser)
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
if not fileIds:
raise HTTPException(status_code=404, detail="Group not found or empty")
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
for fid in fileIds:

@@ -652,63 +993,21 @@ async def download_group_zip(

fileMeta = managementInterface.getFile(fid)
fileData = managementInterface.getFileData(fid)
if fileMeta and fileData:
name = (fileMeta.get("fileName") if isinstance(fileMeta, dict) else getattr(fileMeta, "fileName", fid)) or fid
name = (getattr(fileMeta, "fileName", None) or fid)
zf.writestr(name, fileData)
except Exception as fe:
logger.warning(f"download_group_zip: skipping file {fid}: {fe}")
logger.warning(f"bulk_download_zip: skipping file {fid}: {fe}")
buf.seek(0)
from fastapi.responses import StreamingResponse
return StreamingResponse(
buf,
media_type="application/zip",
headers={"Content-Disposition": f'attachment; filename="group-{groupId}.zip"'},
headers={"Content-Disposition": 'attachment; filename="files.zip"'},
)
except HTTPException:
raise
except Exception as e:
logger.error(f"download_group_zip error: {e}")
raise HTTPException(status_code=500, detail=str(e))

@router.delete("/groups/{groupId}")
@limiter.limit("30/minute")
def delete_group(
request: Request,
groupId: str = Path(..., description="Group ID"),
deleteItems: bool = Query(False, description="If true, also delete all files in the group"),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext),
):
"""Remove a group from the groupTree. Optionally delete all its files."""
try:
import modules.interfaces.interfaceDbApp as _appIface
appInterface = _appIface.getInterface(currentUser)
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
# Remove group from tree
existing = appInterface.getTableGrouping("files/list")
if existing:
from modules.routes.routeHelpers import _removeGroupFromTree
newRoots = _removeGroupFromTree([n.model_dump() if hasattr(n, 'model_dump') else n for n in existing.rootGroups], groupId)
appInterface.upsertTableGrouping("files/list", newRoots)
# Optionally delete files
deletedFiles = 0
if deleteItems:
managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
for fid in fileIds:
try:
managementInterface.deleteFile(fid)
deletedFiles += 1
except Exception as e:
logger.error(f"delete_group: failed to delete file {fid}: {e}")
return {"groupId": groupId, "deletedFiles": deletedFiles}
except HTTPException:
raise
except Exception as e:
logger.error(f"delete_group error: {e}")
logger.error(f"bulk_download_zip error: {e}")
raise HTTPException(status_code=500, detail=str(e))

@@ -759,7 +1058,11 @@ def updateFileScope(

async def _runReindexAfterScopeChange():
try:
await _autoIndexFile(fileId=fileId, fileName=fn, mimeType=mt, user=context.user)
await _autoIndexFile(
fileId=fileId, fileName=fn, mimeType=mt, user=context.user,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
except Exception as ex:
logger.warning("Re-index after scope change failed for %s: %s", fileId, ex)

@@ -837,7 +1140,11 @@ def updateFileNeutralize(

async def _runReindexAfterNeutralizeToggle():
try:
await _autoIndexFile(fileId=fileId, fileName=fn, mimeType=mt, user=context.user)
await _autoIndexFile(
fileId=fileId, fileName=fn, mimeType=mt, user=context.user,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
except Exception as ex:
logger.error("Re-index after neutralize toggle failed for %s: %s (file has NO index until next re-index)", fileId, ex)

@@ -909,7 +1216,7 @@ def update_file(

) -> FileItem:
"""Update file info"""
try:
_EDITABLE_FIELDS = {"fileName", "scope", "tags", "description", "neutralize"}
_EDITABLE_FIELDS = {"fileName", "folderId", "scope", "tags", "description", "neutralize"}
safeData = {k: v for k, v in file_info.items() if k in _EDITABLE_FIELDS}
if not safeData:
raise HTTPException(status_code=400, detail=routeApiMsg("No editable fields provided"))

@@ -131,11 +131,9 @@ def get_mandates(

handleFilterValuesInMemory, handleIdsInMemory,
handleFilterValuesMode, handleIdsMode,
parseCrossFilterPagination,
handleGroupingInRequest, applyGroupScopeFilter,
)

appInterface = interfaceDbApp.getRootInterface()
groupCtx = handleGroupingInRequest(paginationParams, appInterface, "mandates")

def _mandateItemsForAdmin():
items = []

@@ -154,23 +152,18 @@ def get_mandates(

values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination)
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
else:
mandateItems = applyGroupScopeFilter(_mandateItemsForAdmin(), groupCtx.itemIds)
return handleFilterValuesInMemory(mandateItems, column, pagination)
return handleFilterValuesInMemory(_mandateItemsForAdmin(), column, pagination)

if mode == "ids":
if isPlatformAdmin:
return handleIdsMode(appInterface.db, Mandate, pagination)
else:
mandateItems = applyGroupScopeFilter(_mandateItemsForAdmin(), groupCtx.itemIds)
return handleIdsInMemory(mandateItems, pagination)
return handleIdsInMemory(_mandateItemsForAdmin(), pagination)

if isPlatformAdmin:
result = appInterface.getAllMandates(pagination=paginationParams)
items = result.items if hasattr(result, 'items') else (result if isinstance(result, list) else [])
items = applyGroupScopeFilter(
[i.model_dump() if hasattr(i, 'model_dump') else (i if isinstance(i, dict) else vars(i)) for i in items],
groupCtx.itemIds,
)
items = [i.model_dump() if hasattr(i, 'model_dump') else (i if isinstance(i, dict) else vars(i)) for i in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=items,

@@ -182,13 +175,11 @@ def get_mandates(

sort=paginationParams.sort,
filters=paginationParams.filters
),
groupTree=groupCtx.groupTree,
)
else:
return PaginatedResponse(items=items, pagination=None, groupTree=groupCtx.groupTree)
return PaginatedResponse(items=items, pagination=None)
else:
mandateItems = applyGroupScopeFilter(_mandateItemsForAdmin(), groupCtx.itemIds)
return PaginatedResponse(items=mandateItems, pagination=None, groupTree=groupCtx.groupTree)
return PaginatedResponse(items=_mandateItemsForAdmin(), pagination=None)

except HTTPException:
raise

@@ -3,8 +3,10 @@

from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Query
from typing import List, Dict, Any, Optional
from fastapi import status
from fastapi.responses import JSONResponse
import logging
import json
import math

# Import auth module
from modules.auth import limiter, getCurrentUser

@@ -46,13 +48,13 @@ def get_prompts(

"""
from modules.routes.routeHelpers import (
handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels,
handleGroupingInRequest, applyGroupScopeFilter,
resolveView, applyViewToParams, buildGroupLayout, effective_group_by_levels,
)
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
from modules.datamodels.datamodelPagination import AppliedViewMeta

CONTEXT_KEY = "prompts"

# Parse pagination params early — needed for grouping in all modes
paginationParams = None
if pagination:
try:

@@ -64,7 +66,13 @@ def get_prompts(

raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")

appInterface = getAppInterface(currentUser)
groupCtx = handleGroupingInRequest(paginationParams, appInterface, CONTEXT_KEY)

# Resolve view and merge config into params
viewKey = paginationParams.viewKey if paginationParams else None
viewConfig, viewDisplayName = resolveView(appInterface, CONTEXT_KEY, viewKey)
viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
paginationParams = applyViewToParams(paginationParams, viewConfig)
groupByLevels = effective_group_by_levels(paginationParams, viewConfig)

def _promptsToEnrichedDicts(promptItems):
dicts = [r.model_dump() if hasattr(r, 'model_dump') else (dict(r) if not isinstance(r, dict) else r) for r in promptItems]

@@ -73,26 +81,44 @@ def get_prompts(

managementInterface = interfaceDbManagement.getInterface(currentUser)

if mode == "groupSummary":
if not pagination:
raise HTTPException(status_code=400, detail="pagination required for groupSummary")
from modules.routes.routeHelpers import (
apply_strategy_b_filters_and_sort,
build_group_summary_groups,
)
if not groupByLevels or not groupByLevels[0].get("field"):
raise HTTPException(
status_code=400,
detail="groupByLevels[0].field required for groupSummary",
)
field = groupByLevels[0]["field"]
null_label = str(groupByLevels[0].get("nullLabel") or "—")
result = managementInterface.getAllPrompts(pagination=None)
allItems = _promptsToEnrichedDicts(
result if isinstance(result, list) else (result.items if hasattr(result, "items") else [])
)
filtered = apply_strategy_b_filters_and_sort(allItems, paginationParams, currentUser)
groups_out = build_group_summary_groups(filtered, field, null_label)
return JSONResponse(content={"groups": groups_out})

if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
result = managementInterface.getAllPrompts(pagination=None)
items = _promptsToEnrichedDicts(result)
items = applyGroupScopeFilter(items, groupCtx.itemIds)
return handleFilterValuesInMemory(items, column, pagination)
return handleFilterValuesInMemory(_promptsToEnrichedDicts(result), column, pagination)

if mode == "ids":
result = managementInterface.getAllPrompts(pagination=None)
items = _promptsToEnrichedDicts(result)
items = applyGroupScopeFilter(items, groupCtx.itemIds)
return handleIdsInMemory(items, pagination)
return handleIdsInMemory(_promptsToEnrichedDicts(result), pagination)

if not groupByLevels:
# No grouping: let DB handle pagination directly
result = managementInterface.getAllPrompts(pagination=paginationParams)

if paginationParams:
items = applyGroupScopeFilter(_promptsToEnrichedDicts(result.items), groupCtx.itemIds)
return {
"items": items,
if paginationParams and hasattr(result, 'items'):
response: dict = {
"items": _promptsToEnrichedDicts(result.items),
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,

@@ -101,15 +127,52 @@ def get_prompts(

sort=paginationParams.sort,
filters=paginationParams.filters
).model_dump(),
"groupTree": groupCtx.groupTree,
}
else:
items = applyGroupScopeFilter(_promptsToEnrichedDicts(result), groupCtx.itemIds)
return {
"items": items,
"pagination": None,
"groupTree": groupCtx.groupTree,
response = {"items": _promptsToEnrichedDicts(result if isinstance(result, list) else [result]), "pagination": None}
if viewMeta:
response["appliedView"] = viewMeta.model_dump()
return response

# Strategy B grouping: load all, filter+sort in-memory, group, then slice
result = managementInterface.getAllPrompts(pagination=None)
allItems = _promptsToEnrichedDicts(result if isinstance(result, list) else (result.items if hasattr(result, 'items') else []))

if not paginationParams:
response = {"items": allItems, "pagination": None}
if viewMeta:
response["appliedView"] = viewMeta.model_dump()
return response

if paginationParams.filters or paginationParams.sort:
from modules.interfaces.interfaceDbManagement import ComponentObjects
comp = ComponentObjects()
comp.setUserContext(currentUser)
if paginationParams.filters:
allItems = comp._applyFilters(allItems, paginationParams.filters)
if paginationParams.sort:
allItems = comp._applySorting(allItems, paginationParams.sort)

totalItems = len(allItems)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
page_items, groupLayout = buildGroupLayout(allItems, groupByLevels, paginationParams.page, paginationParams.pageSize)

response = {
"items": page_items,
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
).model_dump(),
}
if groupLayout:
response["groupLayout"] = groupLayout.model_dump()
if viewMeta:
response["appliedView"] = viewMeta.model_dump()
return response
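A hypothetical request exercising the new groupSummary mode; the base URL, token, and the /api/prompts prefix are assumptions:

    import json

    import requests

    pagination = {"page": 1, "pageSize": 25,
                  "groupByLevels": [{"field": "scope", "nullLabel": "No scope"}]}
    resp = requests.get(
        "http://localhost:8000/api/prompts",
        params={"mode": "groupSummary", "pagination": json.dumps(pagination)},
        headers={"Authorization": "Bearer <TOKEN>"},
        timeout=30,
    )
    # Expected shape: {"groups": [{"value": ..., "label": ..., "totalCount": ...}, ...]}
    print(resp.json())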

@router.post("", response_model=Prompt)
@@ -208,7 +208,6 @@ def get_users(

- GET /api/users/ (no pagination - returns all users in mandate)
- GET /api/users/?pagination={"page":1,"pageSize":10,"sort":[]}
"""
# Parse pagination early — needed for grouping in all modes
_paginationParams = None
if pagination:
try:

@@ -219,10 +218,6 @@ def get_users(

except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")

from modules.routes.routeHelpers import handleGroupingInRequest as _handleGrouping, applyGroupScopeFilter as _applyGroupScope
_appInterfaceForGrouping = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)
_groupCtx = _handleGrouping(_paginationParams, _appInterfaceForGrouping, "users")

if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")

@@ -233,14 +228,12 @@ def get_users(

try:
paginationParams = _paginationParams
appInterface = _appInterfaceForGrouping
appInterface = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)

if context.mandateId:
# Get users for specific mandate using getUsersByMandate
result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)

if paginationParams and hasattr(result, 'items'):
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(result.items), User), _groupCtx.itemIds)
enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
return {
"items": enriched,
"pagination": PaginationMetadata(

@@ -251,18 +244,14 @@ def get_users(

sort=paginationParams.sort,
filters=paginationParams.filters
).model_dump(),
"groupTree": _groupCtx.groupTree,
}
else:
users = result if isinstance(result, list) else result.items if hasattr(result, 'items') else []
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(users), User), _groupCtx.itemIds)
return {"items": enriched, "pagination": None, "groupTree": _groupCtx.groupTree}
return {"items": enrichRowsWithFkLabels(_usersToDicts(users), User), "pagination": None}
elif context.isPlatformAdmin:
# PlatformAdmin without mandateId — DB-level pagination via interface
result = appInterface.getAllUsers(paginationParams)

if paginationParams and hasattr(result, 'items'):
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(result.items), User), _groupCtx.itemIds)
enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
return {
"items": enriched,
"pagination": PaginationMetadata(

@@ -273,18 +262,13 @@ def get_users(

sort=paginationParams.sort,
filters=paginationParams.filters
).model_dump(),
"groupTree": _groupCtx.groupTree,
}
else:
users = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else [])
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(users), User), _groupCtx.itemIds)
return {"items": enriched, "pagination": None, "groupTree": _groupCtx.groupTree}
return {"items": enrichRowsWithFkLabels(_usersToDicts(users), User), "pagination": None}
else:
# Non-SysAdmin without mandateId: aggregate users across all admin mandates
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(context.user.id))

# Find mandates where user has admin role
adminMandateIds = []
for um in userMandates:
umId = getattr(um, 'id', None)

@@ -299,10 +283,7 @@ def get_users(

break

if not adminMandateIds:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("No admin access to any mandate")
)
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("No admin access to any mandate"))

from modules.datamodels.datamodelMembership import UserMandate as UserMandateModel
allUM = rootInterface.db.getRecordset(UserMandateModel, recordFilter={"mandateId": adminMandateIds})

@@ -312,13 +293,10 @@ def get_users(

if (um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None))
})
batchUsers = rootInterface.getUsersByIds(uniqueUserIds) if uniqueUserIds else {}
allUsers = [
u.model_dump() if hasattr(u, 'model_dump') else vars(u)
for u in batchUsers.values()
]
allUsers = [u.model_dump() if hasattr(u, 'model_dump') else vars(u) for u in batchUsers.values()]

from modules.routes.routeHelpers import applyFiltersAndSort as _applyFiltersAndSortHelper
filteredUsers = _applyGroupScope(_applyFiltersAndSortHelper(allUsers, paginationParams), _groupCtx.itemIds)
filteredUsers = _applyFiltersAndSortHelper(allUsers, paginationParams)
enriched = enrichRowsWithFkLabels(filteredUsers, User)

if paginationParams:

@@ -327,7 +305,6 @@ def get_users(

totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize

return {
"items": enriched[startIdx:endIdx],
"pagination": PaginationMetadata(

@@ -338,10 +315,9 @@ def get_users(

sort=paginationParams.sort,
filters=paginationParams.filters
).model_dump(),
"groupTree": _groupCtx.groupTree,
}
else:
return {"items": enriched, "pagination": None, "groupTree": _groupCtx.groupTree}
return {"items": enriched, "pagination": None}
except HTTPException:
raise
except Exception as e:

@@ -704,154 +704,260 @@ def paginateInMemory(

# ---------------------------------------------------------------------------
# Table Grouping helpers
# View resolution and Strategy B grouping engine
# ---------------------------------------------------------------------------

from dataclasses import dataclass, field as dc_field

@dataclass
class GroupingContext:
def resolveView(interface, contextKey: str, viewKey: Optional[str]):
"""
Result of handleGroupingInRequest.
Carries the group tree for the response and the resolved item-ID set for
group-scope filtering (None = no active group scope).
Load a TableListView for the current user and contextKey.

Returns (config_dict, display_name):
- (None, None) when viewKey is None / empty
- (config, str | None) otherwise — config may be {}; display_name from the row

Raises HTTPException(404) when viewKey is explicitly set but the view
does not exist (prevents silent fallback to ungrouped behaviour).
"""
groupTree: Optional[list]  # List[TableGroupNode] serialised as dicts — for response
itemIds: Optional[set]  # Set[str] when groupId was set, else None

def _collectItemIds(nodes: list, groupId: str) -> Optional[set]:
"""
Recursively search *nodes* for a node whose id == groupId and collect
all itemIds from it and all its descendant subGroups.
Returns None if the group is not found.
"""
for node in nodes:
nodeId = node.get("id") if isinstance(node, dict) else getattr(node, "id", None)
if nodeId == groupId:
ids: set = set()
_collectAllIds(node, ids)
return ids
subGroups = node.get("subGroups", []) if isinstance(node, dict) else getattr(node, "subGroups", [])
result = _collectItemIds(subGroups, groupId)
if result is not None:
return result
return None

def _collectAllIds(node, ids: set) -> None:
"""Collect itemIds from a node and all its descendants into ids."""
nodeItemIds = node.get("itemIds", []) if isinstance(node, dict) else getattr(node, "itemIds", [])
for iid in nodeItemIds:
ids.add(str(iid))
subGroups = node.get("subGroups", []) if isinstance(node, dict) else getattr(node, "subGroups", [])
for child in subGroups:
_collectAllIds(child, ids)

def _removeGroupFromTree(nodes: list, groupId: str) -> list:
"""Remove a group node (and all descendants) from the tree by id."""
result = []
for node in nodes:
nodeId = node.get("id") if isinstance(node, dict) else getattr(node, "id", None)
if nodeId == groupId:
continue  # skip this node (remove it)
subGroups = node.get("subGroups", []) if isinstance(node, dict) else getattr(node, "subGroups", [])
filtered_sub = _removeGroupFromTree(subGroups, groupId)
if isinstance(node, dict):
node = {**node, "subGroups": filtered_sub}
result.append(node)
return result

def handleGroupingInRequest(
paginationParams: Optional[PaginationParams],
interface,
contextKey: str,
) -> GroupingContext:
"""
Central grouping handler — call at the start of every list route that
supports table grouping.

Steps (in order):
1. If paginationParams.saveGroupTree is set:
persist the new tree via interface.upsertTableGrouping, then clear
saveGroupTree from paginationParams so it is not treated as a filter.
2. Load the current group tree from the DB (used in step 3 and response).
3. If paginationParams.groupId is set:
resolve it to a Set[str] of itemIds (including all sub-groups),
then clear groupId from paginationParams so it is not treated as a
normal filter field.
4. Return a GroupingContext with groupTree (for the response) and itemIds
(for applyGroupScopeFilter).

The caller does NOT need to handle any grouping logic itself — just call
applyGroupScopeFilter(items, groupCtx.itemIds) and embed groupCtx.groupTree
in the response dict.
"""
from modules.datamodels.datamodelPagination import TableGroupNode

groupTree = None
itemIds = None

if paginationParams is None:
from fastapi import HTTPException
if not viewKey:
return None, None
try:
existing = interface.getTableGrouping(contextKey)
if existing:
groupTree = [n.model_dump() if hasattr(n, "model_dump") else n for n in existing.rootGroups]
view = interface.getTableListView(contextKey=contextKey, viewKey=viewKey)
except Exception as e:
logger.warning(f"handleGroupingInRequest: getTableGrouping failed: {e}")
return GroupingContext(groupTree=groupTree, itemIds=None)
logger.warning(f"resolveView: store lookup failed for key={viewKey!r} context={contextKey!r}: {e}")
view = None
if view is None:
raise HTTPException(status_code=404, detail=f"View '{viewKey}' not found for context '{contextKey}'")
cfg = view.config or {}
dname = getattr(view, "displayName", None) or None
return cfg, dname

# Step 1: persist saveGroupTree if present
if paginationParams.saveGroupTree is not None:
try:
saved = interface.upsertTableGrouping(contextKey, paginationParams.saveGroupTree)
groupTree = [n.model_dump() if hasattr(n, "model_dump") else n for n in saved.rootGroups]
except Exception as e:
logger.error(f"handleGroupingInRequest: upsertTableGrouping failed: {e}")
paginationParams.saveGroupTree = None

# Step 2: load current tree (only if not already set from save above)
if groupTree is None:
try:
existing = interface.getTableGrouping(contextKey)
if existing:
groupTree = [n.model_dump() if hasattr(n, "model_dump") else n for n in existing.rootGroups]
except Exception as e:
logger.warning(f"handleGroupingInRequest: getTableGrouping failed: {e}")
def effective_group_by_levels(
pagination_params: Optional["PaginationParams"],
view_config: Optional[dict],
) -> List[Dict[str, Any]]:
"""
Choose grouping levels for this request.

# Step 3: resolve groupId to itemIds set
if paginationParams.groupId is not None:
targetGroupId = paginationParams.groupId
paginationParams.groupId = None  # remove so it is not treated as a normal filter
if groupTree:
itemIds = _collectItemIds(groupTree, targetGroupId)
if itemIds is None:
logger.warning(
f"handleGroupingInRequest: groupId={targetGroupId!r} not found in tree "
f"for contextKey={contextKey!r} — returning empty set"
)
itemIds = set()  # unknown group → show nothing rather than everything
If the client sends ``groupByLevels`` (including ``[]``), it wins over the
saved view. If the key is omitted (``None``), use the view's levels.
"""
if pagination_params is not None:
req = getattr(pagination_params, "groupByLevels", None)
if req is not None:
out: List[Dict[str, Any]] = []
for lvl in req:
if hasattr(lvl, "model_dump"):
out.append(lvl.model_dump())
elif isinstance(lvl, dict):
out.append(dict(lvl))
else:
# groupId sent but no tree saved yet → return empty (nothing belongs to any group)
logger.warning(
f"handleGroupingInRequest: groupId={targetGroupId!r} set but no tree exists "
f"for contextKey={contextKey!r} — returning empty set"
out.append(dict(lvl))  # type: ignore[arg-type]
return out
vc = (view_config or {}).get("groupByLevels") if view_config else None
return list(vc or [])
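The key distinction the helper encodes: an explicit empty list from the client turns grouping off, while an omitted key defers to the saved view. A plain-dict sketch of that rule (not the real Pydantic models):

    view_cfg = {"groupByLevels": [{"field": "status"}]}

    def pick(request_levels, cfg):
        if request_levels is not None:  # client sent the key, so it wins, even when []
            return request_levels
        return list((cfg or {}).get("groupByLevels") or [])

    assert pick(None, view_cfg) == [{"field": "status"}]  # omitted -> view applies
    assert pick([], view_cfg) == []                       # explicit [] -> grouping off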

def applyViewToParams(params: Optional["PaginationParams"], viewConfig: Optional[dict]) -> Optional["PaginationParams"]:
"""
Merge a view's saved configuration into PaginationParams.

Priority: explicit request fields win over view defaults.
- sort: use request sort if non-empty, otherwise view sort
- filters: deep-merge (request filters win per-key)
- pageSize: use request value (already set by normalize_pagination_dict)

Returns the (mutated) params, or a new minimal PaginationParams when
params is None (so callers always get a valid object).
"""
from modules.datamodels.datamodelPagination import PaginationParams, SortField
if not viewConfig:
return params

if params is None:
params = PaginationParams(page=1, pageSize=25)

# Sort: request wins if non-empty
if not params.sort and viewConfig.get("sort"):
try:
params.sort = [
SortField(**s) if isinstance(s, dict) else s
for s in viewConfig["sort"]
]
except Exception as e:
logger.warning(f"applyViewToParams: could not parse view sort: {e}")

# Filters: deep-merge (request filters take priority per-key)
viewFilters = viewConfig.get("filters") or {}
if viewFilters:
merged = dict(viewFilters)
if params.filters:
merged.update(params.filters)
params.filters = merged

return params
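The per-key filter precedence implemented above, shown with plain dicts: start from the view's filters, then let the request overwrite key by key.

    view_filters = {"status": "active", "owner": "alice"}
    request_filters = {"owner": "bob"}

    merged = dict(view_filters)
    merged.update(request_filters)
    assert merged == {"status": "active", "owner": "bob"}  # request wins on "owner"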

def apply_strategy_b_filters_and_sort(
items: List[Dict[str, Any]],
pagination_params: Optional[PaginationParams],
current_user: Any,
) -> List[Dict[str, Any]]:
"""
Shared in-memory filter + sort pass for Strategy B (files/prompts/connections lists).
"""
if not pagination_params:
return list(items)
from modules.interfaces.interfaceDbManagement import ComponentObjects

comp = ComponentObjects()
comp.setUserContext(current_user)
out = list(items)
if pagination_params.filters:
out = comp._applyFilters(out, pagination_params.filters)
if pagination_params.sort:
out = comp._applySorting(out, pagination_params.sort)
return out

def build_group_summary_groups(
items: List[Dict[str, Any]],
field: str,
null_label: str = "—",
) -> List[Dict[str, Any]]:
"""
Build {"value", "label", "totalCount"} for mode=groupSummary (single grouping level).
"""
from collections import defaultdict

counts: Dict[str, int] = defaultdict(int)
display_by_key: Dict[str, str] = {}
null_key = "\x00NULL"
label_attr = f"{field}Label"

for item in items:
raw = item.get(field)
if raw is None or raw == "":
nk = null_key
display = null_label
else:
nk = str(raw)
display = None
lbl = item.get(label_attr)
if lbl is not None and lbl != "":
display = str(lbl)
if display is None:
display = nk
counts[nk] += 1
if nk not in display_by_key:
display_by_key[nk] = display

ordered_keys = sorted(
counts.keys(),
key=lambda x: (x == null_key, str(display_by_key.get(x, x)).lower()),
)
itemIds = set()

return GroupingContext(groupTree=groupTree, itemIds=itemIds)
return [
{
"value": None if nk == null_key else nk,
"label": display_by_key.get(nk, nk),
"totalCount": counts[nk],
}
for nk in ordered_keys
]
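Assuming routeHelpers is importable, feeding the helper three toy rows yields one bucket per distinct value plus a null bucket sorted last:

    rows = [{"mandateId": "m1", "mandateIdLabel": "Acme"},
            {"mandateId": "m1", "mandateIdLabel": "Acme"},
            {"mandateId": None}]
    build_group_summary_groups(rows, "mandateId", "(none)")
    # -> [{"value": "m1", "label": "Acme", "totalCount": 2},
    #     {"value": None, "label": "(none)", "totalCount": 1}]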

def applyGroupScopeFilter(items: List[Dict[str, Any]], itemIds: Optional[set]) -> List[Dict[str, Any]]:
def buildGroupLayout(
all_items: List[Dict[str, Any]],
groupByLevels: List[Dict[str, Any]],
page: int,
pageSize: int,
) -> tuple:
"""
Filter items to those whose "id" field is in itemIds.
Returns items unchanged when itemIds is None (no active group scope).
Works for both normal list items and for mode=ids / mode=filterValues flows
— call it before handleIdsInMemory / handleFilterValuesInMemory.
Apply multi-level grouping to all_items, slice to the requested page,
and return (page_items, GroupLayout | None).

Strategy B: grouping operates on the full filtered+sorted candidate list.
Items are stably re-sorted by the group path so that members of the same
group are always contiguous (preserving the existing per-group sort order
from the caller).

Parameters
----------
all_items: fully filtered and user-sorted list of row dicts.
groupByLevels: list of {"field": str, "nullLabel": str, "direction": "asc"|"desc"} dicts.
page, pageSize: 1-based page index and page size.

Returns
-------
(page_items, GroupLayout | None)
"""
if itemIds is None:
return items
return [item for item in items if str(item.get("id", "")) in itemIds]
from functools import cmp_to_key
from modules.datamodels.datamodelPagination import GroupBand, GroupLayout

if not groupByLevels:
offset = (page - 1) * pageSize
return all_items[offset:offset + pageSize], None

levels = [lvl.get("field", "") for lvl in groupByLevels if lvl.get("field")]
if not levels:
offset = (page - 1) * pageSize
return all_items[offset:offset + pageSize], None

nullLabels = {lvl.get("field", ""): lvl.get("nullLabel", "—") for lvl in groupByLevels}

def _path_key(item: dict) -> tuple:
return tuple(
str(item.get(f) or "") if item.get(f) is not None else nullLabels.get(f, "—")
for f in levels
)

def _item_cmp(a: dict, b: dict) -> int:
pa, pb = _path_key(a), _path_key(b)
for i in range(len(levels)):
if pa[i] != pb[i]:
asc = (groupByLevels[i].get("direction") or "asc").lower() != "desc"
if pa[i] < pb[i]:
return -1 if asc else 1
return 1 if asc else -1
return 0

# Sort by group path (per-level asc/desc); order within same path stays stable in Py3.12+
all_items.sort(key=cmp_to_key(_item_cmp))

# Build global band list from the full sorted list
bands_global: List[dict] = []
current_path: Optional[tuple] = None
current_start = 0
for i, item in enumerate(all_items):
path = _path_key(item)
if path != current_path:
if current_path is not None:
bands_global.append({"path": list(current_path), "startIdx": current_start, "endIdx": i})
current_path = path
current_start = i
if current_path is not None:
bands_global.append({"path": list(current_path), "startIdx": current_start, "endIdx": len(all_items)})

# Slice to page
page_start = (page - 1) * pageSize
page_end = page_start + pageSize
page_items = all_items[page_start:page_end]

# Find bands that have at least one row on this page
bands_on_page: List[GroupBand] = []
for band in bands_global:
inter_start = max(band["startIdx"], page_start)
inter_end = min(band["endIdx"], page_end)
if inter_start >= inter_end:
continue
path_list = band["path"]
bands_on_page.append(GroupBand(
path=path_list,
label=path_list[-1] if path_list else "—",
startRowIndex=inter_start - page_start,
rowCount=inter_end - inter_start,
))

group_layout = GroupLayout(levels=levels, bands=bands_on_page) if bands_on_page else GroupLayout(levels=levels, bands=[])
return page_items, group_layout
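A minimal standalone sketch of the band/page intersection above, using plain dicts instead of the GroupBand model: a band appears on a page exactly where its global [startIdx, endIdx) range overlaps the page window.

    def bands_for_page(bands, page, page_size):
        lo, hi = (page - 1) * page_size, page * page_size
        out = []
        for b in bands:
            s, e = max(b["startIdx"], lo), min(b["endIdx"], hi)
            if s < e:  # band has at least one row on this page
                out.append({"path": b["path"], "startRowIndex": s - lo, "rowCount": e - s})
        return out

    bands = [{"path": ["A"], "startIdx": 0, "endIdx": 7},
             {"path": ["B"], "startIdx": 7, "endIdx": 12}]
    assert bands_for_page(bands, page=2, page_size=5) == [
        {"path": ["A"], "startRowIndex": 0, "rowCount": 2},  # rows 5-6 of band A
        {"path": ["B"], "startRowIndex": 2, "rowCount": 3},  # rows 7-9 of band B
    ]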

@@ -128,7 +128,7 @@ async def getSharepointFolderOptionsByReference(

# Set access token on SharePoint service
if not services.sharepoint.setAccessTokenFromConnection(connection):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
status_code=status.HTTP_502_BAD_GATEWAY,
detail=routeApiMsg("Failed to set SharePoint access token. Connection may be expired or invalid.")
)

177
modules/routes/routeTableViews.py
Normal file

@@ -0,0 +1,177 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
CRUD endpoints for saved table views (TableListView).

A view stores a named preset of filters, sort order, and groupByLevels for a
specific table (identified by contextKey). Views are per-user and optionally
per-mandate.

Route prefix: /api/table-views
"""

import logging
from typing import List, Optional

from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Request
from fastapi import status

from modules.auth import limiter, getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import TableListView
import modules.interfaces.interfaceDbApp as interfaceDbApp

logger = logging.getLogger(__name__)

router = APIRouter(
prefix="/api/table-views",
tags=["Table Views"],
responses={404: {"description": "Not found"}},
)

def _ownedOrRaise(view: Optional[TableListView], viewId: str, userId: str):
"""Raise 404 when view is missing; ownership is implicitly guaranteed by the
interface layer (views are always queried with the current userId)."""
if view is None:
raise HTTPException(status_code=404, detail=f"View '{viewId}' not found")
return view

# ---------------------------------------------------------------------------
# List views for a context
# ---------------------------------------------------------------------------

@router.get("")
@limiter.limit("60/minute")
def list_views(
request: Request,
contextKey: str = Query(..., description="Table context key, e.g. 'connections', 'files/list'"),
currentUser: User = Depends(getCurrentUser),
):
"""List all saved views for the current user and contextKey."""
iface = interfaceDbApp.getInterface(currentUser)
views = iface.getTableListViews(contextKey=contextKey)
return [v.model_dump() if hasattr(v, "model_dump") else v for v in views]

# ---------------------------------------------------------------------------
# Get one view
# ---------------------------------------------------------------------------

@router.get("/{viewKey}")
@limiter.limit("60/minute")
def get_view(
request: Request,
viewKey: str = Path(..., description="View slug"),
contextKey: str = Query(..., description="Table context key"),
currentUser: User = Depends(getCurrentUser),
):
"""Return a single saved view by its viewKey."""
iface = interfaceDbApp.getInterface(currentUser)
view = iface.getTableListView(contextKey=contextKey, viewKey=viewKey)
if view is None:
raise HTTPException(status_code=404, detail=f"View '{viewKey}' not found for context '{contextKey}'")
return view.model_dump() if hasattr(view, "model_dump") else view

# ---------------------------------------------------------------------------
# Create a view
# ---------------------------------------------------------------------------

@router.post("", status_code=status.HTTP_201_CREATED)
@limiter.limit("30/minute")
def create_view(
request: Request,
body: dict = Body(...),
currentUser: User = Depends(getCurrentUser),
):
"""
Create a new saved view.

Body fields:
- contextKey (required): table context key
- viewKey (required): short slug, unique per (user, contextKey)
- displayName (required): human-readable label
- config (optional): view config dict with keys:
schemaVersion, filters, sort, groupByLevels
"""
contextKey = body.get("contextKey")
viewKey = body.get("viewKey")
displayName = body.get("displayName")
config = body.get("config") or {}

if not contextKey:
raise HTTPException(status_code=400, detail="contextKey is required")
if not viewKey:
raise HTTPException(status_code=400, detail="viewKey is required")
if not displayName:
raise HTTPException(status_code=400, detail="displayName is required")

iface = interfaceDbApp.getInterface(currentUser)
try:
view = iface.createTableListView(
contextKey=contextKey,
viewKey=viewKey,
displayName=displayName,
config=config,
)
return view.model_dump() if hasattr(view, "model_dump") else view
except ValueError as e:
raise HTTPException(status_code=409, detail=str(e))
except Exception as e:
logger.error(f"create_view failed: {e}")
raise HTTPException(status_code=500, detail="Failed to create view")

# ---------------------------------------------------------------------------
# Update a view (by id)
# ---------------------------------------------------------------------------

@router.put("/{viewId}")
@limiter.limit("30/minute")
def update_view(
request: Request,
viewId: str = Path(..., description="View primary-key id (not viewKey)"),
body: dict = Body(...),
currentUser: User = Depends(getCurrentUser),
):
"""
Update an existing view.

Updatable fields: displayName, viewKey, config.
The contextKey cannot be changed after creation.
"""
allowed = {"displayName", "viewKey", "config"}
updates = {k: v for k, v in body.items() if k in allowed}
if not updates:
raise HTTPException(status_code=400, detail=f"No updatable fields provided. Allowed: {allowed}")

iface = interfaceDbApp.getInterface(currentUser)
try:
updated = iface.updateTableListView(viewId=viewId, updates=updates)
except Exception as e:
logger.error(f"update_view failed: {e}")
raise HTTPException(status_code=500, detail="Failed to update view")

if updated is None:
raise HTTPException(status_code=404, detail=f"View id='{viewId}' not found")
return updated.model_dump() if hasattr(updated, "model_dump") else updated

# ---------------------------------------------------------------------------
# Delete a view (by id)
# ---------------------------------------------------------------------------

@router.delete("/{viewId}", status_code=status.HTTP_204_NO_CONTENT)
@limiter.limit("30/minute")
def delete_view(
request: Request,
viewId: str = Path(..., description="View primary-key id"),
currentUser: User = Depends(getCurrentUser),
):
"""Delete a saved view by its primary-key id."""
iface = interfaceDbApp.getInterface(currentUser)
deleted = iface.deleteTableListView(viewId=viewId)
if not deleted:
raise HTTPException(status_code=404, detail=f"View id='{viewId}' not found or could not be deleted")
|
||||
|
|
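A quick smoke test for the new endpoints, as a sketch: the router's mount prefix is not visible in this diff, so the /tableViews base below is an assumption, and the bearer token is a placeholder.

import requests

BASE = "http://localhost:8000/tableViews"  # assumed mount prefix; adjust to the real router path
HEADERS = {"Authorization": "Bearer <TOKEN>"}  # placeholder credentials

# Create a saved view; contextKey, viewKey and displayName are required by create_view
created = requests.post(BASE, headers=HEADERS, json={
    "contextKey": "files/list",
    "viewKey": "my-view",
    "displayName": "My view",
    "config": {"schemaVersion": 1, "filters": [], "sort": [], "groupByLevels": []},
}).json()

# Rename it by primary-key id, then delete it (404 if the id is unknown)
requests.put(f"{BASE}/{created['id']}", headers=HEADERS, json={"displayName": "Renamed"})
requests.delete(f"{BASE}/{created['id']}", headers=HEADERS)
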
@ -3,7 +3,7 @@
"""ActionToolAdapter: wraps existing workflow actions (dynamicMode=True) as agent tools."""

import logging
from typing import Dict, Any, List
from typing import Dict, Any, List, Optional

from modules.serviceCenter.services.serviceAgent.datamodelAgent import (
    ToolDefinition, ToolResult

@ -44,7 +44,7 @@ class ActionToolAdapter:
            compoundName = f"{shortName}_{actionName}"
            toolDef = _buildToolDefinition(compoundName, actionDef, actionInfo)

            handler = _createDispatchHandler(self._actionExecutor, shortName, actionName)
            handler = _createDispatchHandler(self._actionExecutor, shortName, actionName, self._actionExecutor.services)
            toolRegistry.registerFromDefinition(toolDef, handler)
            self._registeredTools.append(compoundName)
            registered += 1

@ -186,7 +186,7 @@ def _catalogTypeToJsonSchema(typeStr: str, _depth: int = 0) -> Dict[str, Any]:
    return {"type": "string", "description": f"unknown type '{typeStr}' (defaulted to string)"}


def _createDispatchHandler(actionExecutor, methodName: str, actionName: str):
def _createDispatchHandler(actionExecutor, methodName: str, actionName: str, services=None):
    """Create an async handler that dispatches to the ActionExecutor.

    Parameter validation and Ref-payload normalization (collapsing

@ -204,7 +204,7 @@ def _createDispatchHandler(actionExecutor, methodName: str, actionName: str):
        if "mandateId" not in args and context.get("mandateId"):
            args["mandateId"] = context["mandateId"]
        result = await actionExecutor.executeAction(methodName, actionName, args)
        data = _formatActionResult(result)
        data = _formatActionResult(result, services, context)
        return ToolResult(
            toolCallId="",
            toolName=f"{methodName}_{actionName}",

@ -223,9 +223,65 @@
    return _handler

def _formatActionResult(result) -> str:
    """Format an ActionResult into a text representation for the agent."""
_INLINE_CONTENT_LIMIT = 2000


def _persistLargeDocument(doc, services, context: Dict[str, Any]) -> Optional[str]:
    """Save an ActionDocument with large content as a workspace file.

    Returns a formatted result line (with file id + docItem ref) or None
    if persistence is not possible.
    """
    if not services:
        return None
    chatService = getattr(services, "chat", None)
    if not chatService:
        return None
    docData = getattr(doc, "documentData", None)
    if not docData or not isinstance(docData, str):
        return None
    docName = getattr(doc, "documentName", "unnamed")
    docBytes = docData.encode("utf-8")
    try:
        fileItem, _ = chatService.interfaceDbComponent.saveUploadedFile(docBytes, docName)
        fiId = context.get("featureInstanceId") or getattr(services, "featureInstanceId", "")
        if fiId:
            chatService.interfaceDbComponent.updateFile(fileItem.id, {"featureInstanceId": fiId})

        from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
            _attachFileAsChatDocument,
            _formatToolFileResult,
            _getOrCreateTempFolder,
        )
        tempFolderId = _getOrCreateTempFolder(chatService)
        if tempFolderId:
            chatService.interfaceDbComponent.updateFile(fileItem.id, {"folderId": tempFolderId})

        chatDocId = _attachFileAsChatDocument(
            services, fileItem,
            label=f"action_doc:{docName}",
            userMessage=f"Action document: {docName}",
        )
        return _formatToolFileResult(
            fileItem=fileItem,
            chatDocId=chatDocId,
            actionLabel="Produced",
            extraInfo="Use readFile to read the content.",
        )
    except Exception as e:
        logger.warning(f"_persistLargeDocument failed for {docName}: {e}")
        return None


def _formatActionResult(result, services=None, context: Optional[Dict[str, Any]] = None) -> str:
    """Format an ActionResult into a text representation for the agent.

    Documents whose content exceeds the inline limit are persisted as
    workspace files so the agent can access them via readFile /
    ai_process / searchInFileContent.
    """
    parts = []
    ctx = context or {}

    if result.resultLabel:
        parts.append(f"Result: {result.resultLabel}")

@ -238,10 +294,19 @@ def _formatActionResult(result) -> str:
        for doc in result.documents:
            docName = getattr(doc, "documentName", "unnamed")
            docType = getattr(doc, "mimeType", "unknown")
            parts.append(f" - {docName} ({docType})")
            docData = getattr(doc, "documentData", None)
            if docData and isinstance(docData, str) and len(docData) < 2000:
                parts.append(f" Content: {docData[:2000]}")

            isLarge = docData and isinstance(docData, str) and len(docData) >= _INLINE_CONTENT_LIMIT
            if isLarge:
                persistedLine = _persistLargeDocument(doc, services, ctx)
                if persistedLine:
                    parts.append(f" - {docName} ({docType})")
                    parts.append(f" {persistedLine}")
                    continue

            parts.append(f" - {docName} ({docType})")
            if docData and isinstance(docData, str) and len(docData) < _INLINE_CONTENT_LIMIT:
                parts.append(f" Content: {docData[:_INLINE_CONTENT_LIMIT]}")

    if not parts:
        parts.append("Action completed successfully." if result.success else "Action failed.")

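The inline/persist cut-over is exclusive at _INLINE_CONTENT_LIMIT, and persistence degrades gracefully without a services context. A minimal sketch of the boundary, using SimpleNamespace stand-ins for ActionResult/ActionDocument (hypothetical; only the formatter above is real):

from types import SimpleNamespace

# Hypothetical stand-ins for ActionResult / ActionDocument, just to probe the boundary
small = SimpleNamespace(documentName="a.txt", mimeType="text/plain", documentData="x" * 1999)
large = SimpleNamespace(documentName="b.txt", mimeType="text/plain", documentData="x" * 2000)
result = SimpleNamespace(resultLabel="demo", success=True, documents=[small, large])

# With services=None, _persistLargeDocument returns None, so the 2000-char document
# is listed by name only, while the 1999-char document is still inlined.
print(_formatActionResult(result))
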
@ -198,7 +198,10 @@ def _registerDataSourceTools(registry: ToolRegistry, services):

        if isinstance(result, _DR):
            fileBytes = result.data
            fileName = result.fileName or fileName
            resolvedName = result.fileName or fileName
            if resolvedName != fileName:
                logger.debug(f"downloadFromDataSource: connector fileName={result.fileName!r} overrides arg fileName={fileName!r}")
            fileName = resolvedName
        else:
            fileBytes = result

@ -61,33 +61,7 @@ async def _getOrCreateInstanceGroup(
    featureInstanceId: str,
    contextKey: str = "files/list",
) -> Optional[str]:
    """Return groupId of the default group for a feature instance; create if needed."""
    try:
        existing = appInterface.getTableGrouping(contextKey)
        nodes = [
            n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n))
            for n in (existing.rootGroups if existing else [])
        ]

        def _find(nds):
            for nd in nds:
                meta = nd.get("meta", {}) if isinstance(nd, dict) else getattr(nd, "meta", {})
                if (meta or {}).get("featureInstanceId") == featureInstanceId:
                    return nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
                found = _find(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", []))
                if found:
                    return found
            return None

        found = _find(nodes)
        if found:
            return found
        newId = str(uuid.uuid4())
        nodes.append({"id": newId, "name": featureInstanceId, "itemIds": [], "subGroups": [], "meta": {"featureInstanceId": featureInstanceId}})
        appInterface.upsertTableGrouping(contextKey, nodes)
        return newId
    except Exception as e:
        logger.error(f"_getOrCreateInstanceGroup: {e}")
    """Stub — file group tree removed. Returns None; callers that checked the result will skip group assignment."""
    return None


@ -96,8 +70,8 @@ async def _getOrCreateTempGroup(
    sessionId: str,
    contextKey: str = "files/list",
) -> Optional[str]:
    """Return groupId of a temporary group for a session; create if needed."""
    return await _getOrCreateInstanceGroup(appInterface, f"_temp_{sessionId}", contextKey)
    """Stub — file group tree removed. Returns None."""
    return None


def _attachFileAsChatDocument(

@ -836,7 +836,7 @@ def _registerMediaTools(registry: ToolRegistry, services):
            return ToolResult(toolCallId="", toolName="executeCode", success=False, error=f"Language '{language}' not supported. Only 'python' is available.")
        try:
            from modules.serviceCenter.services.serviceAgent.sandboxExecutor import executePython
            result = await executePython(code)
            result = await executePython(code, services=services)
            if result.get("success"):
                output = result.get("output", "(no output)")
                return ToolResult(toolCallId="", toolName="executeCode", success=True, data=output)

@ -886,12 +886,17 @@ def _registerMediaTools(registry: ToolRegistry, services):
        readOnly=True
    )

    from modules.serviceCenter.services.serviceAgent.sandboxExecutor import SANDBOX_ALLOWED_MODULES
    moduleList = ", ".join(sorted(SANDBOX_ALLOWED_MODULES | {"io"}))
    registry.register(
        "executeCode", _executeCode,
        description=(
            "Execute Python code in a sandboxed environment for calculations and data analysis. "
            "Available modules: math, statistics, json, csv, re, datetime, collections, itertools, functools, decimal, fractions, random. "
            "No file system, network, or OS access. Max 30s execution time. "
            f"Execute Python code in a sandboxed environment for calculations and data analysis. "
            f"Available modules: {moduleList}. "
            "io is restricted to StringIO and BytesIO only (no file access). "
            "Built-in readFile(fileId) returns UTF-8 content of a workspace file by its file ID "
            "(use the 'file id' from tool outputs, e.g. data = readFile('019af...')). "
            "No other file system, network, or OS access. Max 30s execution time. "
            "Use print() to produce output."
        ),
        parameters={

@ -312,52 +312,7 @@ def _registerWorkspaceTools(registry: ToolRegistry, services):
            fiId = context.get("featureInstanceId") or (services.featureInstanceId if services else "")
            if fiId:
                dbMgmt.updateFile(fileItem.id, {"featureInstanceId": fiId})
            if args.get("groupId"):
                try:
                    appIface = chatService.interfaceDbApp
                    existing = appIface.getTableGrouping("files/list")
                    nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
                    def _addToGroup(nds, gid, fid):
                        for nd in nds:
                            nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
                            if nid == gid:
                                ids = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
                                if fid not in ids:
                                    ids.append(fid)
                                if isinstance(nd, dict):
                                    nd["itemIds"] = ids
                                return True
                            if _addToGroup(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", []), gid, fid):
                                return True
                        return False
                    _addToGroup(nodes, args["groupId"], fileItem.id)
                    appIface.upsertTableGrouping("files/list", nodes)
                except Exception as _ge:
                    logger.warning(f"writeFile: failed to add file to group {args['groupId']}: {_ge}")
            elif fiId:
                try:
                    appIface = chatService.interfaceDbApp
                    instanceGroupId = await _getOrCreateInstanceGroup(appIface, fiId)
                    if instanceGroupId:
                        existing = appIface.getTableGrouping("files/list")
                        nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
                        def _addToGroup2(nds, gid, fid):
                            for nd in nds:
                                nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
                                if nid == gid:
                                    ids = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
                                    if fid not in ids:
                                        ids.append(fid)
                                    if isinstance(nd, dict):
                                        nd["itemIds"] = ids
                                    return True
                                if _addToGroup2(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", []), gid, fid):
                                    return True
                            return False
                        _addToGroup2(nodes, instanceGroupId, fileItem.id)
                        appIface.upsertTableGrouping("files/list", nodes)
                except Exception as _ge:
                    logger.warning(f"writeFile: failed to add file to instance group for {fiId}: {_ge}")
            # File group tree removed — groupId arg and instance-group assignment no longer apply
            if args.get("tags"):
                dbMgmt.updateFile(fileItem.id, {"tags": args["tags"]})

@ -746,136 +701,7 @@ def _registerWorkspaceTools(registry: ToolRegistry, services):
        readOnly=False
    )

    # ---- Group tools (replaces folder-based tools) ----

    async def _listGroups(args: Dict[str, Any], context: Dict[str, Any]):
        contextKey = args.get("contextKey", "files/list")
        try:
            chatService = services.chat
            appInterface = chatService.interfaceDbApp
            existing = appInterface.getTableGrouping(contextKey)
            if not existing:
                return ToolResult(toolCallId="", toolName="listGroups", success=True, data="No groups found.")

            def _flatten(nodes, depth=0):
                result = []
                for n in nodes:
                    nd = n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n))
                    result.append({"id": nd.get("id"), "name": nd.get("name"), "depth": depth, "itemCount": len(nd.get("itemIds", []))})
                    result.extend(_flatten(nd.get("subGroups", []), depth + 1))
                return result

            groups = _flatten(existing.rootGroups)
            lines = "\n".join(
                f"{' ' * g['depth']}- {g['name']} (id: {g['id']}, items: {g['itemCount']})"
                for g in groups
            ) if groups else "No groups found."
            return ToolResult(toolCallId="", toolName="listGroups", success=True, data=lines)
        except Exception as e:
            return ToolResult(toolCallId="", toolName="listGroups", success=False, error=str(e))

    async def _listItemsInGroup(args: Dict[str, Any], context: Dict[str, Any]):
        groupId = args.get("groupId", "")
        contextKey = args.get("contextKey", "files/list")
        if not groupId:
            return ToolResult(toolCallId="", toolName="listItemsInGroup", success=False, error="groupId is required")
        try:
            from modules.routes.routeHelpers import _collectItemIds
            chatService = services.chat
            appInterface = chatService.interfaceDbApp
            existing = appInterface.getTableGrouping(contextKey)
            if not existing:
                return ToolResult(toolCallId="", toolName="listItemsInGroup", success=True, data="No groups found.")
            nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in existing.rootGroups]
            ids = _collectItemIds(nodes, groupId)
            itemList = list(ids) if ids else []
            return ToolResult(
                toolCallId="", toolName="listItemsInGroup", success=True,
                data="\n".join(f"- {fid}" for fid in itemList) if itemList else "No items in group.",
            )
        except Exception as e:
            return ToolResult(toolCallId="", toolName="listItemsInGroup", success=False, error=str(e))

    async def _addItemsToGroup(args: Dict[str, Any], context: Dict[str, Any]):
        groupId = args.get("groupId", "")
        itemIds = args.get("itemIds", [])
        contextKey = args.get("contextKey", "files/list")
        if not groupId:
            return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error="groupId is required")
        if not itemIds:
            return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error="itemIds is required")
        try:
            chatService = services.chat
            appInterface = chatService.interfaceDbApp
            existing = appInterface.getTableGrouping(contextKey)
            nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]

            def _add(nds):
                for nd in nds:
                    nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
                    if nid == groupId:
                        existing_ids = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
                        for fid in itemIds:
                            if fid not in existing_ids:
                                existing_ids.append(fid)
                        if isinstance(nd, dict):
                            nd["itemIds"] = existing_ids
                        return True
                    if _add(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", [])):
                        return True
                return False

            found = _add(nodes)
            if not found:
                return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error=f"Group {groupId} not found")
            appInterface.upsertTableGrouping(contextKey, nodes)
            return ToolResult(
                toolCallId="", toolName="addItemsToGroup", success=True,
                data=f"Added {len(itemIds)} item(s) to group {groupId}",
            )
        except Exception as e:
            return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error=str(e))

    registry.register(
        "listGroups", _listGroups,
        description="List all groups in the file grouping tree. Groups replace folders for organising files.",
        parameters={
            "type": "object",
            "properties": {
                "contextKey": {"type": "string", "description": "Grouping context key (default: 'files/list')"},
            }
        },
        readOnly=True
    )

    registry.register(
        "listItemsInGroup", _listItemsInGroup,
        description="List all file IDs assigned to a specific group (includes sub-groups recursively).",
        parameters={
            "type": "object",
            "properties": {
                "groupId": {"type": "string", "description": "The group ID to inspect"},
                "contextKey": {"type": "string", "description": "Grouping context key (default: 'files/list')"},
            },
            "required": ["groupId"]
        },
        readOnly=True
    )

    registry.register(
        "addItemsToGroup", _addItemsToGroup,
        description="Add one or more file IDs to an existing group.",
        parameters={
            "type": "object",
            "properties": {
                "groupId": {"type": "string", "description": "The group ID to add files to"},
                "itemIds": {"type": "array", "items": {"type": "string"}, "description": "List of file IDs to add"},
                "contextKey": {"type": "string", "description": "Grouping context key (default: 'files/list')"},
            },
            "required": ["groupId", "itemIds"]
        },
        readOnly=False
    )
    # Group tree tools removed — file grouping now uses view-based display grouping (TableListView)

    registry.register(
        "replaceInFile", _replaceInFile,

@ -69,7 +69,15 @@ class _ServicesAdapter:

    @property
    def workflow(self):
        return self._context.workflow
        return getattr(self, "_workflow_override", None) or self._context.workflow

    @workflow.setter
    def workflow(self, value):
        self._workflow_override = value
        try:
            self._context.workflow = value
        except (AttributeError, TypeError):
            pass

    @property
    def ai(self):

@ -95,6 +103,13 @@ class _ServicesAdapter:
    def extraction(self):
        return self._getService("extraction")

    @property
    def interfaceDbComponent(self):
        try:
            return self.chat.interfaceDbComponent
        except Exception:
            return None

    @property
    def rbac(self):
        """Same RbacClass as workflow hub (MethodBase permission checks during discoverMethods)."""

@ -10,8 +10,8 @@ from typing import Dict, Any

logger = logging.getLogger(__name__)

_PYTHON_ALLOWED_MODULES = {
    "math", "statistics", "json", "csv", "re", "datetime",
SANDBOX_ALLOWED_MODULES = {
    "math", "statistics", "json", "csv", "re", "datetime", "time",
    "collections", "itertools", "functools", "decimal", "fractions",
    "random", "string", "textwrap", "operator", "copy",
}

@ -19,17 +19,33 @@ _PYTHON_ALLOWED_MODULES = {
_PYTHON_BLOCKED_BUILTINS = {
    "open", "exec", "eval", "compile", "__import__", "globals", "locals",
    "getattr", "setattr", "delattr", "breakpoint", "exit", "quit",
    "input", "memoryview", "type",
    "input", "memoryview",
}

_MAX_EXECUTION_TIME_S = 30
_MAX_OUTPUT_CHARS = 50000


_RESTRICTED_IO = None

def _getRestrictedIo():
    """Return a restricted ``io`` module exposing only StringIO/BytesIO."""
    global _RESTRICTED_IO
    if _RESTRICTED_IO is None:
        import types
        m = types.ModuleType("io")
        m.StringIO = io.StringIO
        m.BytesIO = io.BytesIO
        _RESTRICTED_IO = m
    return _RESTRICTED_IO


def _safeImport(name, *args, **kwargs):
    """Restricted import that only allows whitelisted modules."""
    if name not in _PYTHON_ALLOWED_MODULES:
        raise ImportError(f"Module '{name}' is not allowed. Permitted: {', '.join(sorted(_PYTHON_ALLOWED_MODULES))}")
    if name == "io":
        return _getRestrictedIo()
    if name not in SANDBOX_ALLOWED_MODULES:
        raise ImportError(f"Module '{name}' is not allowed. Permitted: io (StringIO/BytesIO only), {', '.join(sorted(SANDBOX_ALLOWED_MODULES))}")
    return __builtins__["__import__"](name, *args, **kwargs) if isinstance(__builtins__, dict) else __import__(name, *args, **kwargs)


@ -48,7 +64,7 @@ def _buildRestrictedGlobals() -> Dict[str, Any]:
    safeBuiltins["__name__"] = "__sandbox__"
    safeBuiltins["__builtins__"] = safeBuiltins

    for modName in _PYTHON_ALLOWED_MODULES:
    for modName in SANDBOX_ALLOWED_MODULES:
        try:
            safeBuiltins[modName] = __import__(modName)
        except ImportError:

@ -57,12 +73,27 @@ def _buildRestrictedGlobals() -> Dict[str, Any]:
    return {"__builtins__": safeBuiltins}


async def executePython(code: str) -> Dict[str, Any]:
def _makeReadFile(services):
    """Create a readFile(fileId) closure bound to the current services context."""
    def readFile(fileId: str) -> str:
        mgmt = getattr(services, 'interfaceDbComponent', None) if services else None
        if not mgmt:
            raise RuntimeError("readFile: no file store available in this session")
        data = mgmt.getFileData(str(fileId))
        if data is None:
            raise FileNotFoundError(f"File '{fileId}' not found in workspace")
        return data.decode("utf-8")
    return readFile


async def executePython(code: str, *, services=None) -> Dict[str, Any]:
    """Execute Python code in a restricted sandbox. Returns {success, output, error}."""
    import asyncio

    def _run():
        restrictedGlobals = _buildRestrictedGlobals()
        if services:
            restrictedGlobals["__builtins__"]["readFile"] = _makeReadFile(services)
        capturedOutput = io.StringIO()
        oldStdout = sys.stdout
        oldStderr = sys.stderr

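Behavioral sketch of the sandbox surface after this change, assuming __import__ inside the sandbox is wired to _safeImport as in the rest of this module, and that errors raised in user code come back as success=False:

import asyncio

# Whitelisted module plus the restricted io shim both import fine
ok = asyncio.run(executePython(
    "import io, math\nbuf = io.StringIO()\nbuf.write(str(math.pi))\nprint(buf.getvalue())"
))
print(ok["success"], ok.get("output"))

# Non-whitelisted module: _safeImport raises ImportError inside the sandbox
print(asyncio.run(executePython("import os")).get("error"))

# readFile() is only injected when a services context is passed to executePython
print(asyncio.run(executePython("print(readFile('some-id'))")).get("error"))
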
@ -57,8 +57,7 @@ from .subJsonResponseHandling import JsonResponseHandler
from .subLoopingUseCases import LoopingUseCaseRegistry
from modules.workflows.processing.shared.stateTools import checkWorkflowStopped
from modules.shared.jsonContinuation import getContexts
from modules.shared.jsonUtils import buildContinuationContext, extractJsonString, tryParseJson
from modules.shared.jsonUtils import tryParseJson
from modules.shared.jsonUtils import buildContinuationContext, tryParseJson
from modules.shared.jsonUtils import closeJsonStructures
from modules.shared.jsonUtils import stripCodeFences, normalizeJsonText

@ -142,6 +141,8 @@ class AiCallLooper:
        MAX_MERGE_FAILS = 3
        mergeFailCount = 0  # Global counter for merge failures across entire loop
        lastValidCompletePart = None  # Store last successfully parsed completePart for fallback
        MAX_CONSECUTIVE_EMPTY_RESPONSES = 3
        consecutive_empty_responses = 0

        # Get parent operation ID for iteration operations (parentId should be operationId, not log entry ID)
        parentOperationId = operationId  # Use the parent's operationId directly

@ -284,8 +285,26 @@ class AiCallLooper:
                break

            if not result or not result.strip():
                logger.warning(f"Iteration {iteration}: Empty response, stopping")
                consecutive_empty_responses += 1
                logger.warning(
                    "Iteration %s: Empty AI response (consecutive %s/%s) modelName=%s errorCount=%s",
                    iteration,
                    consecutive_empty_responses,
                    MAX_CONSECUTIVE_EMPTY_RESPONSES,
                    getattr(response, "modelName", None),
                    getattr(response, "errorCount", None),
                )
                if iterationOperationId:
                    self.services.chat.progressLogFinish(iterationOperationId, False)
                if consecutive_empty_responses >= MAX_CONSECUTIVE_EMPTY_RESPONSES:
                    logger.error(
                        "Stopping loop: %s consecutive empty responses from model",
                        consecutive_empty_responses,
                    )
                    break
                continue

            consecutive_empty_responses = 0

            # Check if this is a text response (not document generation)
            # Text responses don't need JSON parsing - return immediately after first successful response

@ -354,9 +373,8 @@ class AiCallLooper:

        if lastValidCompletePart:
            try:
                extracted = extractJsonString(lastValidCompletePart)
                parsed, parseErr, _ = tryParseJson(extracted)
                if parseErr is None and parsed:
                parsed, parseErr, _ = tryParseJson(lastValidCompletePart)
                if parseErr is None:
                    normalized = self._normalizeJsonStructure(parsed, useCase)
                    return json.dumps(normalized, indent=2, ensure_ascii=False)
            except Exception:

@ -384,11 +402,10 @@ class AiCallLooper:
            # This ensures retry iterations use the correct base context
            lastRawResponse = candidateJson

            # Try direct parse of candidate
            # Try direct parse of candidate (same pipeline as structure filling / getContexts)
            try:
                extracted = extractJsonString(candidateJson)
                parsed, parseErr, _ = tryParseJson(extracted)
                if parseErr is None and parsed:
                parsed, parseErr, extracted = tryParseJson(candidateJson)
                if parseErr is None:
                    # Direct parse succeeded - FINISHED
                    # Commit candidate to jsonBase
                    jsonBase = candidateJson

@ -421,21 +438,18 @@ class AiCallLooper:

            # STEP 6: DECIDE based on jsonParsingSuccess and overlapContext
            if contexts.jsonParsingSuccess and contexts.overlapContext == "":
                # JSON is complete (no cut point) - FINISHED
                # Use completePart for final result (closed, repaired JSON)
                # No more merging needed, so we don't need the cut version
                jsonBase = contexts.completePart
                # getContexts and downstream must agree with tryParseJson (same as structure filling).
                logger.info(f"Iteration {iteration}: jsonParsingSuccess=true, overlapContext='', JSON complete")

                # Store and parse completePart
                lastValidCompletePart = contexts.completePart

                try:
                    extracted = extractJsonString(contexts.completePart)
                    parsed, parseErr, _ = tryParseJson(extracted)
                    if parseErr is None and parsed:
                    parsed, parseErr, extracted = tryParseJson(contexts.completePart)
                    if parseErr is not None:
                        raise ValueError(str(parseErr))
                    normalized = self._normalizeJsonStructure(parsed, useCase)
                    result = json.dumps(normalized, indent=2, ensure_ascii=False)
                    jsonBase = contexts.completePart

                    if iterationOperationId:
                        self.services.chat.progressLogFinish(iterationOperationId, True)

@ -448,12 +462,26 @@ class AiCallLooper:
                        result, normalized, extracted, debugPrefix, self.services
                    )
                except Exception as e:
                    logger.warning(f"Iteration {iteration}: Failed to parse completePart: {e}")

                    # Fallback: return completePart as-is
                    logger.warning(
                        f"Iteration {iteration}: completePart not serializable after getContexts success: {e}"
                    )
                    mergeFailCount += 1
                    if mergeFailCount >= MAX_MERGE_FAILS:
                        logger.error(
                            f"Iteration {iteration}: Max failures ({MAX_MERGE_FAILS}) "
                            "after output pipeline mismatch"
                        )
                        if iterationOperationId:
                            self.services.chat.progressLogFinish(iterationOperationId, False)
                        return jsonBase if jsonBase else ""
                    if iterationOperationId:
                        self.services.chat.progressLogUpdate(
                            iterationOperationId,
                            0.7,
                            f"Output pipeline failed ({mergeFailCount}/{MAX_MERGE_FAILS}), retrying",
                        )
                    self.services.chat.progressLogFinish(iterationOperationId, True)
                    return contexts.completePart
                    continue

            elif contexts.jsonParsingSuccess and contexts.overlapContext != "":
                # JSON parseable but has cut point - CONTINUE to next iteration

@ -502,9 +530,8 @@ class AiCallLooper:

        if lastValidCompletePart:
            try:
                extracted = extractJsonString(lastValidCompletePart)
                parsed, parseErr, _ = tryParseJson(extracted)
                if parseErr is None and parsed:
                parsed, parseErr, _ = tryParseJson(lastValidCompletePart)
                if parseErr is None:
                    normalized = self._normalizeJsonStructure(parsed, useCase)
                    return json.dumps(normalized, indent=2, ensure_ascii=False)
            except Exception:

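The new guard in isolation: the counter only trips after three consecutive empty responses, and any non-empty response resets it. A detached sketch of that control flow:

MAX_CONSECUTIVE_EMPTY_RESPONSES = 3
consecutive_empty_responses = 0

for iteration, result in enumerate(["", "  ", "{\"a\": 1}", "", "", ""], start=1):
    if not result or not result.strip():
        consecutive_empty_responses += 1
        if consecutive_empty_responses >= MAX_CONSECUTIVE_EMPTY_RESPONSES:
            print(f"iteration {iteration}: stop after {consecutive_empty_responses} consecutive empty responses")
            break
        continue
    consecutive_empty_responses = 0  # a non-empty response resets the streak

# Prints: iteration 6: stop after 3 consecutive empty responses
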
@ -532,10 +559,36 @@ class AiCallLooper:
        if iteration >= maxIterations:
            logger.warning(f"AI call stopped after maximum iterations ({maxIterations})")

        # Prefer last repaired complete JSON from getContexts (raw `result` is only the last fragment).
        if lastValidCompletePart and useCase and not useCase.requiresExtraction:
            try:
                parsed, parseErr, extracted = tryParseJson(lastValidCompletePart)
                if parseErr is None:
                    normalized = self._normalizeJsonStructure(parsed, useCase)
                    out = json.dumps(normalized, indent=2, ensure_ascii=False)
                    if useCase.finalResultHandler:
                        logger.warning(
                            "callAiWithLooping: max iterations — returning last valid completePart for %r",
                            useCaseId,
                        )
                        return useCase.finalResultHandler(
                            out, normalized, extracted, debugPrefix, self.services
                        )
            except Exception as e:
                logger.debug("Max-iterations fallback on completePart failed: %s", e)

        logger.error(
            "End of callAiWithLooping without success for use case %r (iterations=%s, lastResultLen=%s)",
            useCaseId,
            iteration,
            len(result) if isinstance(result, str) else 0,
        )
        return result if result else ""

    def _isJsonStringIncomplete(self, jsonString: str) -> bool:

@ -54,6 +54,15 @@ def _handleCodeContentFinalResult(result: str, parsedJsonForUseCase: Any, extrac
    return final_json


def _lift_section_plain_text(d: Dict[str, Any]) -> Optional[str]:
    """Models often return {\"text\": \"...\"} without an elements array; extract usable prose."""
    for key in ("text", "body", "summary", "response", "output", "answer", "message", "content"):
        v = d.get(key)
        if isinstance(v, str) and v.strip():
            return v.strip()
    return None


def _normalizeSectionContentJson(parsed: Any, useCaseId: str) -> Any:
    """Normalize JSON structure for section_content use case."""
    # For section_content, expect {"elements": [...]} structure

@ -77,14 +86,28 @@ def _normalizeSectionContentJson(parsed: Any, useCaseId: str) -> Any:
        # Convert plain list of elements to elements structure
        return {"elements": parsed}
    elif isinstance(parsed, dict):
        # If it already has "elements", return as-is
        if "elements" in parsed:
            els = parsed.get("elements")
            if isinstance(els, list) and len(els) > 0:
                return parsed
        # If it has "type" and looks like an element, wrap in elements array
        elif parsed.get("type"):
            lifted = _lift_section_plain_text(parsed)
            if lifted:
                out = dict(parsed)
                out["elements"] = [{"type": "paragraph", "content": {"text": lifted}}]
                logger.info(
                    "section_content: promoted plain-text field to elements (%d chars)",
                    len(lifted),
                )
                return out
            return parsed
        if parsed.get("type"):
            return {"elements": [parsed]}
        # Otherwise, assume it's already in correct format
        else:
            lifted = _lift_section_plain_text(parsed)
            if lifted:
                return {
                    **parsed,
                    "elements": [{"type": "paragraph", "content": {"text": lifted}}],
                }
            return parsed

    # For other use cases, return as-is (they have their own structures)

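A worked example of the plain-text promotion (run against this module's namespace): a bare {"text": ...} dict without type or elements gains a single paragraph element, while a dict that already carries a non-empty elements list passes through unchanged.

payload = {"text": "Quarterly revenue grew 12 percent."}
out = _normalizeSectionContentJson(payload, "section_content")
print(out["elements"])
# [{'type': 'paragraph', 'content': {'text': 'Quarterly revenue grew 12 percent.'}}]

already = {"elements": [{"type": "heading", "content": {"text": "Intro"}}]}
assert _normalizeSectionContentJson(already, "section_content") is already
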
@ -27,6 +27,36 @@ class _AiResponseFallback:
logger = logging.getLogger(__name__)


def _elements_from_section_content_ai_json(parsed: Any) -> List[Any]:
    """Normalize section_content AI JSON (incl. models that return {\"text\": ...}) into elements."""
    from modules.serviceCenter.services.serviceAi.subLoopingUseCases import _normalizeSectionContentJson

    if parsed is None:
        return []
    if isinstance(parsed, dict):
        has_nonempty_elements = (
            isinstance(parsed.get("elements"), list) and len(parsed["elements"]) > 0
        )
        if not has_nonempty_elements:
            # Valid full-document envelope (same normalized shape the renderer uses elsewhere)
            docs = parsed.get("documents")
            if isinstance(docs, list) and docs and isinstance(docs[0], dict):
                secs = docs[0].get("sections")
                if isinstance(secs, list) and secs and isinstance(secs[0], dict):
                    parsed = secs[0]
            elif (
                isinstance(parsed.get("sections"), list)
                and parsed["sections"]
                and isinstance(parsed["sections"][0], dict)
            ):
                parsed = parsed["sections"][0]
        norm = _normalizeSectionContentJson(parsed, "section_content")
        if isinstance(norm, dict):
            els = norm.get("elements")
            return list(els) if isinstance(els, list) else []
    return []


class StructureFiller:
    """Handles filling document structure with content."""

@ -524,36 +554,10 @@ class StructureFiller:
                if generatedElements:
                    elements.extend(generatedElements)
                else:
                    # Fallback: Try to parse JSON response directly with repair logic
                    try:
                        from modules.shared.jsonUtils import tryParseJson, repairBrokenJson

                        # Use tryParseJson which handles extraction and basic parsing
                        fallbackElements, parseError, cleanedStr = tryParseJson(aiResponse.content)

                        # If parsing failed, try repair
                        if parseError and isinstance(aiResponse.content, str):
                            logger.warning(f"Initial JSON parse failed for section {sectionId}, attempting repair: {str(parseError)}")
                            repairedJson = repairBrokenJson(aiResponse.content)
                            if repairedJson:
                                fallbackElements = repairedJson
                                parseError = None
                                logger.info(f"Successfully repaired JSON for section {sectionId}")

                        if parseError:
                            raise parseError

                        if isinstance(fallbackElements, list):
                            elements.extend(fallbackElements)
                        elif isinstance(fallbackElements, dict) and "elements" in fallbackElements:
                            elements.extend(fallbackElements["elements"])
                        elif isinstance(fallbackElements, dict) and fallbackElements.get("type"):
                            elements.append(fallbackElements)
                    except (json.JSONDecodeError, ValueError) as json_error:
                        logger.error(f"Error parsing JSON response for section {sectionId}: {str(json_error)}")
                    logger.error(f"No elements produced for section {sectionId} (callAiWithLooping must return parseable JSON)")
                    elements.append({
                        "type": "error",
                        "message": f"Failed to parse JSON response: {str(json_error)}",
                        "message": f"No parsed content for section {sectionId}",
                        "sectionId": sectionId
                    })

@ -671,7 +675,7 @@ class StructureFiller:
        try:
            self.services.chat.progressLogUpdate(sectionOperationId, 0.4, "Calling AI for content generation")

            operationType = OperationTypeEnum.DATA_ANALYSE
            operationType = OperationTypeEnum.DATA_GENERATE
            options = AiCallOptions(
                operationType=operationType,
                priority=PriorityEnum.BALANCED,

@ -703,22 +707,17 @@ class StructureFiller:
            )

            try:
                from modules.shared.jsonUtils import tryParseJson, repairBrokenJson
                from modules.shared.jsonUtils import tryParseJson

                if isinstance(aiResponseJson, str) and ("---" in aiResponseJson or aiResponseJson.count("```json") > 1):
                    generatedElements = self._extractAndMergeMultipleJsonBlocks(aiResponseJson, contentType, sectionId)
                else:
                    parsedResponse, parseError, cleanedStr = tryParseJson(aiResponseJson)
                    if parsedResponse is None:
                        logger.warning(f"Section {sectionId}: tryParseJson failed, attempting repair")
                        repairedStr = repairBrokenJson(aiResponseJson)
                        parsedResponse, parseError2, _ = tryParseJson(repairedStr)

                    if parsedResponse and isinstance(parsedResponse, dict):
                        generatedElements = parsedResponse.get("elements", [])
                    elif parsedResponse and isinstance(parsedResponse, list):
                        generatedElements = parsedResponse
                    else:
                    parsedResponse, parseError, _ = tryParseJson(aiResponseJson)
                    if parseError is not None:
                        logger.error(f"Section {sectionId}: tryParseJson failed: {parseError}")
                        generatedElements = []
                    else:
                        generatedElements = _elements_from_section_content_ai_json(parsedResponse)
            except Exception as parseErr:
                logger.error(f"Section {sectionId}: JSON parse error: {parseErr}")
                generatedElements = []

@ -930,7 +929,7 @@ class StructureFiller:

        self.services.chat.progressLogUpdate(sectionOperationId, 0.4, "Calling AI for content generation")

        operationType = OperationTypeEnum.IMAGE_GENERATE if contentType == "image" else OperationTypeEnum.DATA_ANALYSE
        operationType = OperationTypeEnum.IMAGE_GENERATE if contentType == "image" else OperationTypeEnum.DATA_GENERATE

        if operationType == OperationTypeEnum.IMAGE_GENERATE:
            maxPromptLength = 4000

@ -996,43 +995,16 @@ class StructureFiller:
            )

            try:
                # Use tryParseJson which handles extraction and basic parsing
                from modules.shared.jsonUtils import tryParseJson, repairBrokenJson
                from modules.shared.jsonUtils import tryParseJson

                # Check if response contains multiple JSON blocks (separated by --- or multiple ```json blocks)
                # This can happen when AI returns multiple complete responses
                if isinstance(aiResponseJson, str) and ("---" in aiResponseJson or aiResponseJson.count("```json") > 1):
                    logger.info(f"Section {sectionId}: Detected multiple JSON blocks in response, attempting to merge")
                    generatedElements = self._extractAndMergeMultipleJsonBlocks(aiResponseJson, contentType, sectionId)
                else:
                    parsedResponse, parseError, cleanedStr = tryParseJson(aiResponseJson)

                    # If parsing failed, try repair
                    if parseError and isinstance(aiResponseJson, str):
                        logger.warning(f"Initial JSON parse failed for section {sectionId}, attempting repair: {str(parseError)}")
                        repairedJson = repairBrokenJson(aiResponseJson)
                        if repairedJson:
                            parsedResponse = repairedJson
                            parseError = None
                            logger.info(f"Successfully repaired JSON for section {sectionId}")

                    if parseError:
                    parsedResponse, parseError, _ = tryParseJson(aiResponseJson)
                    if parseError is not None:
                        raise parseError

                    if isinstance(parsedResponse, list):
                        generatedElements = parsedResponse
                    elif isinstance(parsedResponse, dict):
                        if "elements" in parsedResponse:
                            generatedElements = parsedResponse["elements"]
                        elif "sections" in parsedResponse and len(parsedResponse["sections"]) > 0:
                            firstSection = parsedResponse["sections"][0]
                            generatedElements = firstSection.get("elements", [])
                        elif parsedResponse.get("type"):
                            generatedElements = [parsedResponse]
                        else:
                            generatedElements = []
                    else:
                        generatedElements = []
                    generatedElements = _elements_from_section_content_ai_json(parsedResponse)

                aiResponse = _AiResponseFallback(aiResponseJson)
            except Exception as parseError:

@ -1112,7 +1084,7 @@ class StructureFiller:

        self.services.chat.progressLogUpdate(sectionOperationId, 0.4, "Calling AI for content generation")

        operationType = OperationTypeEnum.IMAGE_GENERATE if contentType == "image" else OperationTypeEnum.DATA_ANALYSE
        operationType = OperationTypeEnum.IMAGE_GENERATE if contentType == "image" else OperationTypeEnum.DATA_GENERATE

        if operationType == OperationTypeEnum.IMAGE_GENERATE:
            maxPromptLength = 4000

@ -1135,6 +1107,7 @@ class StructureFiller:
                processingMode=ProcessingModeEnum.DETAILED
            )
        )
        checkWorkflowStopped(self.services)
        aiResponse = await self.aiService.callAi(request)
        generatedElements = []

@ -1179,22 +1152,16 @@ class StructureFiller:
            )

            try:
                parsedResponse = json.loads(self.services.utils.jsonExtractString(aiResponseJson))
                if isinstance(parsedResponse, list):
                    generatedElements = parsedResponse
                elif isinstance(parsedResponse, dict):
                    if "elements" in parsedResponse:
                        generatedElements = parsedResponse["elements"]
                    elif "sections" in parsedResponse and len(parsedResponse["sections"]) > 0:
                        firstSection = parsedResponse["sections"][0]
                        generatedElements = firstSection.get("elements", [])
                    elif parsedResponse.get("type"):
                        generatedElements = [parsedResponse]
                    else:
                        generatedElements = []
                else:
                    generatedElements = []
                from modules.shared.jsonUtils import tryParseJson

                parsedResponse, parseError, _ = tryParseJson(aiResponseJson)
                if parseError is not None:
                    logger.error(
                        f"Error parsing response from _callAiWithLooping for section {sectionId}: {parseError}"
                    )
                    generatedElements = []
                else:
                    generatedElements = _elements_from_section_content_ai_json(parsedResponse)
                aiResponse = _AiResponseFallback(aiResponseJson)
            except Exception as parseError:
                logger.error(f"Error parsing response from _callAiWithLooping for section {sectionId}: {str(parseError)}")

@ -1371,7 +1338,7 @@ class StructureFiller:

        self.services.chat.progressLogUpdate(sectionOperationId, 0.4, "Calling AI for content generation")

        operationType = OperationTypeEnum.IMAGE_GENERATE if contentType == "image" else OperationTypeEnum.DATA_ANALYSE
        operationType = OperationTypeEnum.IMAGE_GENERATE if contentType == "image" else OperationTypeEnum.DATA_GENERATE

        if operationType == OperationTypeEnum.IMAGE_GENERATE:
            maxPromptLength = 4000

@ -1439,22 +1406,16 @@ class StructureFiller:
            )

            try:
                parsedResponse = json.loads(self.services.utils.jsonExtractString(aiResponseJson))
                if isinstance(parsedResponse, list):
                    generatedElements = parsedResponse
                elif isinstance(parsedResponse, dict):
                    if "elements" in parsedResponse:
                        generatedElements = parsedResponse["elements"]
                    elif "sections" in parsedResponse and len(parsedResponse["sections"]) > 0:
                        firstSection = parsedResponse["sections"][0]
                        generatedElements = firstSection.get("elements", [])
                    elif parsedResponse.get("type"):
                        generatedElements = [parsedResponse]
                    else:
                        generatedElements = []
                else:
                    generatedElements = []
                from modules.shared.jsonUtils import tryParseJson

                parsedResponse, parseError, _ = tryParseJson(aiResponseJson)
                if parseError is not None:
                    logger.error(
                        f"Error parsing response from _callAiWithLooping for section {sectionId}: {parseError}"
                    )
                    generatedElements = []
                else:
                    generatedElements = _elements_from_section_content_ai_json(parsedResponse)
                aiResponse = _AiResponseFallback(aiResponseJson)
            except Exception as parseError:
                logger.error(f"Error parsing response from _callAiWithLooping for section {sectionId}: {str(parseError)}")

@ -90,8 +90,7 @@ class StructureGenerator:
        )

        try:
            # Build the chapter-structure prompt with content index
            structurePrompt = self._buildChapterStructurePrompt(
            structurePrompt, templateStructure = self._buildChapterStructurePrompt(
                userPrompt=userPrompt,
                contentParts=contentParts,
                outputFormat=outputFormat

@ -108,12 +107,6 @@ class StructureGenerator:
                resultFormat="json"
            )

            structurePrompt, templateStructure = self._buildChapterStructurePrompt(
                userPrompt=userPrompt,
                contentParts=contentParts,
                outputFormat=outputFormat
            )

            # Create prompt builder for continuation support
            async def buildChapterStructurePromptWithContinuation(
                continuationContext: Any,

@ -196,6 +189,13 @@ CRITICAL:
                contentParts=None  # Do not pass ContentParts - only metadata needed, not content extraction
            )

            if not isinstance(aiResponseJson, str) or not aiResponseJson.strip():
                raise ValueError(
                    "Structure generation returned no JSON text from the model (empty response after retries). "
                    "Check the AI provider, allowed models, billing, and debug artifact "
                    "'chapter_structure_generation_response'."
                )

            # Parse the complete JSON response (looping system already handles completion)
            extractedJson = self.services.utils.jsonExtractString(aiResponseJson)
            parsedJson, parseError, cleanedJson = self.services.utils.jsonTryParse(extractedJson)

@ -215,7 +215,12 @@ CRITICAL:
                    raise ValueError(f"Failed to parse JSON structure after repair: {str(parseError)}")
                else:
                    logger.error(f"Failed to repair JSON. Parse error: {str(parseError)}")
                    logger.error(f"Cleaned JSON preview (first 500 chars): {cleanedJson[:500]}")
                    raw_preview = (extractedJson or "")[:500]
                    logger.error(
                        "Raw extract preview (first 500 chars): %r",
                        raw_preview,
                    )
                    logger.error(f"Cleaned JSON preview (first 500 chars): {cleanedJson[:500]!r}")
                    raise ValueError(f"Failed to parse JSON structure: {str(parseError)}")
            else:
                structure = parsedJson

@ -23,7 +23,11 @@ class ChatService:
        from modules.interfaces.interfaceDbManagement import getInterface as getComponentInterface
        from modules.interfaces.interfaceDbChat import getInterface as getChatInterface
        self.interfaceDbApp = getAppInterface(context.user, mandateId=context.mandate_id)
        self.interfaceDbComponent = getComponentInterface(context.user, mandateId=context.mandate_id)
        self.interfaceDbComponent = getComponentInterface(
            context.user,
            mandateId=context.mandate_id,
            featureInstanceId=context.feature_instance_id,
        )
        self.interfaceDbChat = getChatInterface(
            context.user,
            mandateId=context.mandate_id,

@ -36,6 +40,26 @@ class ChatService:
        """Workflow from context (stable during workflow execution)."""
        return self._context.workflow

    def _chat_document_from_management_file(self, file_id: str) -> Optional[ChatDocument]:
        """Build a ChatDocument when docItem references a management FileItem (e.g. automation uploads) without a chat message."""
        try:
            fi = self.interfaceDbComponent.getFile(file_id)
        except Exception as e:
            logger.debug("getFile(%s) failed: %s", file_id, e)
            return None
        if fi is None:
            return None
        wf = self._workflow
        wf_id = wf.id if wf else "no-workflow"
        return ChatDocument(
            id=file_id,
            messageId=f"_filestore:{wf_id}",
            fileId=fi.id,
            fileName=fi.fileName or "document",
            fileSize=int(fi.fileSize or 0),
            mimeType=fi.mimeType or "application/octet-stream",
        )

    def getChatDocumentsFromDocumentList(self, documentList) -> List[ChatDocument]:
        """Get ChatDocuments from a DocumentReferenceList.

@ -126,14 +150,28 @@ class ChatService:

            if message.documents:
                for doc in message.documents:
                    if doc.id == docId:
                    if doc.id == docId or getattr(doc, "fileId", None) == docId:
                        allDocuments.append(doc)
                        docFound = True
                        logger.debug(f"Matched document reference '{docRef}' to document {doc.id} (fileName: {getattr(doc, 'fileName', 'unknown')}) by documentId")
                        logger.debug(
                            f"Matched document reference '{docRef}' to document {doc.id} "
                            f"(fileName: {getattr(doc, 'fileName', 'unknown')}) by id/fileId"
                        )
                        break
                if docFound:
                    break

        if not docFound:
            synth = self._chat_document_from_management_file(docId)
            if synth is not None:
                allDocuments.append(synth)
                docFound = True
                logger.info(
                    "Resolved document reference %r via FileItem %s (automation / transient workflow)",
                    docRef,
                    docId,
                )

        # Fallback: If not found by documentId and it looks like a filename (has file extension), try filename matching
        # This handles cases where AI incorrectly generates docItem:filename.docx
        if not docFound and '.' in docId and len(parts) == 2:

@ -485,33 +523,11 @@ class ChatService:
        return results

    def listGroups(self, contextKey: str = "files/list") -> list:
        """List all groups in the groupTree for the current context."""
        try:
            existing = self.interfaceDbApp.getTableGrouping(contextKey)
            if not existing:
                return []
            def _flatten(nodes, depth=0):
                result = []
                for n in nodes:
                    nd = n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n))
                    result.append({"id": nd.get("id"), "name": nd.get("name"), "depth": depth, "itemCount": len(nd.get("itemIds", []))})
                    result.extend(_flatten(nd.get("subGroups", []), depth + 1))
                return result
            return _flatten(existing.rootGroups)
        except Exception as e:
        """Stub — file group tree removed. Returns empty list."""
        return []

    def listFilesInGroup(self, groupId: str, contextKey: str = "files/list") -> list:
        """List file IDs in a specific group (recursive)."""
        try:
            from modules.routes.routeHelpers import _collectItemIds
            existing = self.interfaceDbApp.getTableGrouping(contextKey)
            if not existing:
                return []
            nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in existing.rootGroups]
            ids = _collectItemIds(nodes, groupId)
            return list(ids) if ids else []
        except Exception:
        """Stub — file group tree removed. Returns empty list."""
        return []

    # ---- DataSource CRUD ----

@ -166,12 +166,28 @@ class ClickupService:
        page: int = 0,
        include_closed: bool = False,
        subtasks: bool = True,
        dateCreatedGt: Optional[int] = None,
        dateCreatedLt: Optional[int] = None,
        dateUpdatedGt: Optional[int] = None,
        dateUpdatedLt: Optional[int] = None,
        customFields: Optional[List[Dict[str, Any]]] = None,
    ) -> Dict[str, Any]:
        params: Dict[str, Any] = {
            "page": page,
            "subtasks": str(subtasks).lower(),
            "include_closed": str(include_closed).lower(),
        }
        if dateCreatedGt is not None:
            params["date_created_gt"] = dateCreatedGt
        if dateCreatedLt is not None:
            params["date_created_lt"] = dateCreatedLt
        if dateUpdatedGt is not None:
            params["date_updated_gt"] = dateUpdatedGt
        if dateUpdatedLt is not None:
            params["date_updated_lt"] = dateUpdatedLt
        if customFields:
            import json as _json
            params["custom_fields"] = _json.dumps(customFields)
        return await self._request("GET", f"/list/{list_id}/task", params=params)

    async def getTask(self, task_id: str, *, include_subtasks: bool = True) -> Dict[str, Any]:

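Call-site sketch for the new filters. ClickUp's list endpoint expects the date_* values as Unix epoch milliseconds and custom_fields as a JSON-encoded array; treat the exact custom-field operator set as an assumption to verify against the ClickUp API docs.

import asyncio, time

async def main():
    now_ms = int(time.time() * 1000)
    week_ms = 7 * 24 * 3600 * 1000

    # Tasks created in the last 7 days, including closed ones,
    # narrowed by one custom field (assumed field id / operator)
    tasks = await clickup.getTasks(
        "list-123",  # placeholder list id; 'clickup' is a ClickupService instance
        include_closed=True,
        dateCreatedGt=now_ms - week_ms,
        customFields=[{"field_id": "abc123", "operator": "=", "value": "42"}],
    )
    print(len(tasks.get("tasks", [])))

asyncio.run(main())
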
@ -79,7 +79,15 @@ class RendererCodeCsv(BaseCodeRenderer):

        return renderedDocs

    async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
    async def render(
        self,
        extractedContent: Dict[str, Any],
        title: str,
        userPrompt: str = None,
        aiService=None,
        *,
        style: Dict[str, Any] = None,
    ) -> List[RenderedDocument]:
        """
        Render method for document generation compatibility.
        Delegates to document renderer if needed, or handles code files directly.

@ -94,7 +102,7 @@ class RendererCodeCsv(BaseCodeRenderer):
            # Document generation path - delegate to document renderer
            from .rendererCsv import RendererCsv
            documentRenderer = RendererCsv(self.services)
            return await documentRenderer.render(extractedContent, title, userPrompt, aiService)
            return await documentRenderer.render(extractedContent, title, userPrompt, aiService, style=style)

    def _validateAndFixCsv(self, content: str) -> str:
        """Validate CSV structure and fix common issues."""

@ -91,7 +91,15 @@ class RendererCodeJson(BaseCodeRenderer):

        return renderedDocs

    async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
    async def render(
        self,
        extractedContent: Dict[str, Any],
        title: str,
        userPrompt: str = None,
        aiService=None,
        *,
        style: Dict[str, Any] = None,
    ) -> List[RenderedDocument]:
        """
        Render method for document generation compatibility.
        Delegates to document renderer if needed, or handles code files directly.

@ -107,7 +115,7 @@ class RendererCodeJson(BaseCodeRenderer):
            # Import here to avoid circular dependency
            from .rendererJson import RendererJson
            documentRenderer = RendererJson(self.services)
            return await documentRenderer.render(extractedContent, title, userPrompt, aiService)
            return await documentRenderer.render(extractedContent, title, userPrompt, aiService, style=style)

    def _extractJsonStatistics(self, parsed: Any) -> Dict[str, Any]:
        """Extract JSON statistics for validation (object count, array count, key count)."""

@ -78,11 +78,20 @@ class RendererCodeXml(BaseCodeRenderer):
|
|||
|
||||
return renderedDocs
|
||||
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
|
||||
async def render(
|
||||
self,
|
||||
extractedContent: Dict[str, Any],
|
||||
title: str,
|
||||
userPrompt: str = None,
|
||||
aiService=None,
|
||||
*,
|
||||
style: Dict[str, Any] = None,
|
||||
) -> List[RenderedDocument]:
|
||||
"""
|
||||
Render method for document generation compatibility.
|
||||
For XML, we only support code generation (no document renderer exists yet).
|
||||
"""
|
||||
_ = style
|
||||
# Check if this is code generation (has files array)
|
||||
if "files" in extractedContent:
|
||||
# Code generation path - use renderCodeFiles
|
||||
|
|
|
|||
|
|
@ -39,8 +39,27 @@ class RendererCsv(BaseRenderer):
|
|||
"""
|
||||
return ["table", "code_block"]
|
||||
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
|
||||
<<<<<<< HEAD
|
||||
=======
|
||||
<<<<<<< HEAD
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
|
||||
=======
|
||||
>>>>>>> 875f8252 (ValueOn Lead to Offer durchgespielt, bugfixes in Dateigenerierung und ai nodes)
|
||||
async def render(
|
||||
self,
|
||||
extractedContent: Dict[str, Any],
|
||||
title: str,
|
||||
userPrompt: str = None,
|
||||
aiService=None,
|
||||
*,
|
||||
style: Dict[str, Any] = None,
|
||||
) -> List[RenderedDocument]:
|
||||
<<<<<<< HEAD
|
||||
=======
|
||||
>>>>>>> 0659d0d2 (ValueOn Lead to Offer durchgespielt, bugfixes in Dateigenerierung und ai nodes)
|
||||
>>>>>>> 875f8252 (ValueOn Lead to Offer durchgespielt, bugfixes in Dateigenerierung und ai nodes)
|
||||
"""Render extracted JSON content to CSV format. Produces one CSV file per table section."""
|
||||
_ = style
|
||||
try:
|
||||
# Validate JSON structure
|
||||
if not self._validateJsonStructure(extractedContent):
|
||||
|
|
|
|||
|
|
@ -43,8 +43,17 @@ class RendererImage(BaseRenderer):
|
|||
"""
|
||||
return ["image"]
|
||||
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
|
||||
async def render(
|
||||
self,
|
||||
extractedContent: Dict[str, Any],
|
||||
title: str,
|
||||
userPrompt: str = None,
|
||||
aiService=None,
|
||||
*,
|
||||
style: Dict[str, Any] = None,
|
||||
) -> List[RenderedDocument]:
|
||||
"""Render extracted JSON content to image format using AI image generation."""
|
||||
_ = style
|
||||
try:
|
||||
# Generate AI image from content
|
||||
imageContent = await self._generateAiImage(extractedContent, title, userPrompt, aiService)
|
||||
|
|
|
|||
|
|
@ -42,8 +42,17 @@ class RendererJson(BaseRenderer):
|
|||
# Return all types except image
|
||||
return [st for st in supportedSectionTypes if st != "image"]
|
||||
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
|
||||
async def render(
|
||||
self,
|
||||
extractedContent: Dict[str, Any],
|
||||
title: str,
|
||||
userPrompt: str = None,
|
||||
aiService=None,
|
||||
*,
|
||||
style: Dict[str, Any] = None,
|
||||
) -> List[RenderedDocument]:
|
||||
"""Render extracted JSON content to JSON format."""
|
||||
_ = style
|
||||
try:
|
||||
# The extracted content should already be JSON from the AI
|
||||
# Just validate and format it
|
||||
|
|
|
|||
|
|
@ -40,8 +40,17 @@ class RendererMarkdown(BaseRenderer):
|
|||
from modules.datamodels.datamodelJson import supportedSectionTypes
|
||||
return [st for st in supportedSectionTypes if st != "image"]
|
||||
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
|
||||
async def render(
|
||||
self,
|
||||
extractedContent: Dict[str, Any],
|
||||
title: str,
|
||||
userPrompt: str = None,
|
||||
aiService=None,
|
||||
*,
|
||||
style: Dict[str, Any] = None,
|
||||
) -> List[RenderedDocument]:
|
||||
"""Render extracted JSON content to Markdown format."""
|
||||
_ = style
|
||||
try:
|
||||
# Generate markdown from JSON structure
|
||||
markdownContent = self._generateMarkdownFromJson(extractedContent, title)
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import re
|
|||
|
||||
from .documentRendererBaseTemplate import BaseRenderer
|
||||
from modules.datamodels.datamodelDocument import RenderedDocument
|
||||
from typing import Dict, Any, List, Optional
|
||||
from typing import Dict, Any, List, Optional, Union
|
||||
|
||||
class RendererText(BaseRenderer):
|
||||
"""Renders content to plain text format with format-specific extraction."""
|
||||
|
|
@ -76,8 +76,17 @@ class RendererText(BaseRenderer):
|
|||
# Text renderer accepts all types except images
|
||||
return [st for st in supportedSectionTypes if st != "image"]
|
||||
|
||||
async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
|
||||
async def render(
|
||||
self,
|
||||
extractedContent: Dict[str, Any],
|
||||
title: str,
|
||||
userPrompt: str = None,
|
||||
aiService=None,
|
||||
*,
|
||||
style: Dict[str, Any] = None,
|
||||
) -> List[RenderedDocument]:
|
||||
"""Render extracted JSON content to plain text format."""
|
||||
_ = style # unified style from renderReport; plain text ignores formatting hints
|
||||
try:
|
||||
# Generate text from JSON structure
|
||||
textContent = self._generateTextFromJson(extractedContent, title)
|
||||
|
|
@ -187,8 +196,10 @@ class RendererText(BaseRenderer):
|
|||
textParts.append(f"[Reference: {label}]")
|
||||
continue
|
||||
elif element_type == "extracted_text":
|
||||
# Extracted text format
|
||||
# Extracted text format (str or raw bytes from ContentPart)
|
||||
content = element.get("content", "")
|
||||
if isinstance(content, (bytes, bytearray, memoryview)):
|
||||
content = bytes(content).decode("utf-8", errors="replace")
|
||||
source = element.get("source", "")
|
||||
if content:
|
||||
source_text = f" (Source: {source})" if source else ""
|
||||
|
|
@ -263,16 +274,16 @@ class RendererText(BaseRenderer):
|
|||
textParts = []
|
||||
|
||||
# Create table header
|
||||
headerLine = " | ".join(str(header) for header in headers)
|
||||
headerLine = " | ".join(self._tableCellToPlainText(h) for h in headers)
|
||||
textParts.append(headerLine)
|
||||
|
||||
# Add separator line
|
||||
separatorLine = " | ".join("-" * len(str(header)) for header in headers)
|
||||
separatorLine = " | ".join("-" * len(self._tableCellToPlainText(h)) for h in headers)
|
||||
textParts.append(separatorLine)
|
||||
|
||||
# Add data rows
|
||||
for row in rows:
|
||||
rowLine = " | ".join(str(cellData) for cellData in row)
|
||||
rowLine = " | ".join(self._tableCellToPlainText(cellData) for cellData in row)
|
||||
textParts.append(rowLine)
|
||||
|
||||
return '\n'.join(textParts)
|
||||
|
|
@ -299,6 +310,9 @@ class RendererText(BaseRenderer):
|
|||
textParts.append(f"- {self._stripMarkdownForPlainText(item)}")
|
||||
elif isinstance(item, dict) and "text" in item:
|
||||
textParts.append(f"- {self._stripMarkdownForPlainText(item['text'])}")
|
||||
elif isinstance(item, list):
|
||||
# markdownToDocumentJson: each item is List[InlineRun]
|
||||
textParts.append(f"- {self._inlineRunsToPlainText(item)}")
|
||||
|
||||
return '\n'.join(textParts)
|
||||
|
||||
|
|
@ -311,22 +325,27 @@ class RendererText(BaseRenderer):
|
|||
try:
|
||||
# Extract from nested content structure: element.content.{text, level}
|
||||
content = headingData.get("content", {})
|
||||
if not isinstance(content, dict):
|
||||
return ""
|
||||
if isinstance(content, dict) and content:
|
||||
text = self._stripMarkdownForPlainText(content.get("text", ""))
|
||||
level = content.get("level", 1)
|
||||
|
||||
if text:
|
||||
level = max(1, min(6, level))
|
||||
if level == 1:
|
||||
return f"{text}\n{'=' * len(text)}"
|
||||
elif level == 2:
|
||||
return f"{text}\n{'-' * len(text)}"
|
||||
else:
|
||||
return f"{'#' * level} {text}"
|
||||
|
||||
# AI shorthand: {"type":"heading","text":"...","level":2}
|
||||
text = self._stripMarkdownForPlainText(str(headingData.get("text", "") or ""))
|
||||
level = headingData.get("level", 1)
|
||||
if not text:
|
||||
return ""
|
||||
|
||||
try:
|
||||
level_i = int(level) if level is not None else 1
|
||||
except (TypeError, ValueError):
|
||||
level_i = 1
|
||||
level_i = max(1, min(6, level_i))
|
||||
if level_i == 1:
|
||||
return f"{text}\n{'=' * len(text)}"
|
||||
if level_i == 2:
|
||||
return f"{text}\n{'-' * len(text)}"
|
||||
return f"{'#' * level_i} {text}"
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Error rendering heading: {str(e)}")
|
||||
return ""
|
||||
|
|
@ -345,12 +364,65 @@ class RendererText(BaseRenderer):
|
|||
text = re.sub(r'`([^`]+)`', r'\1', text)
|
||||
return text.strip()
|
||||
|
||||
def _inlineRunsToPlainText(self, runs: Union[List[Any], Any]) -> str:
|
||||
"""Flatten InlineRun dicts (from markdownToDocumentJson) to a single string."""
|
||||
if runs is None:
|
||||
return ""
|
||||
if isinstance(runs, dict):
|
||||
runs = [runs]
|
||||
if not isinstance(runs, list):
|
||||
return self._stripMarkdownForPlainText(str(runs))
|
||||
parts: List[str] = []
|
||||
for run in runs:
|
||||
if not isinstance(run, dict):
|
||||
parts.append(str(run))
|
||||
continue
|
||||
t = run.get("type") or "text"
|
||||
val = run.get("value", "")
|
||||
if t == "text":
|
||||
parts.append(str(val))
|
||||
elif t in ("bold", "italic", "code"):
|
||||
parts.append(str(val))
|
||||
elif t == "link":
|
||||
parts.append(str(val))
|
||||
elif t == "image":
|
||||
parts.append(f"[{val}]")
|
||||
else:
|
||||
parts.append(str(val))
|
||||
return "".join(parts)
|
||||
|
||||
def _tableCellToPlainText(self, cell: Any) -> str:
|
||||
"""Table header/cell: plain str, legacy dict, or List[InlineRun]."""
|
||||
if cell is None:
|
||||
return ""
|
||||
if isinstance(cell, str):
|
||||
return self._stripMarkdownForPlainText(cell)
|
||||
if isinstance(cell, list):
|
||||
return self._inlineRunsToPlainText(cell)
|
||||
if isinstance(cell, dict) and "text" in cell:
|
||||
return self._stripMarkdownForPlainText(str(cell["text"]))
|
||||
return self._stripMarkdownForPlainText(str(cell))
|
||||
|
||||
def _renderJsonParagraph(self, paragraphData: Dict[str, Any]) -> str:
|
||||
"""Render a JSON paragraph to text. Strips markdown for plain text output."""
|
||||
try:
|
||||
# Extract from nested content structure
|
||||
content = paragraphData.get("content", {})
|
||||
# Models often return {"type":"paragraph","text":"..."} without nested "content"
|
||||
top = paragraphData.get("text")
|
||||
raw_content = paragraphData.get("content", {})
|
||||
if isinstance(top, str) and top.strip():
|
||||
if raw_content is None or raw_content == {}:
|
||||
return self._stripMarkdownForPlainText(top)
|
||||
if isinstance(raw_content, dict):
|
||||
if not (raw_content.get("text") or raw_content.get("inlineRuns")):
|
||||
return self._stripMarkdownForPlainText(top)
|
||||
|
||||
content = raw_content
|
||||
if content is None:
|
||||
content = {}
|
||||
if isinstance(content, dict):
|
||||
runs = self._inlineRunsFromContent(content)
|
||||
if runs:
|
||||
return self._stripMarkdownForPlainText(self._inlineRunsToPlainText(runs))
|
||||
text = content.get("text", "")
|
||||
elif isinstance(content, str):
|
||||
text = content
|
||||
|
|
|
|||
|
|
@ -2172,11 +2172,13 @@ def getContexts(
|
|||
>>> print(contexts.overlapContext) # "" (empty - JSON is complete)
|
||||
>>> print(contexts.jsonParsingSuccess) # True
|
||||
"""
|
||||
# First, check if original JSON is already complete (parseable without modification)
|
||||
# Completeness must use the same pipeline as callers (fences, balanced extract, normalization).
|
||||
from modules.shared.jsonUtils import tryParseJson as _utils_try_parse_json
|
||||
|
||||
jsonIsComplete = False
|
||||
if truncatedJson and truncatedJson.strip():
|
||||
parsed, error = _tryParseJson(truncatedJson.strip())
|
||||
if error is None:
|
||||
_parsed_hdr, error_hdr, _ = _utils_try_parse_json(truncatedJson)
|
||||
if error_hdr is None:
|
||||
jsonIsComplete = True
|
||||
logger.debug("Original JSON is already complete (no cut point)")
|
||||
|
||||
|
|
@ -2193,28 +2195,27 @@ def getContexts(
|
|||
jsonParsingSuccess = False
|
||||
|
||||
if completePart and completePart.strip():
|
||||
# First attempt: parse as-is
|
||||
parsed, error = _tryParseJson(completePart)
|
||||
|
||||
parsed, error, _ = _utils_try_parse_json(completePart)
|
||||
if error is None:
|
||||
jsonParsingSuccess = True
|
||||
else:
|
||||
# Second attempt: repair internal errors and retry
|
||||
logger.debug(f"Initial parse failed: {error}, attempting repair")
|
||||
logger.debug(f"Initial parse failed: {error}, attempting internal repair")
|
||||
repairedCompletePart = _repairInternalJsonErrors(completePart)
|
||||
|
||||
parsed, error = _tryParseJson(repairedCompletePart)
|
||||
|
||||
parsed, error, _ = _utils_try_parse_json(repairedCompletePart)
|
||||
if error is None:
|
||||
# Repair succeeded - use repaired version
|
||||
completePart = repairedCompletePart
|
||||
jsonParsingSuccess = True
|
||||
logger.debug("JSON repair successful")
|
||||
else:
|
||||
# Repair also failed - keep original completePart, mark as failed
|
||||
logger.debug(f"JSON repair also failed: {error}")
|
||||
jsonParsingSuccess = False
|
||||
|
||||
# If completePart parses successfully, the merged/candidate JSON is structurally complete
|
||||
# after repair/closing — overlap from extractContinuationContexts on the *raw* candidate
|
||||
# would falsely signal truncation and trap callAiWithLooping in continuation iterations.
|
||||
if jsonParsingSuccess:
|
||||
overlap = ""
|
||||
|
||||
return JsonContinuationContexts(
|
||||
overlapContext=overlap,
|
||||
hierarchyContext=hierarchy,
|
||||
|
|
|
|||
|
|
@ -393,9 +393,10 @@ async def executeGraph(
|
|||
ordered_ids = [n.get("id") for n in ordered if n.get("id")]
|
||||
logger.info("executeGraph topoSort order: %s", ordered_ids)
|
||||
|
||||
nodeOutputs: Dict[str, Any] = dict(initialNodeOutputs or {})
|
||||
# Normalize resumed human-node output BEFORE copying into nodeOutputs — otherwise
|
||||
# normalizeToSchema only updates initialNodeOutputs and loop/refs still see raw
|
||||
# e.g. input.upload {files} without coerced DocumentList.documents.
|
||||
is_resume = startAfterNodeId is not None
|
||||
|
||||
if is_resume and initialNodeOutputs and startAfterNodeId:
|
||||
resumedNode = next((n for n in nodes if n.get("id") == startAfterNodeId), None)
|
||||
if resumedNode:
|
||||
|
|
@ -408,6 +409,8 @@ async def executeGraph(
|
|||
initialNodeOutputs[startAfterNodeId] = normalizeToSchema(resumedOutput, schema)
|
||||
except Exception as valErr:
|
||||
logger.warning("executeGraph resume: schema validation failed for %s: %s", startAfterNodeId, valErr)
|
||||
|
||||
nodeOutputs: Dict[str, Any] = dict(initialNodeOutputs or {})
|
||||
if not runId and automation2_interface and workflowId and not is_resume:
|
||||
run_context = {
|
||||
"connectionMap": connectionMap,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Action node executor - maps ai.*, email.*, sharepoint.*, clickup.*, file.*, trustee.* to method actions.
|
||||
#
|
||||
# Typed Port System: explicit DataRefs / static parameters only (no runtime wire-handover).
|
||||
# Typed Port System: explicit DataRefs / static parameters; optional ``documentList`` from input port 0
|
||||
# when the param is empty (same idea as IOExecutor wire fill).
|
||||
# ``materializeConnectionRefs`` (see pickNotPushMigration) may still rewrite empty connectionReference at run start.
|
||||
|
||||
import json
|
||||
|
|
@ -18,6 +19,25 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import Bil
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _coerce_document_data_to_bytes(raw: Any) -> Optional[bytes]:
|
||||
"""Normalize documentData (bytes/str/buffer) for DB file persistence."""
|
||||
if raw is None:
|
||||
return None
|
||||
if isinstance(raw, bytes):
|
||||
return raw if len(raw) > 0 else None
|
||||
if isinstance(raw, bytearray):
|
||||
b = bytes(raw)
|
||||
return b if len(b) > 0 else None
|
||||
if isinstance(raw, memoryview):
|
||||
b = raw.tobytes()
|
||||
return b if len(b) > 0 else None
|
||||
if isinstance(raw, str):
|
||||
b = raw.encode("utf-8")
|
||||
return b if len(b) > 0 else None
|
||||
return None
|
||||
|
||||
|
||||
_USER_CONNECTION_ID_RE = re.compile(
|
||||
r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
|
||||
re.IGNORECASE,
|
||||
|
|
@ -219,6 +239,78 @@ def _getOutputSchemaName(nodeDef: Dict) -> str:
|
|||
return port0.get("schema", "ActionResult")
|
||||
|
||||
|
||||
def _extract_wired_document_list(inp: Any) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Build a DocumentList-shaped dict from upstream node output (matches IOExecutor wire behavior).
|
||||
Handles DocumentList, human upload shapes (file / files / fileIds), FileList, loop file items.
|
||||
During flow.loop body execution the loop node's output is
|
||||
{items, count, currentItem, currentIndex}; wired document actions must use currentItem.
|
||||
"""
|
||||
if inp is None:
|
||||
return None
|
||||
from modules.features.graphicalEditor.portTypes import (
|
||||
unwrapTransit,
|
||||
_coerce_document_list_upload_fields,
|
||||
_file_record_to_document,
|
||||
)
|
||||
|
||||
data = unwrapTransit(inp)
|
||||
if isinstance(data, str):
|
||||
one = _file_record_to_document(data)
|
||||
return {"documents": [one], "count": 1} if one else None
|
||||
if not isinstance(data, dict):
|
||||
return None
|
||||
d = dict(data)
|
||||
_coerce_document_list_upload_fields(d)
|
||||
# Per-iteration payload from executionEngine (flow.loop → downstream in loop body)
|
||||
if "currentItem" in d:
|
||||
ci = d.get("currentItem")
|
||||
if ci is not None:
|
||||
nested = _extract_wired_document_list(ci)
|
||||
if nested:
|
||||
return nested
|
||||
docs = d.get("documents")
|
||||
if isinstance(docs, list) and len(docs) > 0:
|
||||
return {"documents": docs, "count": d.get("count", len(docs))}
|
||||
raw_list = d.get("documentList")
|
||||
if isinstance(raw_list, list) and len(raw_list) > 0 and isinstance(raw_list[0], dict):
|
||||
return {"documents": raw_list, "count": len(raw_list)}
|
||||
doc_id = d.get("documentId") or d.get("id")
|
||||
if doc_id and str(doc_id).strip():
|
||||
one: Dict[str, Any] = {"id": str(doc_id).strip()}
|
||||
fn = d.get("fileName") or d.get("name")
|
||||
if fn:
|
||||
one["name"] = str(fn)
|
||||
mt = d.get("mimeType")
|
||||
if mt:
|
||||
one["mimeType"] = str(mt)
|
||||
return {"documents": [one], "count": 1}
|
||||
files = d.get("files")
|
||||
if isinstance(files, list) and files:
|
||||
collected = []
|
||||
for item in files:
|
||||
conv = _file_record_to_document(item) if isinstance(item, dict) else None
|
||||
if conv:
|
||||
collected.append(conv)
|
||||
if collected:
|
||||
return {"documents": collected, "count": len(collected)}
|
||||
return None
|
||||
|
||||
|
||||
def _document_list_param_is_empty(val: Any) -> bool:
|
||||
if val is None or val == "":
|
||||
return True
|
||||
if isinstance(val, list) and len(val) == 0:
|
||||
return True
|
||||
if isinstance(val, dict):
|
||||
if val.get("documents") or val.get("references") or val.get("items"):
|
||||
return False
|
||||
if val.get("documentId") or val.get("id"):
|
||||
return False
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class ActionNodeExecutor:
|
||||
"""Execute action nodes by mapping to method actions via ActionExecutor."""
|
||||
|
||||
|
|
@ -260,6 +352,17 @@ class ActionNodeExecutor:
|
|||
if pName and pName not in resolvedParams and "default" in pDef:
|
||||
resolvedParams[pName] = pDef["default"]
|
||||
|
||||
_param_names = {p.get("name") for p in nodeDef.get("parameters", []) if p.get("name")}
|
||||
if "documentList" in _param_names and _document_list_param_is_empty(resolvedParams.get("documentList")):
|
||||
_src_map = (context.get("inputSources") or {}).get(nodeId) or {}
|
||||
_entry = _src_map.get(0)
|
||||
if _entry:
|
||||
_src_node_id, _ = _entry
|
||||
_upstream = (context.get("nodeOutputs") or {}).get(_src_node_id)
|
||||
_wired = _extract_wired_document_list(_upstream)
|
||||
if _wired:
|
||||
resolvedParams["documentList"] = _wired
|
||||
|
||||
# 3. Resolve connectionReference
|
||||
chatService = getattr(self.services, "chat", None)
|
||||
_resolveConnectionParam(resolvedParams, chatService, self.services)
|
||||
|
|
@ -323,18 +426,33 @@ class ActionNodeExecutor:
|
|||
for d in (result.documents or []):
|
||||
dumped = d.model_dump() if hasattr(d, "model_dump") else dict(d) if isinstance(d, dict) else d
|
||||
rawData = getattr(d, "documentData", None) if hasattr(d, "documentData") else (dumped.get("documentData") if isinstance(dumped, dict) else None)
|
||||
if isinstance(dumped, dict) and isinstance(rawData, bytes) and len(rawData) > 0:
|
||||
rawBytes = _coerce_document_data_to_bytes(rawData)
|
||||
if isinstance(dumped, dict) and rawBytes:
|
||||
try:
|
||||
from modules.interfaces.interfaceDbManagement import getInterface as _getMgmtInterface
|
||||
from modules.interfaces.interfaceDbApp import getInterface as _getAppInterface
|
||||
from modules.security.rootAccess import getRootUser
|
||||
_userId = context.get("userId")
|
||||
_mandateId = context.get("mandateId")
|
||||
_instanceId = context.get("instanceId")
|
||||
_mgmt = _getMgmtInterface(getRootUser(), mandateId=_mandateId, featureInstanceId=_instanceId)
|
||||
_owner = None
|
||||
if _userId:
|
||||
try:
|
||||
_umap = _getAppInterface(getRootUser()).getUsersByIds([str(_userId)])
|
||||
_owner = _umap.get(str(_userId))
|
||||
except Exception as _ue:
|
||||
logger.warning("Could not resolve workflow user for file persistence: %s", _ue)
|
||||
if _owner is None:
|
||||
_owner = getRootUser()
|
||||
logger.debug(
|
||||
"Persisting workflow document as root user (no resolved owner userId=%r)",
|
||||
_userId,
|
||||
)
|
||||
_mgmt = _getMgmtInterface(_owner, mandateId=_mandateId, featureInstanceId=_instanceId)
|
||||
_docName = dumped.get("documentName") or f"workflow-result-{nodeId}.bin"
|
||||
_mimeType = dumped.get("mimeType") or "application/octet-stream"
|
||||
_fileItem = _mgmt.createFile(_docName, _mimeType, rawData)
|
||||
_mgmt.createFileData(_fileItem.id, rawData)
|
||||
_fileItem = _mgmt.createFile(_docName, _mimeType, rawBytes)
|
||||
_mgmt.createFileData(_fileItem.id, rawBytes)
|
||||
dumped["fileId"] = _fileItem.id
|
||||
dumped["id"] = _fileItem.id
|
||||
dumped["fileName"] = _fileItem.fileName
|
||||
|
|
@ -345,6 +463,20 @@ class ActionNodeExecutor:
|
|||
dumped["_hasBinaryData"] = True
|
||||
docsList.append(dumped)
|
||||
|
||||
# Clean DocumentList shape for document nodes (match file.create: documents + count, no AiResult fields)
|
||||
if outputSchema == "DocumentList" and nodeType in ("ai.generateDocument", "ai.convertDocument"):
|
||||
if not result.success:
|
||||
return _normalizeError(
|
||||
RuntimeError(str(result.error or "document action failed")),
|
||||
outputSchema,
|
||||
)
|
||||
list_out: Dict[str, Any] = {
|
||||
"documents": docsList,
|
||||
"count": len(docsList),
|
||||
}
|
||||
_attachConnectionProvenance(list_out, resolvedParams, outputSchema, chatService, self.services)
|
||||
return normalizeToSchema(list_out, outputSchema)
|
||||
|
||||
extractedContext = ""
|
||||
if result.documents:
|
||||
doc = result.documents[0]
|
||||
|
|
@ -377,7 +509,11 @@ class ActionNodeExecutor:
|
|||
if nodeType.startswith("ai."):
|
||||
out["prompt"] = promptText
|
||||
out["response"] = extractedContext
|
||||
out["context"] = f"{promptText}\n\n{extractedContext}" if promptText and extractedContext else (extractedContext or promptText)
|
||||
inputContext = resolvedParams.get("context")
|
||||
if inputContext is not None:
|
||||
out["context"] = inputContext if isinstance(inputContext, str) else json.dumps(inputContext, ensure_ascii=False, default=str)
|
||||
else:
|
||||
out["context"] = ""
|
||||
# Structured output
|
||||
if extractedContext:
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -45,10 +45,12 @@ class IOExecutor:
|
|||
if 0 in inputSources:
|
||||
srcId, _ = inputSources[0]
|
||||
inp = nodeOutputs.get(srcId)
|
||||
from modules.workflows.automation2.executors.actionNodeExecutor import _getDocumentsFromUpstream
|
||||
docs = _getDocumentsFromUpstream(inp) if isinstance(inp, dict) else []
|
||||
from modules.workflows.automation2.executors.actionNodeExecutor import _extract_wired_document_list
|
||||
|
||||
wired = _extract_wired_document_list(inp)
|
||||
docs = (wired or {}).get("documents") if isinstance(wired, dict) else None
|
||||
if docs:
|
||||
resolvedParams.setdefault("documentList", docs)
|
||||
resolvedParams.setdefault("documentList", wired)
|
||||
elif inp is not None:
|
||||
resolvedParams.setdefault("input", inp)
|
||||
|
||||
|
|
|
|||
|
|
@ -7,6 +7,50 @@ from typing import Dict, List, Any, Tuple, Set, Optional
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _ai_result_text_from_documents(d: Dict[str, Any]) -> Optional[str]:
|
||||
"""Extract plain-text body from AiResult-style ``documents[0].documentData``."""
|
||||
docs = d.get("documents")
|
||||
if not isinstance(docs, list) or not docs:
|
||||
return None
|
||||
d0 = docs[0]
|
||||
raw: Any = None
|
||||
if isinstance(d0, dict):
|
||||
raw = d0.get("documentData")
|
||||
elif d0 is not None:
|
||||
raw = getattr(d0, "documentData", None)
|
||||
if raw is None:
|
||||
return None
|
||||
if isinstance(raw, bytes):
|
||||
try:
|
||||
t = raw.decode("utf-8").strip()
|
||||
return t or None
|
||||
except (UnicodeDecodeError, ValueError):
|
||||
return None
|
||||
if isinstance(raw, str):
|
||||
s = raw.strip()
|
||||
return s or None
|
||||
return None
|
||||
|
||||
|
||||
def _ref_coalesce_empty_ai_result_text(data: Any, path: List[Any], resolved: Any) -> Any:
|
||||
"""If a ref targets AiResult text fields but resolves empty/missing, fall back to documents.
|
||||
|
||||
Needed when: optional ``responseData`` is absent (no synthetic ``{}``), ``response`` is
|
||||
still empty but ``documents`` hold the model output, or legacy graphs bind responseData only.
|
||||
"""
|
||||
if resolved not in (None, ""):
|
||||
return resolved
|
||||
if not isinstance(data, dict) or not path:
|
||||
return resolved
|
||||
head = path[0]
|
||||
if head not in ("response", "responseData", "context"):
|
||||
return resolved
|
||||
if head == "context" and len(path) != 1:
|
||||
return resolved
|
||||
fb = _ai_result_text_from_documents(data)
|
||||
return fb if fb is not None else resolved
|
||||
|
||||
|
||||
def parseGraph(graph: Dict[str, Any]) -> Tuple[List[Dict], List[Dict], Set[str]]:
|
||||
"""
|
||||
Parse graph into nodes, connections, and node IDs.
|
||||
|
|
@ -356,14 +400,15 @@ def resolveParameterReferences(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
|
|||
data = data.get("data", data)
|
||||
plist = list(path)
|
||||
resolved = _get_by_path(data, plist)
|
||||
if (
|
||||
resolved is None
|
||||
and isinstance(data, dict)
|
||||
and plist
|
||||
and plist[0] == "payload"
|
||||
and len(plist) > 1
|
||||
):
|
||||
if resolved is None and isinstance(data, dict) and plist:
|
||||
if plist[0] == "payload" and len(plist) > 1:
|
||||
# Strip explicit "payload" prefix (legacy DataPicker paths)
|
||||
resolved = _get_by_path(data, plist[1:])
|
||||
elif "payload" in data and isinstance(data["payload"], dict):
|
||||
# Form nodes store fields under {"payload": {fieldName: …}}.
|
||||
# DataPicker emits bare field paths like ["url"]; try under payload.
|
||||
resolved = _get_by_path(data["payload"], plist)
|
||||
resolved = _ref_coalesce_empty_ai_result_text(data, plist, resolved)
|
||||
return resolveParameterReferences(resolved, nodeOutputs)
|
||||
return value
|
||||
if value.get("type") == "value":
|
||||
|
|
@ -386,17 +431,34 @@ def resolveParameterReferences(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
|
|||
if len(parts) < 2:
|
||||
return json.dumps(data) if isinstance(data, (dict, list)) else str(data)
|
||||
rest = ".".join(parts[1:])
|
||||
if data is None:
|
||||
return m.group(0)
|
||||
for k in rest.split("."):
|
||||
if isinstance(data, dict) and k in data:
|
||||
data = data[k]
|
||||
elif isinstance(data, (list, tuple)) and k.isdigit():
|
||||
data = data[int(k)]
|
||||
|
||||
def _walk(root, keys):
|
||||
cur = root
|
||||
for k in keys:
|
||||
if isinstance(cur, dict) and k in cur:
|
||||
cur = cur[k]
|
||||
elif isinstance(cur, (list, tuple)) and k.isdigit():
|
||||
cur = cur[int(k)]
|
||||
else:
|
||||
return None
|
||||
return cur
|
||||
|
||||
keys = rest.split(".")
|
||||
result = _walk(data, keys)
|
||||
# Form nodes store fields under {"payload": {field: …}}.
|
||||
# Fall back to looking under "payload" when the direct path misses.
|
||||
if result is None and isinstance(data, dict) and "payload" in data:
|
||||
result = _walk(data["payload"], keys)
|
||||
if result is None:
|
||||
return m.group(0)
|
||||
return str(data) if data is not None else m.group(0)
|
||||
return str(result) if not isinstance(result, (dict, list)) else json.dumps(result, ensure_ascii=False)
|
||||
return re.sub(r"\{\{\s*([^}]+)\s*\}\}", repl, value)
|
||||
if isinstance(value, list):
|
||||
# contextBuilder: list where every item is a `{"type":"ref",...}` envelope.
|
||||
# Resolve each ref and join the serialised parts into a single prompt string.
|
||||
if value and all(isinstance(v, dict) and v.get("type") == "ref" for v in value):
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
parts = [serialize_context(resolveParameterReferences(v, nodeOutputs)) for v in value]
|
||||
return "\n\n".join(p for p in parts if p)
|
||||
return [resolveParameterReferences(v, nodeOutputs) for v in value]
|
||||
return value
|
||||
|
|
|
|||
|
|
@ -3,6 +3,30 @@
|
|||
|
||||
"""Shared helpers for AI workflow actions."""
|
||||
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
|
||||
def serialize_context(val: Any) -> str:
|
||||
"""Convert any context value to a readable string for use in AI prompts.
|
||||
|
||||
- None / empty string → ""
|
||||
- empty dict (no keys) → "" (avoids literal "{}" in file.create / prompts)
|
||||
- str → as-is
|
||||
- dict / list → pretty-printed JSON
|
||||
- anything else → str()
|
||||
"""
|
||||
if val is None or val == "" or val == []:
|
||||
return ""
|
||||
if isinstance(val, dict) and len(val) == 0:
|
||||
return ""
|
||||
if isinstance(val, str):
|
||||
return val.strip()
|
||||
try:
|
||||
return json.dumps(val, ensure_ascii=False, indent=2)
|
||||
except Exception:
|
||||
return str(val)
|
||||
|
||||
|
||||
def applyCommonAiParams(parameters: dict, request) -> None:
|
||||
"""Apply common AI parameters (requireNeutralization, allowedModels) from node to request."""
|
||||
|
|
|
|||
|
|
@ -14,11 +14,13 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import Bil
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
async def generateCode(self, parameters: Dict[str, Any]) -> ActionResult:
|
||||
prompt = parameters.get("prompt")
|
||||
if not prompt:
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
base_prompt = (parameters.get("prompt") or "").strip()
|
||||
context_val = serialize_context(parameters.get("context"))
|
||||
prompt = f"Kontext:\n{context_val}\n\n{base_prompt}" if context_val else base_prompt
|
||||
if not prompt.strip():
|
||||
return ActionResult.isFailure(error="prompt is required")
|
||||
|
||||
documentList = parameters.get("documentList", [])
|
||||
# Optional: if omitted, formats determined from prompt by AI
|
||||
resultType = parameters.get("resultType")
|
||||
|
||||
|
|
@ -31,19 +33,15 @@ async def generateCode(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
parentOperationId = parameters.get('parentOperationId')
|
||||
|
||||
try:
|
||||
# Convert documentList to DocumentReferenceList if needed
|
||||
docRefList = None
|
||||
if documentList:
|
||||
from modules.datamodels.datamodelDocref import DocumentReferenceList
|
||||
from modules.datamodels.datamodelDocref import coerceDocumentReferenceList
|
||||
|
||||
if isinstance(documentList, DocumentReferenceList):
|
||||
docRefList = documentList
|
||||
elif isinstance(documentList, str):
|
||||
docRefList = DocumentReferenceList.from_string_list([documentList])
|
||||
elif isinstance(documentList, list):
|
||||
docRefList = DocumentReferenceList.from_string_list(documentList)
|
||||
raw_dl = parameters.get("documentList")
|
||||
if raw_dl is None or raw_dl == "":
|
||||
docRefList = None
|
||||
else:
|
||||
docRefList = DocumentReferenceList(references=[])
|
||||
docRefList = coerceDocumentReferenceList(raw_dl)
|
||||
if not docRefList.references:
|
||||
docRefList = None
|
||||
|
||||
# Prepare title
|
||||
title = "Generated Code"
|
||||
|
|
|
|||
|
|
@ -14,14 +14,18 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import Bil
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
async def generateDocument(self, parameters: Dict[str, Any]) -> ActionResult:
|
||||
prompt = parameters.get("prompt")
|
||||
if not prompt:
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
base_prompt = (parameters.get("prompt") or "").strip()
|
||||
context_val = serialize_context(parameters.get("context"))
|
||||
prompt = f"Kontext:\n{context_val}\n\n{base_prompt}" if context_val else base_prompt
|
||||
if not prompt.strip():
|
||||
return ActionResult.isFailure(error="prompt is required")
|
||||
|
||||
documentList = parameters.get("documentList", [])
|
||||
documentType = parameters.get("documentType")
|
||||
# Optional: if omitted, formats determined from prompt by AI
|
||||
resultType = parameters.get("resultType")
|
||||
# Prefer explicit outputFormat (flow UI); resultType remains for legacy / API callers.
|
||||
resultType = parameters.get("outputFormat") or parameters.get("resultType")
|
||||
if isinstance(resultType, str):
|
||||
resultType = resultType.strip().lstrip(".").lower() or None
|
||||
|
||||
if not resultType:
|
||||
logger.debug("resultType not provided - formats will be determined from prompt by AI")
|
||||
|
|
@ -32,22 +36,23 @@ async def generateDocument(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
parentOperationId = parameters.get('parentOperationId')
|
||||
|
||||
try:
|
||||
# Convert documentList to DocumentReferenceList if needed
|
||||
# Convert documentList to DocumentReferenceList (handles dict {"documents": [...]}, list of ids, str, etc.)
|
||||
from modules.datamodels.datamodelDocref import coerceDocumentReferenceList
|
||||
|
||||
raw_dl = parameters.get("documentList")
|
||||
if raw_dl is None or raw_dl == "":
|
||||
docRefList = None
|
||||
if documentList:
|
||||
from modules.datamodels.datamodelDocref import DocumentReferenceList
|
||||
|
||||
if isinstance(documentList, DocumentReferenceList):
|
||||
docRefList = documentList
|
||||
elif isinstance(documentList, str):
|
||||
docRefList = DocumentReferenceList.from_string_list([documentList])
|
||||
elif isinstance(documentList, list):
|
||||
docRefList = DocumentReferenceList.from_string_list(documentList)
|
||||
else:
|
||||
docRefList = DocumentReferenceList(references=[])
|
||||
docRefList = coerceDocumentReferenceList(raw_dl)
|
||||
if not docRefList.references:
|
||||
docRefList = None
|
||||
|
||||
# Prepare title
|
||||
title = parameters.get("documentType") or "Generated Document"
|
||||
title_raw = parameters.get("title")
|
||||
title = (title_raw.strip() if isinstance(title_raw, str) else "") or None
|
||||
if not title and isinstance(documentType, str) and documentType.strip():
|
||||
title = documentType.strip()
|
||||
if not title:
|
||||
title = "Generated Document"
|
||||
|
||||
# Call AI service for document generation
|
||||
# callAiContent handles documentList internally via Phases 5A-5E
|
||||
|
|
@ -95,6 +100,8 @@ async def generateDocument(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
"actionType": "ai.generateDocument",
|
||||
"documentType": documentType,
|
||||
"resultType": resultType,
|
||||
"outputFormat": resultType,
|
||||
"title": title,
|
||||
}
|
||||
))
|
||||
|
||||
|
|
@ -116,14 +123,15 @@ async def generateDocument(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
docName = sanitized
|
||||
|
||||
# Determine mime type
|
||||
rt = resultTypeFallback
|
||||
mimeType = "text/plain"
|
||||
if resultType == "html":
|
||||
if rt == "html":
|
||||
mimeType = "text/html"
|
||||
elif resultType == "json":
|
||||
elif rt == "json":
|
||||
mimeType = "application/json"
|
||||
elif resultType == "pdf":
|
||||
elif rt == "pdf":
|
||||
mimeType = "application/pdf"
|
||||
elif resultType == "md":
|
||||
elif rt == "md":
|
||||
mimeType = "text/markdown"
|
||||
|
||||
documents.append(ActionDocument(
|
||||
|
|
@ -134,6 +142,8 @@ async def generateDocument(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
"actionType": "ai.generateDocument",
|
||||
"documentType": documentType,
|
||||
"resultType": resultType,
|
||||
"outputFormat": resultType,
|
||||
"title": title,
|
||||
}
|
||||
))
|
||||
|
||||
|
|
|
|||
|
|
@ -75,8 +75,10 @@ def _action_docs_to_content_parts(services, docs: List[Any]) -> List[ContentPart
|
|||
|
||||
def _resolve_file_refs_to_content_parts(services, fileIdRefs) -> List[ContentPart]:
|
||||
"""Fetch files by ID from the file store and extract content.
|
||||
Used for automation2 workflows where documents are file-store references,
|
||||
not chat message attachments."""
|
||||
Used ONLY for automation2 workflows where documents are file-store
|
||||
references, not chat message attachments. In the agent/chat context,
|
||||
``DocumentItemReference`` holds ChatDocument IDs that must be resolved
|
||||
via ``getChatDocumentsFromDocumentList`` instead."""
|
||||
from modules.datamodels.datamodelExtraction import ExtractionOptions, MergeStrategy
|
||||
|
||||
mgmt = getattr(services, 'interfaceDbComponent', None)
|
||||
|
|
@ -171,11 +173,19 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
f"to DocumentReferenceList with {len(documentList.references)} references"
|
||||
)
|
||||
|
||||
# Resolve DocumentItemReferences (file-ID refs from automation2) directly
|
||||
# from the file store. These cannot be resolved via chat messages.
|
||||
# DocumentItemReferences carry either file-store IDs (automation2)
|
||||
# or ChatDocument IDs (agent context with docItem: refs).
|
||||
# Route based on context: if a chat workflow with messages exists,
|
||||
# let getChatDocumentsFromDocumentList handle them (it resolves
|
||||
# docItem:uuid via workflow.messages). Otherwise fall through to
|
||||
# the file-store path for automation2.
|
||||
from modules.datamodels.datamodelDocref import DocumentItemReference
|
||||
fileIdRefs = [r for r in documentList.references if isinstance(r, DocumentItemReference)]
|
||||
if fileIdRefs:
|
||||
chatService = getattr(self.services, 'chat', None)
|
||||
workflow = getattr(chatService, '_workflow', None) if chatService else None
|
||||
hasChatContext = workflow and getattr(workflow, 'messages', None)
|
||||
if not hasChatContext:
|
||||
extractedParts = _resolve_file_refs_to_content_parts(self.services, fileIdRefs)
|
||||
if extractedParts:
|
||||
inline_content_parts = (inline_content_parts or []) + extractedParts
|
||||
|
|
@ -210,17 +220,12 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
mimeMap = {"txt": "text/plain", "json": "application/json", "html": "text/html", "md": "text/markdown", "csv": "text/csv", "xml": "application/xml"}
|
||||
output_mime_type = mimeMap.get(normalized_result_type, "text/plain") if normalized_result_type else "text/plain"
|
||||
|
||||
# Normalize context: workflow refs may resolve to dict/list instead of str
|
||||
paramContext = parameters.get("context")
|
||||
if paramContext is not None and not isinstance(paramContext, str):
|
||||
try:
|
||||
paramContext = json.dumps(paramContext, ensure_ascii=False, default=str)
|
||||
parameters["context"] = paramContext
|
||||
logger.info(f"ai.process: Serialized non-string context ({type(parameters.get('context')).__name__}) to JSON ({len(paramContext)} chars)")
|
||||
except Exception as e:
|
||||
logger.warning(f"ai.process: Failed to serialize context: {e}")
|
||||
paramContext = str(paramContext)
|
||||
# Normalize context: serialize any non-string value (dict/list/int/…) to text
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
paramContext = serialize_context(parameters.get("context"))
|
||||
parameters["context"] = paramContext
|
||||
if paramContext:
|
||||
logger.info(f"ai.process: context serialized ({len(paramContext)} chars)")
|
||||
|
||||
# Phase 7.3: Pass documentList and/or contentParts to AI service
|
||||
contentParts: Optional[List[ContentPart]] = inline_content_parts
|
||||
|
|
@ -247,7 +252,7 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
self.services.chat.progressLogUpdate(operationId, 0.6, "Calling AI (simple mode)")
|
||||
|
||||
context_parts = []
|
||||
paramContext = parameters.get("context")
|
||||
paramContext = parameters.get("context") # already serialized above
|
||||
if paramContext and isinstance(paramContext, str) and paramContext.strip():
|
||||
context_parts.append(paramContext.strip())
|
||||
if documentList and len(documentList.references) > 0:
|
||||
|
|
|
|||
|
|
@ -13,10 +13,42 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import Bil
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def _build_research_prompt(parameters: Dict[str, Any]) -> str:
|
||||
"""Assemble the final research prompt from prompt + optional context/documentList."""
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
base_prompt = (parameters.get("prompt") or "").strip()
|
||||
context_val = serialize_context(parameters.get("context"))
|
||||
doc_list = parameters.get("documentList")
|
||||
|
||||
parts: list[str] = []
|
||||
|
||||
if context_val:
|
||||
parts.append(f"Kontext:\n{context_val}")
|
||||
|
||||
# Extract text from documentList items if provided
|
||||
if doc_list:
|
||||
docs: list = []
|
||||
if isinstance(doc_list, dict):
|
||||
docs = doc_list.get("documents", []) or doc_list.get("items", [])
|
||||
elif isinstance(doc_list, list):
|
||||
docs = doc_list
|
||||
doc_texts = []
|
||||
for d in docs:
|
||||
if isinstance(d, dict):
|
||||
text = d.get("documentData") or d.get("text") or d.get("content") or ""
|
||||
if text and isinstance(text, str):
|
||||
doc_texts.append(text.strip())
|
||||
if doc_texts:
|
||||
parts.append("Dokumente:\n" + "\n---\n".join(doc_texts))
|
||||
|
||||
parts.append(base_prompt)
|
||||
return "\n\n".join(p for p in parts if p)
|
||||
|
||||
|
||||
async def webResearch(self, parameters: Dict[str, Any]) -> ActionResult:
|
||||
operationId = None
|
||||
try:
|
||||
prompt = parameters.get("prompt")
|
||||
prompt = _build_research_prompt(parameters)
|
||||
if not prompt:
|
||||
return ActionResult.isFailure(error="Research prompt is required")
|
||||
|
||||
|
|
|
|||
|
|
@ -289,6 +289,30 @@ class MethodAi(MethodBase):
|
|||
required=True,
|
||||
description="Description of the document to generate"
|
||||
),
|
||||
"outputFormat": WorkflowActionParameter(
|
||||
name="outputFormat",
|
||||
type="str",
|
||||
frontendType=FrontendType.SELECT,
|
||||
frontendOptions=["docx", "pdf", "txt", "html", "md"],
|
||||
required=False,
|
||||
default="docx",
|
||||
description="Rendered output format (same choices as file.create). If omitted alongside resultType, the model may infer format from the prompt."
|
||||
),
|
||||
"title": WorkflowActionParameter(
|
||||
name="title",
|
||||
type="str",
|
||||
frontendType=FrontendType.TEXT,
|
||||
required=False,
|
||||
description="Document title / metadata (optional); used as generation title and for file naming hints."
|
||||
),
|
||||
"context": WorkflowActionParameter(
|
||||
name="context",
|
||||
type="Any",
|
||||
frontendType=FrontendType.TEXTAREA,
|
||||
required=False,
|
||||
default="",
|
||||
description="Additional structured or text context from upstream steps; serialized into the prompt."
|
||||
),
|
||||
"documentList": WorkflowActionParameter(
|
||||
name="documentList",
|
||||
type="DocumentList",
|
||||
|
|
@ -302,16 +326,15 @@ class MethodAi(MethodBase):
|
|||
frontendType=FrontendType.SELECT,
|
||||
frontendOptions=["letter", "memo", "proposal", "contract", "report", "email"],
|
||||
required=False,
|
||||
description="Type of document"
|
||||
description="Type of document (content hint for the model); used as title fallback when title is empty."
|
||||
),
|
||||
"resultType": WorkflowActionParameter(
|
||||
name="resultType",
|
||||
type="str",
|
||||
frontendType=FrontendType.TEXT,
|
||||
required=False,
|
||||
default="txt",
|
||||
description="Output format (e.g., txt, html, pdf, docx, md, json, csv, xlsx, pptx, png, jpg). Optional: if omitted, formats are determined from prompt by AI. Default \"txt\" is validation fallback only. With per-document format determination, AI can determine different formats for different documents based on prompt."
|
||||
)
|
||||
description="Legacy/API output format extension (e.g. txt, docx). Ignored when outputFormat is set."
|
||||
),
|
||||
},
|
||||
execute=generateDocument.__get__(self, self.__class__)
|
||||
),
|
||||
|
|
|
|||
|
|
@ -31,8 +31,30 @@ async def list_tasks(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
|
||||
page = int(parameters.get("page") or 0)
|
||||
include_closed = bool(parameters.get("includeClosed", False))
|
||||
|
||||
dateFilters = {}
|
||||
for key in ("dateCreatedGt", "dateCreatedLt", "dateUpdatedGt", "dateUpdatedLt"):
|
||||
val = parameters.get(key)
|
||||
if val is not None and str(val).strip():
|
||||
try:
|
||||
dateFilters[key] = int(val)
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
rawCustomFields = parameters.get("customFields")
|
||||
customFields = None
|
||||
if rawCustomFields:
|
||||
if isinstance(rawCustomFields, str):
|
||||
try:
|
||||
customFields = json.loads(rawCustomFields)
|
||||
except json.JSONDecodeError:
|
||||
return ActionResult.isFailure(error="customFields must be valid JSON array")
|
||||
elif isinstance(rawCustomFields, list):
|
||||
customFields = rawCustomFields
|
||||
|
||||
data = await self.services.clickup.getTasksInList(
|
||||
list_id, page=page, include_closed=include_closed, subtasks=True
|
||||
list_id, page=page, include_closed=include_closed, subtasks=True,
|
||||
**dateFilters, customFields=customFields,
|
||||
)
|
||||
if isinstance(data, dict) and data.get("error"):
|
||||
return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))
|
||||
|
|
|
|||
|
|
@ -66,6 +66,41 @@ class MethodClickup(MethodBase):
|
|||
default=False,
|
||||
description="Include closed tasks",
|
||||
),
|
||||
"dateCreatedGt": WorkflowActionParameter(
|
||||
name="dateCreatedGt",
|
||||
type="int",
|
||||
frontendType=FrontendType.NUMBER,
|
||||
required=False,
|
||||
description="Filter: created after this Unix ms timestamp",
|
||||
),
|
||||
"dateCreatedLt": WorkflowActionParameter(
|
||||
name="dateCreatedLt",
|
||||
type="int",
|
||||
frontendType=FrontendType.NUMBER,
|
||||
required=False,
|
||||
description="Filter: created before this Unix ms timestamp",
|
||||
),
|
||||
"dateUpdatedGt": WorkflowActionParameter(
|
||||
name="dateUpdatedGt",
|
||||
type="int",
|
||||
frontendType=FrontendType.NUMBER,
|
||||
required=False,
|
||||
description="Filter: updated after this Unix ms timestamp",
|
||||
),
|
||||
"dateUpdatedLt": WorkflowActionParameter(
|
||||
name="dateUpdatedLt",
|
||||
type="int",
|
||||
frontendType=FrontendType.NUMBER,
|
||||
required=False,
|
||||
description="Filter: updated before this Unix ms timestamp",
|
||||
),
|
||||
"customFields": WorkflowActionParameter(
|
||||
name="customFields",
|
||||
type="str",
|
||||
frontendType=FrontendType.TEXTAREA,
|
||||
required=False,
|
||||
description='JSON array of custom field filters per ClickUp API, e.g. [{"field_id":"abc","operator":"=","value":"123"}]',
|
||||
),
|
||||
},
|
||||
execute=list_tasks.__get__(self, self.__class__),
|
||||
),
|
||||
|
|
|
|||
|
|
@ -35,6 +35,12 @@ def _persistDocumentsToUserFiles(
|
|||
return
|
||||
if not mgmt:
|
||||
return
|
||||
logger.info(
|
||||
"file.create persist: mgmt=%s id(mgmt)=%s has_createFileData=%s",
|
||||
type(mgmt).__name__,
|
||||
id(mgmt),
|
||||
hasattr(mgmt, "createFileData"),
|
||||
)
|
||||
for doc in action_documents:
|
||||
try:
|
||||
doc_data = doc.documentData if hasattr(doc, "documentData") else doc.get("documentData")
|
||||
|
|
@ -54,8 +60,15 @@ def _persistDocumentsToUserFiles(
|
|||
or doc.get("mimeType")
|
||||
or "application/octet-stream"
|
||||
)
|
||||
logger.info(
|
||||
"file.create persist: calling createFile name=%s bytes=%s",
|
||||
doc_name,
|
||||
len(content),
|
||||
)
|
||||
file_item = mgmt.createFile(doc_name, mime, content)
|
||||
mgmt.createFileData(file_item.id, content)
|
||||
logger.info("file.create persist: createFile returned id=%s", file_item.id)
|
||||
ok = mgmt.createFileData(file_item.id, content)
|
||||
logger.info("file.create persist: createFileData returned %s for id=%s", ok, file_item.id)
|
||||
meta = getattr(doc, "validationMetadata", None) or doc.get("validationMetadata") or {}
|
||||
if isinstance(meta, dict):
|
||||
meta["fileId"] = file_item.id
|
||||
|
|
@ -74,12 +87,15 @@ async def create(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
Create a file from context (text/markdown from upstream AI node).
|
||||
Uses GenerationService.renderReport to produce docx, pdf, txt, md, html, xlsx, etc.
|
||||
"""
|
||||
context = parameters.get("context", "") or parameters.get("text", "") or ""
|
||||
if not isinstance(context, str):
|
||||
context = str(context) if context else ""
|
||||
context = context.strip()
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
raw_context = parameters.get("context", "") or parameters.get("text", "") or ""
|
||||
context = serialize_context(raw_context)
|
||||
|
||||
if not context:
|
||||
logger.warning(
|
||||
"file.create: context empty after resolve — check DataRefs (e.g. Antworttext / "
|
||||
"documents[0].documentData from the AI step)."
|
||||
)
|
||||
return ActionResult.isFailure(error="context is required (connect an AI node or provide text)")
|
||||
|
||||
outputFormat = (parameters.get("outputFormat") or "docx").strip().lower().lstrip(".")
|
||||
|
|
|
|||
|
|
@ -14,7 +14,8 @@ async def composeAndDraftEmailWithContext(self, parameters: Dict[str, Any]) -> A
|
|||
try:
|
||||
connectionReference = parameters.get("connectionReference")
|
||||
to = parameters.get("to") or [] # Optional for drafts - can save draft without recipients
|
||||
context = parameters.get("context")
|
||||
from modules.workflows.methods.methodAi._common import serialize_context
|
||||
context = serialize_context(parameters.get("context")) or None
|
||||
documentList = parameters.get("documentList") or []
|
||||
replySourceDocuments = parameters.get("replySourceDocuments") or [] # Original email(s) for reply attachment
|
||||
# ``attachments`` (added in 2026-04 for the PWG pilot) is a list of
|
||||
|
|
|
|||
58
scripts/stage0_filefolder_schema_check.py
Normal file
58
scripts/stage0_filefolder_schema_check.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
"""Stage 0: verify FileFolder table + FileItem.folderId column in management DB.
|
||||
|
||||
Run from the gateway directory (same as uvicorn):
|
||||
python -m scripts.stage0_filefolder_schema_check
|
||||
"""
|
||||
from modules.connectors.connectorDbPostgre import getCachedConnector
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
|
||||
managementDatabase = "poweron_management"
|
||||
|
||||
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
|
||||
dbUser = APP_CONFIG.get("DB_USER")
|
||||
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
|
||||
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
|
||||
|
||||
c = getCachedConnector(
|
||||
dbHost=dbHost,
|
||||
dbDatabase=managementDatabase,
|
||||
dbUser=dbUser,
|
||||
dbPassword=dbPassword,
|
||||
dbPort=dbPort,
|
||||
userId=None,
|
||||
)
|
||||
if not c or not c.connection:
|
||||
print("STAGE0: DB_CONNECTION=none (check config.ini / .env)")
|
||||
raise SystemExit(2)
|
||||
|
||||
cur = c.connection.cursor()
|
||||
|
||||
|
||||
def _scalar(cur):
|
||||
row = cur.fetchone()
|
||||
if row is None:
|
||||
return None
|
||||
if isinstance(row, dict):
|
||||
return next(iter(row.values()))
|
||||
return row[0]
|
||||
|
||||
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_name = 'FileFolder'
|
||||
) AS ok
|
||||
"""
|
||||
)
|
||||
print("STAGE0: FileFolder_table=", _scalar(cur))
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'FileItem' AND column_name = 'folderId'
|
||||
) AS ok
|
||||
"""
|
||||
)
|
||||
print("STAGE0: FileItem_folderId_column=", _scalar(cur))
|
||||
cur.close()
|
||||
327
tests/unit/interfaces/test_folderRbac.py
Normal file
327
tests/unit/interfaces/test_folderRbac.py
Normal file
|
|
@ -0,0 +1,327 @@
|
|||
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for folder RBAC two-user matrix (ownership & scope visibility)."""

import uuid
import pytest
from unittest.mock import Mock, patch, MagicMock
from typing import Dict, Any, List, Optional

from modules.datamodels.datamodelFiles import FileFolder, FileItem
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.interfaces.interfaceDbManagement import ComponentObjects, FileNotFoundError


_MANDATE_ID = "mandate-test-1"
_FEATURE_INSTANCE_ID = "fi-test-1"
_USER_A = "user-a-id"
_USER_B = "user-b-id"


# ── Fakes & helpers ──────────────────────────────────────────────────────────

class _FakeDb:
    """In-memory database mock."""

    def __init__(self):
        self._tables: Dict[str, Dict[str, Dict[str, Any]]] = {}
        self.connection = MagicMock()

    def getRecordset(self, modelClass, recordFilter=None):
        tableName = modelClass.__name__
        records = list(self._tables.get(tableName, {}).values())
        if not recordFilter:
            return records
        return [
            r for r in records
            if all(r.get(k) == v for k, v in recordFilter.items())
        ]

    def recordCreate(self, modelClass, data):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        rec = data.model_dump() if hasattr(data, "model_dump") else dict(data)
        rec.setdefault("id", str(uuid.uuid4()))
        self._tables[tableName][rec["id"]] = rec
        return rec

    def recordModify(self, modelClass, recordId, updates):
        tbl = self._tables.get(modelClass.__name__, {})
        if recordId in tbl:
            tbl[recordId].update(updates)
            return True
        return False

    def recordDelete(self, modelClass, recordId):
        tbl = self._tables.get(modelClass.__name__, {})
        if recordId in tbl:
            del tbl[recordId]
            return True
        return False

    def updateContext(self, userId):
        pass

    def _ensure_connection(self):
        pass

    def _ensureTableExists(self, modelClass):
        return True

    def seed(self, modelClass, record: Dict[str, Any]):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        self._tables[tableName][record["id"]] = dict(record)


def _makeUser(userId, username="testuser"):
    return User(id=userId, username=username, language="en")


def _makeRbac(
    createLevel=AccessLevel.ALL,
    readLevel=AccessLevel.ALL,
    updateLevel=AccessLevel.MY,
    deleteLevel=AccessLevel.MY,
):
    """Default: regular user can read all, but write only own records."""
    rbac = Mock()
    perms = UserPermissions(
        view=True,
        read=readLevel,
        create=createLevel,
        update=updateLevel,
        delete=deleteLevel,
    )
    rbac.getUserPermissions.return_value = perms
    return rbac


def _buildComponent(userId, fakeDb, rbac=None):
    with patch.object(ComponentObjects, "__init__", lambda self: None):
        comp = ComponentObjects()
        comp.db = fakeDb
        comp.currentUser = _makeUser(userId)
        comp.userId = userId
        comp.mandateId = _MANDATE_ID
        comp.featureInstanceId = _FEATURE_INSTANCE_ID
        comp.rbac = rbac or _makeRbac()
        comp.userLanguage = "en"
        return comp


def _makeFolder(
    folderId=None, name="Folder", parentId=None,
    userId=_USER_A, scope="personal", neutralize=False,
):
    return {
        "id": folderId or str(uuid.uuid4()),
        "name": name,
        "parentId": parentId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": neutralize,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
    }


def _makeFile(fileId=None, folderId=None, userId=_USER_A, scope="personal"):
    return {
        "id": fileId or str(uuid.uuid4()),
        "fileName": "test.txt",
        "mimeType": "text/plain",
        "fileHash": "abc123",
        "fileSize": 100,
        "folderId": folderId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": False,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
        "tags": None,
        "description": None,
        "status": None,
    }


def _scopeAwareMock(fakeDb):
    """Side-effect for getRecordsetWithRBAC that simulates scope-based visibility.

    Visibility rules:
    - Owner (sysCreatedBy == currentUser.id) always sees the record
    - scope='global' -> visible to everyone
    - scope='mandate' -> visible when mandateId matches
    - scope='featureInstance' -> visible when featureInstanceId matches
    - scope='personal' -> owner only (already covered above)
    """
    def _fn(connector, modelClass, currentUser, recordFilter=None, **kwargs):
        requestMandateId = kwargs.get("mandateId", _MANDATE_ID)
        requestFiId = kwargs.get("featureInstanceId", _FEATURE_INSTANCE_ID)
        allRecords = fakeDb.getRecordset(modelClass, recordFilter=recordFilter)
        visible = []
        for rec in allRecords:
            if rec.get("sysCreatedBy") == currentUser.id:
                visible.append(rec)
                continue
            scope = rec.get("scope", "personal")
            if scope == "global":
                visible.append(rec)
            elif scope == "mandate" and rec.get("mandateId") == requestMandateId:
                visible.append(rec)
            elif scope == "featureInstance" and rec.get("featureInstanceId") == requestFiId:
                visible.append(rec)
        return visible
    return _fn


# ── Test class ───────────────────────────────────────────────────────────────

@patch("modules.interfaces.interfaceDbManagement.getRecordsetWithRBAC")
class TestFolderRbac:
    """Two-user matrix: ownership, scope visibility, and write-access guards."""

    # ── 1. Ownership visibility ───────────────────────────────────────────

    def testUserAFolderInOwnTreeNotInUserBOwnTree(self, mockRbacGet):
        """User A's personal folder appears in A's own tree, not in B's."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", name="A-Folder", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compA = _buildComponent(_USER_A, fakeDb)
        ownA = compA.getOwnFolderTree()
        assert any(f["id"] == "fa-1" for f in ownA)

        compB = _buildComponent(_USER_B, fakeDb)
        ownB = compB.getOwnFolderTree()
        assert not any(f["id"] == "fa-1" for f in ownB)

    # ── 2. Scope change -> shared visibility ──────────────────────────────

    def testScopeChangeToMandateMakesVisibleToUserB(self, mockRbacGet):
        """Changing scope from personal to mandate makes the folder appear
        in User B's shared tree."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="personal", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        sharedBefore = compB.getSharedFolderTree()
        assert not any(f["id"] == "fa-1" for f in sharedBefore)

        fakeDb.recordModify(FileFolder, "fa-1", {"scope": "mandate"})

        sharedAfter = compB.getSharedFolderTree()
        assert any(f["id"] == "fa-1" for f in sharedAfter)

    # ── 3-7. Non-owner cannot mutate ──────────────────────────────────────

    def testUserBCannotRenameFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.renameFolder("fa-1", "Hijacked")

    def testUserBCannotMoveFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        fakeDb.seed(FileFolder, _makeFolder(folderId="fb-1", scope="mandate", userId=_USER_B))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.moveFolder("fa-1", "fb-1")

    def testUserBCannotDeleteFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.deleteFolderCascade("fa-1")

    def testUserBCannotPatchScopeOnFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.patchFolderScope("fa-1", "personal")

    def testUserBCannotPatchNeutralizeOnFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.patchFolderNeutralize("fa-1", True)

    # ── 8. contextOrphan ──────────────────────────────────────────────────

    def testContextOrphanWhenParentFolderNotShared(self, mockRbacGet):
        """User A's parent folder is personal, child folder is mandate.
        User B sees only the child, flagged as contextOrphan."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="parent-f", name="Private Parent", userId=_USER_A, scope="personal",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="child-f", name="Shared Child", userId=_USER_A,
            parentId="parent-f", scope="mandate",
        ))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        shared = compB.getSharedFolderTree()

        assert len(shared) == 1
        assert shared[0]["id"] == "child-f"
        assert shared[0]["contextOrphan"] is True

    # ── 9. Shared folder children visible ─────────────────────────────────

    def testSharedFolderMakesChildrenVisible(self, mockRbacGet):
        """When User A shares a folder tree (scope=mandate), all child folders
        become visible in User B's shared tree."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="root-f", name="Root", userId=_USER_A, scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="child1-f", name="Child 1", userId=_USER_A,
            parentId="root-f", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="child2-f", name="Child 2", userId=_USER_A,
            parentId="root-f", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="grandchild-f", name="Grandchild", userId=_USER_A,
            parentId="child1-f", scope="mandate",
        ))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        shared = compB.getSharedFolderTree()

        sharedIds = {f["id"] for f in shared}
        assert sharedIds == {"root-f", "child1-f", "child2-f", "grandchild-f"}

        byId = {f["id"]: f for f in shared}
        assert byId["root-f"]["contextOrphan"] is False
        assert byId["child1-f"]["contextOrphan"] is False
        assert byId["child2-f"]["contextOrphan"] is False
        assert byId["grandchild-f"]["contextOrphan"] is False
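For reference, the visibility policy that _scopeAwareMock drives into the tests above, restated as a standalone predicate. This is a sketch of the rules as this test file models them, not the production getRecordsetWithRBAC implementation:

def isVisible(rec: dict, userId: str, mandateId: str, featureInstanceId: str) -> bool:
    # Owner always sees their own record, regardless of scope.
    if rec.get("sysCreatedBy") == userId:
        return True
    scope = rec.get("scope", "personal")
    if scope == "global":
        return True
    if scope == "mandate":
        return rec.get("mandateId") == mandateId
    if scope == "featureInstance":
        return rec.get("featureInstanceId") == featureInstanceId
    return False  # 'personal' (and unknown scopes) stay owner-only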
392 tests/unit/routes/test_folder_crud.py Normal file

@@ -0,0 +1,392 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for folder CRUD operations in ComponentObjects."""

import uuid
import pytest
from unittest.mock import Mock, patch, MagicMock
from typing import Dict, Any, List, Optional

from modules.datamodels.datamodelFiles import FileFolder, FileItem
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.interfaces.interfaceDbManagement import ComponentObjects, FileNotFoundError


_MANDATE_ID = "mandate-test-1"
_FEATURE_INSTANCE_ID = "fi-test-1"
_USER_ID = "user-a-id"


# ── Fakes & helpers ──────────────────────────────────────────────────────────

class _FakeDb:
    """In-memory database mock that mimics DatabaseConnector for unit tests."""

    def __init__(self):
        self._tables: Dict[str, Dict[str, Dict[str, Any]]] = {}
        self.connection = MagicMock()

    def getRecordset(self, modelClass, recordFilter=None):
        tableName = modelClass.__name__
        records = list(self._tables.get(tableName, {}).values())
        if not recordFilter:
            return records
        return [
            r for r in records
            if all(r.get(k) == v for k, v in recordFilter.items())
        ]

    def recordCreate(self, modelClass, data):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        rec = data.model_dump() if hasattr(data, "model_dump") else dict(data)
        rec.setdefault("id", str(uuid.uuid4()))
        self._tables[tableName][rec["id"]] = rec
        return rec

    def recordModify(self, modelClass, recordId, updates):
        tableName = modelClass.__name__
        tbl = self._tables.get(tableName, {})
        if recordId in tbl:
            tbl[recordId].update(updates)
            return True
        return False

    def recordDelete(self, modelClass, recordId):
        tableName = modelClass.__name__
        tbl = self._tables.get(tableName, {})
        if recordId in tbl:
            del tbl[recordId]
            return True
        return False

    def updateContext(self, userId):
        pass

    def _ensure_connection(self):
        pass

    def _ensureTableExists(self, modelClass):
        return True

    def seed(self, modelClass, record: Dict[str, Any]):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        self._tables[tableName][record["id"]] = dict(record)


def _makeUser(userId=_USER_ID, username="testuser"):
    return User(id=userId, username=username, language="en")


def _makeRbac(
    createLevel=AccessLevel.ALL,
    readLevel=AccessLevel.ALL,
    updateLevel=AccessLevel.ALL,
    deleteLevel=AccessLevel.ALL,
):
    rbac = Mock()
    perms = UserPermissions(
        view=True,
        read=readLevel,
        create=createLevel,
        update=updateLevel,
        delete=deleteLevel,
    )
    rbac.getUserPermissions.return_value = perms
    return rbac


def _buildComponent(
    userId=_USER_ID,
    fakeDb=None,
    rbac=None,
    mandateId=_MANDATE_ID,
    featureInstanceId=_FEATURE_INSTANCE_ID,
):
    """Construct a ComponentObjects with mocked internals (no real DB)."""
    with patch.object(ComponentObjects, "__init__", lambda self: None):
        comp = ComponentObjects()
        comp.db = fakeDb or _FakeDb()
        comp.currentUser = _makeUser(userId)
        comp.userId = userId
        comp.mandateId = mandateId
        comp.featureInstanceId = featureInstanceId
        comp.rbac = rbac or _makeRbac()
        comp.userLanguage = "en"
        return comp


def _rbacFromFakeDb(fakeDb):
    """Side-effect for getRecordsetWithRBAC that delegates to _FakeDb."""
    def _fn(connector, modelClass, currentUser, recordFilter=None, **kwargs):
        return fakeDb.getRecordset(modelClass, recordFilter=recordFilter)
    return _fn


def _makeFolder(
    folderId=None, name="Folder", parentId=None,
    userId=_USER_ID, scope="personal", neutralize=False,
):
    return {
        "id": folderId or str(uuid.uuid4()),
        "name": name,
        "parentId": parentId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": neutralize,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
    }


def _makeFile(fileId=None, folderId=None, userId=_USER_ID, scope="personal"):
    return {
        "id": fileId or str(uuid.uuid4()),
        "fileName": "test.txt",
        "mimeType": "text/plain",
        "fileHash": "abc123",
        "fileSize": 100,
        "folderId": folderId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": False,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
        "tags": None,
        "description": None,
        "status": None,
    }


# ── Test class ───────────────────────────────────────────────────────────────

@patch("modules.interfaces.interfaceDbManagement.getRecordsetWithRBAC")
class TestFolderCrud:
    """Tests for folder create / rename / move / delete / patch operations."""

    # ── Create ────────────────────────────────────────────────────────────

    def testCreateFolderHappyPath(self, mockRbacGet):
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.createFolder("Test Folder")

        assert result["name"] == "Test Folder"
        assert result["scope"] == "personal"
        assert result["parentId"] is None
        assert result["mandateId"] == _MANDATE_ID

    def testCreateFolderWithParent(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="parent-1", name="Parent"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.createFolder("Child Folder", parentId="parent-1")

        assert result["name"] == "Child Folder"
        assert result["parentId"] == "parent-1"

    def testCreateFolderMissingNameNoInterfaceValidation(self, mockRbacGet):
        """Interface does not validate empty name; the route layer returns 400."""
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.createFolder("")
        assert result["name"] == ""

    # ── Rename ────────────────────────────────────────────────────────────

    def testRenameFolderHappyPath(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", name="Old Name"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.renameFolder("f-1", "New Name")

        assert result["name"] == "New Name"
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["name"] == "New Name"

    def testRenameFolderNotFound(self, mockRbacGet):
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(FileNotFoundError):
            comp.renameFolder("nonexistent", "New Name")

    # ── Move ──────────────────────────────────────────────────────────────

    def testMoveFolderHappyPath(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", name="Movable"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="t-1", name="Target"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.moveFolder("f-1", "t-1")

        assert result["parentId"] == "t-1"
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["parentId"] == "t-1"

    def testMoveFolderToRoot(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", name="Nested", parentId="old"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.moveFolder("f-1", None)

        assert result["parentId"] is None

    def testMoveFolderCircularReference(self, mockRbacGet):
        """A -> B -> C: moving A under C creates a cycle."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="a", name="A", parentId=None))
        fakeDb.seed(FileFolder, _makeFolder(folderId="b", name="B", parentId="a"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="c", name="C", parentId="b"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(ValueError, match="circular reference"):
            comp.moveFolder("a", "c")

    # ── Delete cascade ────────────────────────────────────────────────────

    def testDeleteFolderCascade(self, mockRbacGet):
        """Deleting root folder removes root + child + their files."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="root", name="Root"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="child", name="Child", parentId="root"))
        fakeDb.seed(FileItem, _makeFile(fileId="file-1", folderId="root"))
        fakeDb.seed(FileItem, _makeFile(fileId="file-2", folderId="child"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.deleteFolderCascade("root")

        assert result["deletedFolders"] == 2
        assert result["deletedFiles"] == 2

    def testDeleteFolderNotFound(self, mockRbacGet):
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(FileNotFoundError):
            comp.deleteFolderCascade("nonexistent")

    # ── Patch scope ───────────────────────────────────────────────────────

    def testPatchScopeNoCascade(self, mockRbacGet):
        """Change folder scope without cascading to files."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", scope="personal"))
        fakeDb.seed(FileItem, _makeFile(fileId="file-1", folderId="f-1"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.patchFolderScope("f-1", "mandate", cascadeToFiles=False)

        assert result["scope"] == "mandate"
        assert result["filesUpdated"] == 0
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["scope"] == "mandate"
        assert fakeDb.getRecordset(FileItem, {"id": "file-1"})[0]["scope"] == "personal"

    def testPatchScopeWithCascade(self, mockRbacGet):
        """cascadeToFiles=True updates only owned files in the folder."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", scope="personal"))
        fakeDb.seed(FileItem, _makeFile(fileId="own-file", folderId="f-1"))
        fakeDb.seed(FileItem, _makeFile(fileId="other-file", folderId="f-1", userId="user-b"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.patchFolderScope("f-1", "mandate", cascadeToFiles=True)

        assert result["filesUpdated"] == 1
        assert fakeDb.getRecordset(FileItem, {"id": "own-file"})[0]["scope"] == "mandate"
        assert fakeDb.getRecordset(FileItem, {"id": "other-file"})[0]["scope"] == "personal"

    def testPatchScopeInvalid(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(ValueError, match="Invalid scope"):
            comp.patchFolderScope("f-1", "invalid_scope")

    # ── Patch neutralize ──────────────────────────────────────────────────

    def testPatchNeutralizeToggle(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", neutralize=False))
        fakeDb.seed(FileItem, _makeFile(fileId="file-1", folderId="f-1"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        resultOn = comp.patchFolderNeutralize("f-1", True)
        assert resultOn["neutralize"] is True
        assert resultOn["filesUpdated"] == 1
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["neutralize"] is True
        assert fakeDb.getRecordset(FileItem, {"id": "file-1"})[0]["neutralize"] is True

        resultOff = comp.patchFolderNeutralize("f-1", False)
        assert resultOff["neutralize"] is False
        assert fakeDb.getRecordset(FileItem, {"id": "file-1"})[0]["neutralize"] is False

    # ── Tree queries ──────────────────────────────────────────────────────

    def testGetOwnFolderTree(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="own-1", name="Mine"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="other-1", name="Theirs", userId="user-b"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.getOwnFolderTree()

        assert len(result) == 1
        assert result[0]["id"] == "own-1"

    def testGetSharedFolderTreeWithContextOrphan(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="own", name="Own"))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="shared-root", name="Shared Root", userId="user-b", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="shared-child", name="Shared Child", userId="user-b",
            parentId="shared-root", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="orphan", name="Orphan", userId="user-b",
            parentId="invisible-parent", scope="mandate",
        ))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.getSharedFolderTree()

        ids = {r["id"] for r in result}
        assert "own" not in ids
        assert "shared-root" in ids
        assert "shared-child" in ids
        assert "orphan" in ids

        byId = {r["id"]: r for r in result}
        assert byId["shared-root"]["contextOrphan"] is False
        assert byId["shared-child"]["contextOrphan"] is False
        assert byId["orphan"]["contextOrphan"] is True
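Both new test modules carry near-identical copies of _FakeDb, _makeFolder, and _makeFile. If that duplication grows, a shared conftest.py could host one copy as a fixture; a minimal sketch, where the tests/unit/fakes.py module and fixture name are suggestions rather than part of this change set:

# tests/unit/conftest.py (hypothetical follow-up; not part of this diff)
import pytest


@pytest.fixture
def fakeDb():
    # Assumes _FakeDb has been lifted into a shared tests.unit.fakes module
    # so the interface and route suites import a single copy.
    from tests.unit.fakes import _FakeDb
    return _FakeDb()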