fixed component FormGeneratorTree and trustee workflows
This commit is contained in:
parent d3d682fe4d
commit 3da6e24bec
31 changed files with 1727 additions and 69 deletions
@@ -210,6 +210,9 @@ class ClickupListsAdapter(ServiceAdapter):
         data = await self._svc.getTask(task_id)
         if isinstance(data, dict) and data.get("error"):
             return json.dumps(data).encode("utf-8")
+        returnedId = data.get("id", "") if isinstance(data, dict) else ""
+        if returnedId and returnedId != task_id:
+            logger.warning(f"ClickUp download: requested task_id={task_id} but API returned id={returnedId}")
         payload = json.dumps(data, indent=2).encode("utf-8")
         return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")
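Note on the new guard: it only logs a mismatch between the requested and the returned task id; it does not abort the download. A minimal sketch of that behaviour in isolation (names and payload shapes here are illustrative, not the real ClickUp client):

```python
import json
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("clickup.demo")

def check_returned_id(task_id: str, data) -> None:
    # Mirrors the diff: warn only when the API echoes back a different id.
    returnedId = data.get("id", "") if isinstance(data, dict) else ""
    if returnedId and returnedId != task_id:
        logger.warning(f"ClickUp download: requested task_id={task_id} but API returned id={returnedId}")

check_returned_id("abc123", {"id": "xyz999"})   # logs a warning
check_returned_id("abc123", {"id": "abc123"})   # silent
check_returned_id("abc123", json.loads("[]"))   # non-dict payload: silent
```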
@@ -155,9 +155,12 @@ def coerceDocumentReferenceList(value: Any) -> DocumentReferenceList:
             return coerceDocumentReferenceList(value[innerKey])
         docId = value.get("documentId") or value.get("id")
         if docId:
+            docIdStr = str(docId)
+            if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
+                return DocumentReferenceList.from_string_list([docIdStr])
             return DocumentReferenceList(references=[
                 DocumentItemReference(
-                    documentId=str(docId),
+                    documentId=docIdStr,
                     fileName=value.get("fileName") or value.get("name"),
                 )
             ])
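The new branch routes ids that already carry a `docItem:`/`docList:` prefix through `from_string_list` instead of wrapping them as a plain item reference. A hedged sketch with stand-in types (the real `DocumentReferenceList` lives in the project's datamodels; only the routing logic is shown):

```python
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class DocumentItemReference:
    documentId: str
    fileName: Optional[str] = None

@dataclass
class DocumentReferenceList:
    references: List[DocumentItemReference]

    @classmethod
    def from_string_list(cls, ids: List[str]) -> "DocumentReferenceList":
        # Assumed behaviour: parses "docItem:<id>" / "docList:<label>" strings.
        return cls(references=[DocumentItemReference(documentId=s) for s in ids])

def coerce(value: dict) -> DocumentReferenceList:
    docId = value.get("documentId") or value.get("id")
    docIdStr = str(docId)
    if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
        return DocumentReferenceList.from_string_list([docIdStr])
    return DocumentReferenceList(references=[
        DocumentItemReference(documentId=docIdStr, fileName=value.get("fileName"))
    ])

print(coerce({"id": "docItem:42"}))               # routed through from_string_list
print(coerce({"id": "42", "fileName": "a.pdf"}))  # plain item reference
```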
@@ -180,10 +183,15 @@ def coerceDocumentReferenceList(value: Any) -> DocumentReferenceList:
                 continue
             docId = item.get("documentId") or item.get("id")
             if docId:
-                references.append(DocumentItemReference(
-                    documentId=str(docId),
-                    fileName=item.get("fileName") or item.get("name"),
-                ))
+                docIdStr = str(docId)
+                if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
+                    parsed = DocumentReferenceList.from_string_list([docIdStr])
+                    references.extend(parsed.references)
+                else:
+                    references.append(DocumentItemReference(
+                        documentId=docIdStr,
+                        fileName=item.get("fileName") or item.get("name"),
+                    ))
             elif item.get("label"):
                 references.append(DocumentListReference(
                     label=str(item["label"]),
@@ -10,6 +10,69 @@ import uuid
 import base64
 
 
+@i18nModel("Ordner")
+class FileFolder(PowerOnModel):
+    """Persistenter Datei-Ordner im Management-DB-Kontext (RBAC wie FileItem)."""
+
+    id: str = Field(
+        default_factory=lambda: str(uuid.uuid4()),
+        description="Primary key",
+        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+    )
+    name: str = Field(
+        description="Display name of the folder",
+        json_schema_extra={"label": "Name", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
+    )
+    parentId: Optional[str] = Field(
+        default=None,
+        description="Parent folder id; empty or None for root",
+        json_schema_extra={
+            "label": "Uebergeordneter Ordner",
+            "frontend_type": "text",
+            "frontend_readonly": False,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
+        },
+    )
+    mandateId: Optional[str] = Field(
+        default="",
+        description="ID of the mandate this folder belongs to",
+        json_schema_extra={
+            "label": "Mandant",
+            "frontend_type": "text",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
+        },
+    )
+    featureInstanceId: Optional[str] = Field(
+        default="",
+        description="ID of the feature instance this folder belongs to",
+        json_schema_extra={
+            "label": "Feature-Instanz",
+            "frontend_type": "text",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
+        },
+    )
+    scope: str = Field(
+        default="personal",
+        description="Data visibility scope: personal, featureInstance, mandate, global",
+        json_schema_extra={"label": "Sichtbarkeit", "frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
+            {"value": "personal", "label": "Persönlich"},
+            {"value": "featureInstance", "label": "Feature-Instanz"},
+            {"value": "mandate", "label": "Mandant"},
+            {"value": "global", "label": "Global"},
+        ]},
+    )
+    neutralize: bool = Field(
+        default=False,
+        description="Whether files in this folder should be neutralized before AI processing",
+        json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
+    )
+
+
 @i18nModel("Datei")
 class FileItem(PowerOnModel):
     """Metadaten einer gespeicherten Datei."""
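How the new model behaves at runtime: `FileFolder` is a `PowerOnModel` (Pydantic-based) in the codebase; a plain Pydantic stand-in is enough to show the defaulting the folder endpoints rely on (auto-generated uuid `id`, scope `"personal"`, `neutralize` off). Names here are a sketch, not the project class:

```python
import uuid
from typing import Optional
from pydantic import BaseModel, Field

class FileFolderSketch(BaseModel):
    # Same fields and defaults as the FileFolder model added above.
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    name: str
    parentId: Optional[str] = None
    mandateId: Optional[str] = ""
    featureInstanceId: Optional[str] = ""
    scope: str = "personal"
    neutralize: bool = False

folder = FileFolderSketch(name="Reports")
print(folder.model_dump())  # id auto-generated, scope defaults to "personal"
```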
@@ -44,6 +107,17 @@ class FileItem(PowerOnModel):
             "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
+    folderId: Optional[str] = Field(
+        default=None,
+        description="ID of the folder containing this file (if any)",
+        json_schema_extra={
+            "label": "Ordner",
+            "frontend_type": "text",
+            "frontend_readonly": False,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
+        },
+    )
     mimeType: str = Field(
         description="MIME type of the file",
         json_schema_extra={"label": "MIME-Typ", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
@@ -19,7 +19,7 @@ from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetP
 from modules.security.rbac import RbacClass
 from modules.datamodels.datamodelRbac import AccessRuleContext
 from modules.datamodels.datamodelUam import AccessLevel
-from modules.datamodels.datamodelFiles import FilePreview, FileItem, FileData
+from modules.datamodels.datamodelFiles import FilePreview, FileItem, FileData, FileFolder
 from modules.datamodels.datamodelUtils import Prompt
 from modules.datamodels.datamodelMessaging import (
     MessagingSubscription,
@@ -1067,7 +1067,242 @@ class ComponentObjects:
         except Exception as e:
             logger.error(f"Error converting file record: {str(e)}")
             return None
 
+    # ── Folder methods ─────────────────────────────────────────────────────────
+
+    def getOwnFolderTree(self) -> List[Dict[str, Any]]:
+        """Folders owned by the current user, filtered via RBAC."""
+        return getRecordsetWithRBAC(
+            self.db, FileFolder, self.currentUser,
+            recordFilter={"sysCreatedBy": self.userId},
+            mandateId=self.mandateId,
+            featureInstanceId=self.featureInstanceId,
+        )
+
+    def getSharedFolderTree(self) -> List[Dict[str, Any]]:
+        """Folders visible via scope but NOT owned by the current user.
+        Adds contextOrphan=True when a folder's parentId is not in the result set."""
+        allFolders = getRecordsetWithRBAC(
+            self.db, FileFolder, self.currentUser,
+            mandateId=self.mandateId,
+            featureInstanceId=self.featureInstanceId,
+        )
+        shared = [f for f in allFolders if f.get("sysCreatedBy") != self.userId]
+        sharedIds = {f["id"] for f in shared}
+        for f in shared:
+            f["contextOrphan"] = bool(f.get("parentId") and f["parentId"] not in sharedIds)
+        return shared
+
+    def getFolder(self, folderId: str) -> Optional[Dict[str, Any]]:
+        """Return a single folder dict or None."""
+        results = getRecordsetWithRBAC(
+            self.db, FileFolder, self.currentUser,
+            recordFilter={"id": folderId},
+            mandateId=self.mandateId,
+            featureInstanceId=self.featureInstanceId,
+        )
+        return results[0] if results else None
+
+    def _isFolderOwner(self, folder) -> bool:
+        createdBy = (
+            getattr(folder, "sysCreatedBy", None)
+            or (folder.get("sysCreatedBy") if isinstance(folder, dict) else None)
+        )
+        return createdBy == self.userId
+
+    def _requireFolderWriteAccess(self, folder, folderId: str, operation: str = "update"):
+        """Raise PermissionError if the user cannot mutate this folder.
+        Owners always can. Non-owners need RBAC ALL level."""
+        if self._isFolderOwner(folder):
+            return
+        from modules.interfaces.interfaceRbac import buildDataObjectKey
+        objectKey = buildDataObjectKey("FileFolder")
+        permissions = self.rbac.getUserPermissions(
+            self.currentUser, AccessRuleContext.DATA, objectKey,
+            mandateId=self.mandateId, featureInstanceId=self.featureInstanceId,
+        )
+        level = getattr(permissions, operation, None)
+        if level != AccessLevel.ALL:
+            raise PermissionError(
+                f"No permission to {operation} folder {folderId} (not owner, access level: {level})"
+            )
+
+    def createFolder(self, name: str, parentId: Optional[str] = None) -> Dict[str, Any]:
+        if not self.checkRbacPermission(FileFolder, "create"):
+            raise PermissionError("No permission to create folders")
+        folder = FileFolder(
+            name=name,
+            parentId=parentId,
+            mandateId=self.mandateId or "",
+            featureInstanceId=self.featureInstanceId or "",
+            scope="personal",
+            neutralize=False,
+        )
+        self.db.recordCreate(FileFolder, folder)
+        return folder.model_dump()
+
+    def renameFolder(self, folderId: str, newName: str) -> Dict[str, Any]:
+        folder = self.getFolder(folderId)
+        if not folder:
+            raise FileNotFoundError(f"Folder {folderId} not found")
+        self._requireFolderWriteAccess(folder, folderId, "update")
+        self.db.recordModify(FileFolder, folderId, {"name": newName})
+        folder["name"] = newName
+        return folder
+
+    def moveFolder(self, folderId: str, newParentId: Optional[str] = None) -> Dict[str, Any]:
+        folder = self.getFolder(folderId)
+        if not folder:
+            raise FileNotFoundError(f"Folder {folderId} not found")
+        self._requireFolderWriteAccess(folder, folderId, "update")
+
+        if newParentId:
+            parent = self.getFolder(newParentId)
+            if not parent:
+                raise FileNotFoundError(f"Target parent folder {newParentId} not found")
+            self._requireFolderWriteAccess(parent, newParentId, "update")
+            # Circular-reference guard: newParentId must not be a descendant of folderId
+            if self._isDescendant(newParentId, folderId):
+                raise ValueError(f"Cannot move folder into its own subtree (circular reference)")
+
+        self.db.recordModify(FileFolder, folderId, {"parentId": newParentId})
+        folder["parentId"] = newParentId
+        return folder
+
+    def _isDescendant(self, candidateId: str, ancestorId: str) -> bool:
+        """Return True if candidateId is a descendant of (or equal to) ancestorId."""
+        visited = set()
+        current = candidateId
+        while current:
+            if current == ancestorId:
+                return True
+            if current in visited:
+                break
+            visited.add(current)
+            f = self.getFolder(current)
+            current = f.get("parentId") if f else None
+        return False
+
+    def deleteFolderCascade(self, folderId: str) -> Dict[str, Any]:
+        """Delete a folder and all owned sub-folders + their files."""
+        folder = self.getFolder(folderId)
+        if not folder:
+            raise FileNotFoundError(f"Folder {folderId} not found")
+        self._requireFolderWriteAccess(folder, folderId, "delete")
+
+        folderIds = self._collectChildFolderIds(folderId)
+
+        # Verify all child folders are owned
+        for fid in folderIds:
+            if fid == folderId:
+                continue
+            child = self.getFolder(fid)
+            if child and not self._isFolderOwner(child):
+                raise PermissionError(f"Cannot delete folder tree: sub-folder {fid} is not owned by you")
+
+        # Collect files in those folders
+        fileRows = []
+        for fid in folderIds:
+            items = self.db.getRecordset(FileItem, recordFilter={"folderId": fid})
+            fileRows.extend(items)
+
+        for item in fileRows:
+            itemOwner = item.get("sysCreatedBy") if isinstance(item, dict) else getattr(item, "sysCreatedBy", None)
+            if itemOwner != self.userId:
+                itemId = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
+                raise PermissionError(f"Cannot delete folder tree: file {itemId} is not owned by you")
+
+        fileIds = [
+            (item.get("id") if isinstance(item, dict) else getattr(item, "id", None))
+            for item in fileRows
+        ]
+
+        # Single transaction: delete FileData, FileItem, then FileFolder (children first)
+        self.db._ensure_connection()
+        try:
+            with self.db.connection.cursor() as cursor:
+                if fileIds:
+                    cursor.execute('DELETE FROM "FileData" WHERE "id" = ANY(%s)', (fileIds,))
+                    cursor.execute('DELETE FROM "FileItem" WHERE "id" = ANY(%s)', (fileIds,))
+                orderedIds = list(folderIds)
+                orderedIds.remove(folderId)
+                orderedIds.append(folderId)
+                if orderedIds:
+                    cursor.execute('DELETE FROM "FileFolder" WHERE "id" = ANY(%s)', (orderedIds,))
+            self.db.connection.commit()
+        except Exception:
+            self.db.connection.rollback()
+            raise
+
+        return {"deletedFolders": len(folderIds), "deletedFiles": len(fileIds)}
+
+    def _collectChildFolderIds(self, folderId: str) -> List[str]:
+        """BFS to collect folderId + all descendant folder IDs owned by user."""
+        result = [folderId]
+        queue = [folderId]
+        while queue:
+            parentId = queue.pop(0)
+            children = self.db.getRecordset(FileFolder, recordFilter={"parentId": parentId})
+            for child in children:
+                cid = child.get("id") if isinstance(child, dict) else getattr(child, "id", None)
+                if cid and cid not in result:
+                    result.append(cid)
+                    queue.append(cid)
+        return result
+
+    def patchFolderScope(self, folderId: str, scope: str, cascadeToFiles: bool = False) -> Dict[str, Any]:
+        validScopes = {"personal", "featureInstance", "mandate", "global"}
+        if scope not in validScopes:
+            raise ValueError(f"Invalid scope: {scope}. Must be one of {validScopes}")
+
+        folder = self.getFolder(folderId)
+        if not folder:
+            raise FileNotFoundError(f"Folder {folderId} not found")
+        self._requireFolderWriteAccess(folder, folderId, "update")
+
+        if scope == "global":
+            from modules.interfaces.interfaceRbac import buildDataObjectKey
+            objectKey = buildDataObjectKey("FileFolder")
+            permissions = self.rbac.getUserPermissions(
+                self.currentUser, AccessRuleContext.DATA, objectKey,
+                mandateId=self.mandateId, featureInstanceId=self.featureInstanceId,
+            )
+            if getattr(permissions, "update", None) != AccessLevel.ALL:
+                raise PermissionError("Setting global scope requires ALL permission")
+
+        self.db.recordModify(FileFolder, folderId, {"scope": scope})
+
+        filesUpdated = 0
+        if cascadeToFiles:
+            items = self.db.getRecordset(FileItem, recordFilter={"folderId": folderId})
+            for item in items:
+                owner = item.get("sysCreatedBy") if isinstance(item, dict) else getattr(item, "sysCreatedBy", None)
+                if owner == self.userId:
+                    iid = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
+                    self.db.recordModify(FileItem, iid, {"scope": scope})
+                    filesUpdated += 1
+
+        return {"folderId": folderId, "scope": scope, "filesUpdated": filesUpdated}
+
+    def patchFolderNeutralize(self, folderId: str, neutralize: bool) -> Dict[str, Any]:
+        folder = self.getFolder(folderId)
+        if not folder:
+            raise FileNotFoundError(f"Folder {folderId} not found")
+        self._requireFolderWriteAccess(folder, folderId, "update")
+
+        self.db.recordModify(FileFolder, folderId, {"neutralize": neutralize})
+
+        items = self.db.getRecordset(FileItem, recordFilter={"folderId": folderId})
+        filesUpdated = 0
+        for item in items:
+            owner = item.get("sysCreatedBy") if isinstance(item, dict) else getattr(item, "sysCreatedBy", None)
+            if owner == self.userId:
+                iid = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
+                self.db.recordModify(FileItem, iid, {"neutralize": neutralize})
+                filesUpdated += 1
+
+        return {"folderId": folderId, "neutralize": neutralize, "filesUpdated": filesUpdated}
+
     def _isfileNameUnique(self, fileName: str, excludeFileId: Optional[str] = None) -> bool:
         """Checks if a fileName is unique for the current user."""
         # Get all files filtered by RBAC (will be filtered by user's access level)
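The move operation's correctness hinges on `_isDescendant`: it walks `parentId` upwards from the candidate parent, so moving a folder under its own subtree is rejected before the write. A self-contained sketch of that walk, with folders modelled as a plain id-to-parentId map:

```python
from typing import Dict, Optional

def is_descendant(folders: Dict[str, Optional[str]], candidate_id: str, ancestor_id: str) -> bool:
    # Walk parent links upward from candidate_id; if we hit ancestor_id,
    # candidate_id lives inside ancestor_id's subtree.
    visited = set()
    current: Optional[str] = candidate_id
    while current:
        if current == ancestor_id:
            return True
        if current in visited:  # defensive stop on pre-existing cycles
            break
        visited.add(current)
        current = folders.get(current)
    return False

tree = {"a": None, "b": "a", "c": "b"}     # a -> b -> c
assert is_descendant(tree, "c", "a")        # c is inside a's subtree
assert not is_descendant(tree, "a", "c")    # a is not inside c's subtree
# moveFolder(folderId="a", newParentId="c") would therefore raise ValueError.
```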
@@ -204,6 +204,7 @@ TABLE_NAMESPACE = {
     # Files - benutzer-eigen
     "FileItem": "files",
     "FileData": "files",
+    "FileFolder": "files",
     # Automation - benutzer-eigen
     "AutomationDefinition": "automation",
     "AutomationTemplate": "automation",
modules/migrations/_archive/README.md (new file, 11 additions)
@@ -0,0 +1,11 @@
+# Archived one-off migrations
+
+`migrate_folders_to_groups.py` copies `FileFolder` + `FileItem.folderId` into `TableGrouping` (`files/list`). It was used during an experimental UI path; **product choice** is to keep physical folders (`FileFolder`, `folderId`) and recover `FormGeneratorTree` (see `wiki/c-work/1-plan/2026-05-formgenerator-tree-and-folder-recovery.md`).
+
+Run only if you need a historical data rescue:
+
+```bash
+cd gateway
+python -m modules.migrations._archive.migrate_folders_to_groups --verbose
+python -m modules.migrations._archive.migrate_folders_to_groups --execute --verbose
+```
modules/migrations/_archive/__init__.py (new file, 1 addition)
@@ -0,0 +1 @@
+# Subpackage for archived one-off migration scripts (not part of normal app startup).
@@ -1,11 +1,16 @@
 """
-One-time migration: Convert FileFolder tree + FileItem.folderId → table_groupings.
+One-time migration: Convert FileFolder tree + FileItem.folderId to table_groupings.
 
+Archived per wiki plan 2026-05-formgenerator-tree-and-folder-recovery (Stage 1.A).
+Product direction: keep FileFolder + folderId; do not run DROP migrations.
+This script remains for audit / one-off data rescue only.
+
 Run this BEFORE dropping the physical FileFolder table and FileItem.folderId column
-from the database (those are separate Alembic/SQL steps).
+from the database (those would be separate Alembic/SQL steps -- not part of current product path).
 
-Usage:
-    python -m modules.migrations.migrate_folders_to_groups [--dry-run] [--verbose]
+Usage (from gateway working directory):
+    python -m modules.migrations._archive.migrate_folders_to_groups [--dry-run] [--verbose]
+    python -m modules.migrations._archive.migrate_folders_to_groups --execute --verbose
 
 Steps:
 1. For each distinct (userId, mandateId) combination that has FileFolder records:
@@ -30,6 +35,14 @@ from typing import Optional
 logger = logging.getLogger(__name__)
 
 
+def _scalarRow(row):
+    if row is None:
+        return None
+    if isinstance(row, dict):
+        return next(iter(row.values()))
+    return row[0]
+
+
 # ── Helpers ──────────────────────────────────────────────────────────────────
 
 def _build_tree(folders: list, parent_id: Optional[str]) -> list:
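Why `_scalarRow` exists: depending on the cursor's row factory, a fetched row can be a tuple (default) or a dict (dict-style cursors), and the helper extracts the single `EXISTS(...)` value from either shape. This also explains the `) AS ok` additions in the SQL below, which give the dict key a stable name. A minimal demonstration:

```python
def _scalarRow(row):
    # Same helper as in the diff: first value from dict rows, index 0 otherwise.
    if row is None:
        return None
    if isinstance(row, dict):
        return next(iter(row.values()))
    return row[0]

assert _scalarRow((True,)) is True        # tuple-style cursor row
assert _scalarRow({"ok": True}) is True   # dict-style cursor row (hence ") AS ok")
assert _scalarRow(None) is None           # no row fetched at all
```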
@@ -76,11 +89,19 @@ def _now_ts() -> str:
 def run_migration(dry_run: bool = True, verbose: bool = False):
     """Main migration entry point."""
     logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
-    logger.info(f"Starting folder→group migration (dry_run={dry_run})")
+    logger.info(f"Starting folder to group migration (dry_run={dry_run})")
 
     from modules.connectors.connectorDbPostgre import getCachedConnector
+    from modules.shared.configuration import APP_CONFIG
 
-    connector = getCachedConnector()
+    connector = getCachedConnector(
+        dbHost=APP_CONFIG.get("DB_HOST", "_no_config_default_data"),
+        dbDatabase="poweron_management",
+        dbUser=APP_CONFIG.get("DB_USER"),
+        dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
+        dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
+        userId=None,
+    )
     if not connector or not connector.connection:
         logger.error("Could not obtain a DB connection. Aborting.")
         return
@@ -93,17 +114,17 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
         SELECT EXISTS (
             SELECT 1 FROM information_schema.tables
             WHERE table_name = 'FileFolder'
-        )
+        ) AS ok
     """)
-    folder_table_exists = cur.fetchone()[0]
+    folder_table_exists = bool(_scalarRow(cur.fetchone()))
 
     cur.execute("""
         SELECT EXISTS (
             SELECT 1 FROM information_schema.columns
             WHERE table_name = 'FileItem' AND column_name = 'folderId'
-        )
+        ) AS ok
     """)
-    folder_column_exists = cur.fetchone()[0]
+    folder_column_exists = bool(_scalarRow(cur.fetchone()))
 
     if not folder_table_exists and not folder_column_exists:
         logger.info("FileFolder table and FileItem.folderId column not found — migration already applied or not needed.")
@@ -126,7 +147,7 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
         })
     logger.info(f"Loaded folders for {len(folders_by_user)} (user, mandate) combinations")
 
-    # ── 3. Load file→folder assignments ──────────────────────────────────────
+    # ── 3. Load file to folder assignments ────────────────────────────────────
     files_by_key: dict = {}
     if folder_column_exists:
         cur.execute(
@@ -139,7 +160,7 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
     total_files = sum(
         sum(len(v) for v in d.values()) for d in files_by_key.values()
     )
-    logger.info(f"Found {total_files} file→folder assignments across {len(files_by_key)} (user, mandate) combos")
+    logger.info(f"Found {total_files} file to folder assignments across {len(files_by_key)} (user, mandate) combos")
 
     # ── 4. Combine and upsert groupings ──────────────────────────────────────
     all_keys = set(folders_by_user.keys()) | set(files_by_key.keys())
@@ -231,7 +252,7 @@ def run_migration(dry_run: bool = True, verbose: bool = False):
 
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="Migrate FileFolder tree to table_groupings")
+    parser = argparse.ArgumentParser(description="Migrate FileFolder tree to table_groupings (archived script)")
     parser.add_argument("--dry-run", action="store_true", default=True, help="Preview only, no DB writes (default)")
     parser.add_argument("--execute", action="store_true", help="Actually write to DB (disables dry-run)")
    parser.add_argument("--verbose", action="store_true", help="Show per-user details")
@@ -57,8 +57,8 @@ def _svc_for_connection(current_user: User, connection: UserConnection):
     services = getServices(current_user, None)
     if not services.clickup.setAccessTokenFromConnection(connection):
         raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail=routeApiMsg("Failed to set ClickUp access token"),
+            status_code=status.HTTP_502_BAD_GATEWAY,
+            detail=routeApiMsg("Failed to set ClickUp access token. Connection may be expired or invalid."),
         )
     return services.clickup
@@ -11,7 +11,7 @@ from modules.auth import limiter, getCurrentUser, getRequestContext, RequestCont
 
 # Import interfaces
 import modules.interfaces.interfaceDbManagement as interfaceDbManagement
-from modules.datamodels.datamodelFiles import FileItem, FilePreview
+from modules.datamodels.datamodelFiles import FileItem, FilePreview, FileFolder
 from modules.shared.attributeUtils import getModelAttributeDefinitions
 from modules.datamodels.datamodelUam import User
 from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
@@ -72,14 +72,18 @@ def _resolveFileWithScope(currentUser: User, context: RequestContext, fileId: st
     return scopedMgmt, fileItem
 
 
-async def _autoIndexFile(fileId: str, fileName: str, mimeType: str, user):
+async def _autoIndexFile(fileId: str, fileName: str, mimeType: str, user, *, mandateId: str = None, featureInstanceId: str = None):
     """Background task: pre-scan + extraction + knowledge indexing.
     Step 1: Structure Pre-Scan (AI-free) -> FileContentIndex (persisted)
     Step 2: Content extraction via runExtraction -> ContentParts
     Step 3: KnowledgeService.requestIngestion -> idempotent chunking + embedding -> Knowledge Store"""
     userId = user.id if hasattr(user, "id") else str(user)
     try:
-        mgmtInterface = interfaceDbManagement.getInterface(user)
+        mgmtInterface = interfaceDbManagement.getInterface(
+            user,
+            mandateId=mandateId or None,
+            featureInstanceId=featureInstanceId or None,
+        )
         mgmtInterface.updateFile(fileId, {"status": "processing"})
 
         rawBytes = mgmtInterface.getFileData(fileId)
@@ -250,6 +254,213 @@ router = APIRouter(
     }
 )
 
+
+@router.get("/folders/tree")
+@limiter.limit("120/minute")
+def get_folder_tree(
+    request: Request,
+    owner: str = Query("me", description="'me' | 'shared'"),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        o = (owner or "me").strip().lower()
+        if o == "me":
+            return managementInterface.getOwnFolderTree()
+        if o == "shared":
+            return managementInterface.getSharedFolderTree()
+        raise HTTPException(status_code=400, detail="owner must be 'me' or 'shared'")
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"get_folder_tree error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.post("/folders", status_code=status.HTTP_201_CREATED)
+@limiter.limit("30/minute")
+def create_folder(
+    request: Request,
+    body: Dict[str, Any] = Body(...),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        name = body.get("name")
+        if not name or not str(name).strip():
+            raise HTTPException(status_code=400, detail="name is required")
+        parentId = body.get("parentId") or None
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        return managementInterface.createFolder(str(name).strip(), parentId)
+    except PermissionError as e:
+        raise HTTPException(status_code=403, detail=str(e))
+    except interfaceDbManagement.FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"create_folder error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.patch("/folders/{folderId}")
+@limiter.limit("30/minute")
+def rename_folder(
+    request: Request,
+    folderId: str = Path(...),
+    body: Dict[str, Any] = Body(...),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        name = body.get("name")
+        if not name or not str(name).strip():
+            raise HTTPException(status_code=400, detail="name is required")
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        return managementInterface.renameFolder(folderId, str(name).strip())
+    except PermissionError as e:
+        raise HTTPException(status_code=403, detail=str(e))
+    except interfaceDbManagement.FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"rename_folder error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.post("/folders/{folderId}/move")
+@limiter.limit("30/minute")
+def move_folder(
+    request: Request,
+    folderId: str = Path(...),
+    body: Dict[str, Any] = Body(...),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        newParentId = body.get("parentId")
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        return managementInterface.moveFolder(folderId, newParentId or None)
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+    except PermissionError as e:
+        raise HTTPException(status_code=403, detail=str(e))
+    except interfaceDbManagement.FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"move_folder error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.delete("/folders/{folderId}")
+@limiter.limit("30/minute")
+def delete_folder(
+    request: Request,
+    folderId: str = Path(...),
+    cascade: bool = Query(True, description="Cascade delete sub-folders and files"),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        return managementInterface.deleteFolderCascade(folderId)
+    except PermissionError as e:
+        raise HTTPException(status_code=403, detail=str(e))
+    except interfaceDbManagement.FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"delete_folder error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.patch("/folders/{folderId}/scope")
+@limiter.limit("30/minute")
+def patch_folder_scope(
+    request: Request,
+    folderId: str = Path(...),
+    body: Dict[str, Any] = Body(...),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        scope = body.get("scope")
+        if not scope:
+            raise HTTPException(status_code=400, detail="scope is required")
+        cascadeToFiles = body.get("cascadeToFiles", False)
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        return managementInterface.patchFolderScope(folderId, scope, cascadeToFiles)
+    except ValueError as e:
+        raise HTTPException(status_code=400, detail=str(e))
+    except PermissionError as e:
+        raise HTTPException(status_code=403, detail=str(e))
+    except interfaceDbManagement.FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"patch_folder_scope error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+@router.patch("/folders/{folderId}/neutralize")
+@limiter.limit("30/minute")
+def patch_folder_neutralize(
+    request: Request,
+    folderId: str = Path(...),
+    body: Dict[str, Any] = Body(...),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    try:
+        neutralize = body.get("neutralize")
+        if neutralize is None:
+            raise HTTPException(status_code=400, detail="neutralize is required")
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+        return managementInterface.patchFolderNeutralize(folderId, bool(neutralize))
+    except PermissionError as e:
+        raise HTTPException(status_code=403, detail=str(e))
+    except interfaceDbManagement.FileNotFoundError as e:
+        raise HTTPException(status_code=404, detail=str(e))
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"patch_folder_neutralize error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
 @router.get("/list")
 @limiter.limit("120/minute")
 def get_files(
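Usage sketch for the new folder endpoints. The base URL, any router prefix, and the auth header are assumptions; the routes, payloads, and the `owner` query parameter come from the diff above:

```python
import requests

BASE = "http://localhost:8000"                  # assumed gateway address (plus any router prefix)
HEADERS = {"Authorization": "Bearer <token>"}   # assumed auth scheme

# Create a folder, then a child, then move the child back to the root.
root = requests.post(f"{BASE}/folders", json={"name": "Projects"}, headers=HEADERS).json()
child = requests.post(f"{BASE}/folders", json={"name": "2026", "parentId": root["id"]}, headers=HEADERS).json()
requests.post(f"{BASE}/folders/{child['id']}/move", json={"parentId": None}, headers=HEADERS)

# Own vs shared trees are selected via the `owner` query parameter.
mine = requests.get(f"{BASE}/folders/tree", params={"owner": "me"}, headers=HEADERS).json()
shared = requests.get(f"{BASE}/folders/tree", params={"owner": "shared"}, headers=HEADERS).json()
```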
@@ -462,6 +673,8 @@ async def upload_file(
                 fileName=fileItem.fileName,
                 mimeType=fileItem.mimeType,
                 user=currentUser,
+                mandateId=str(context.mandateId) if context.mandateId else None,
+                featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
             ))
         except Exception as indexErr:
             logger.warning(f"Auto-index trigger failed (non-blocking): {indexErr}")
@@ -526,6 +739,110 @@ def batch_delete_items(
         raise HTTPException(status_code=500, detail=str(e))
 
 
+@router.post("/batch-download")
+@limiter.limit("10/minute")
+def batchDownload(
+    request: Request,
+    body: Dict[str, Any] = Body(...),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
+):
+    """Download multiple files and/or folders as a single ZIP archive,
+    preserving the folder hierarchy as ZIP paths."""
+    import io, zipfile
+
+    fileIds = body.get("fileIds") or []
+    folderIds = body.get("folderIds") or []
+
+    if not fileIds and not folderIds:
+        raise HTTPException(status_code=400, detail="fileIds or folderIds required")
+
+    try:
+        mgmt = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
+
+        folderCache: dict[str, dict] = {}
+
+        def _getFolder(fid: str):
+            if fid not in folderCache:
+                f = mgmt.getFolder(fid)
+                folderCache[fid] = f if f else {}
+            return folderCache[fid]
+
+        def _folderPath(fid: str) -> str:
+            """Build the full path for a folder by walking up parentId."""
+            parts: list[str] = []
+            current = fid
+            visited: set[str] = set()
+            while current and current not in visited:
+                visited.add(current)
+                folder = _getFolder(current)
+                if not folder:
+                    break
+                parts.append(folder.get("name", current))
+                current = folder.get("parentId")
+            parts.reverse()
+            return "/".join(parts)
+
+        # Collect files from requested folders (recursive)
+        fileEntries: list[tuple[str, str]] = []
+        seenFileIds: set[str] = set()
+
+        for fid in folderIds:
+            childFolderIds = mgmt._collectChildFolderIds(fid)
+            for cfid in childFolderIds:
+                prefix = _folderPath(cfid)
+                items = mgmt.db.getRecordset(FileItem, recordFilter={"folderId": cfid})
+                for item in items:
+                    itemId = item.get("id") if isinstance(item, dict) else getattr(item, "id", None)
+                    if itemId and itemId not in seenFileIds:
+                        seenFileIds.add(itemId)
+                        fileEntries.append((itemId, prefix))
+
+        # Loose files (not via folder selection)
+        for fid in fileIds:
+            if fid in seenFileIds:
+                continue
+            seenFileIds.add(fid)
+            fileMeta = mgmt.getFile(fid)
+            if not fileMeta:
+                continue
+            fileFolderId = fileMeta.get("folderId") if isinstance(fileMeta, dict) else getattr(fileMeta, "folderId", None)
+            prefix = _folderPath(fileFolderId) if fileFolderId else ""
+            fileEntries.append((fid, prefix))
+
+        if not fileEntries:
+            raise HTTPException(status_code=404, detail="No downloadable files found")
+
+        buf = io.BytesIO()
+        with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
+            for fid, prefix in fileEntries:
+                try:
+                    fileMeta = mgmt.getFile(fid)
+                    fileData = mgmt.getFileData(fid)
+                    if fileMeta and fileData:
+                        name = (fileMeta.get("fileName") if isinstance(fileMeta, dict) else getattr(fileMeta, "fileName", fid)) or fid
+                        zipPath = f"{prefix}/{name}" if prefix else name
+                        zf.writestr(zipPath, fileData)
+                except Exception as fe:
+                    logger.warning(f"batch_download: skipping file {fid}: {fe}")
+        buf.seek(0)
+        from fastapi.responses import StreamingResponse
+        return StreamingResponse(
+            buf,
+            media_type="application/zip",
+            headers={"Content-Disposition": 'attachment; filename="download.zip"'},
+        )
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"batch_download error: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
 # ── Group bulk endpoints ──────────────────────────────────────────────────────
 
 def _get_group_item_ids(contextKey: str, groupId: str, appInterface) -> set:
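The ZIP layout comes entirely from `_folderPath`: it walks `parentId` upwards, joins the folder names, and that string becomes the entry's path prefix, so nested folders turn into nested directories inside the archive. A self-contained sketch with an in-memory folder table:

```python
import io
import zipfile
from typing import Dict, Optional

folders: Dict[str, dict] = {
    "f1": {"name": "Projects", "parentId": None},
    "f2": {"name": "2026", "parentId": "f1"},
}

def folder_path(fid: Optional[str]) -> str:
    # Walk parentId upwards, collecting names root-last, then reverse.
    parts, visited = [], set()
    while fid and fid not in visited:
        visited.add(fid)
        folder = folders.get(fid)
        if not folder:
            break
        parts.append(folder["name"])
        fid = folder["parentId"]
    return "/".join(reversed(parts))

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
    zf.writestr(f"{folder_path('f2')}/report.txt", b"hello")

print(zipfile.ZipFile(io.BytesIO(buf.getvalue())).namelist())  # ['Projects/2026/report.txt']
```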
@@ -759,7 +1076,11 @@ def updateFileScope(
 
     async def _runReindexAfterScopeChange():
         try:
-            await _autoIndexFile(fileId=fileId, fileName=fn, mimeType=mt, user=context.user)
+            await _autoIndexFile(
+                fileId=fileId, fileName=fn, mimeType=mt, user=context.user,
+                mandateId=str(context.mandateId) if context.mandateId else None,
+                featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+            )
         except Exception as ex:
             logger.warning("Re-index after scope change failed for %s: %s", fileId, ex)
@@ -837,7 +1158,11 @@ def updateFileNeutralize(
 
     async def _runReindexAfterNeutralizeToggle():
         try:
-            await _autoIndexFile(fileId=fileId, fileName=fn, mimeType=mt, user=context.user)
+            await _autoIndexFile(
+                fileId=fileId, fileName=fn, mimeType=mt, user=context.user,
+                mandateId=str(context.mandateId) if context.mandateId else None,
+                featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+            )
         except Exception as ex:
             logger.error("Re-index after neutralize toggle failed for %s: %s (file has NO index until next re-index)", fileId, ex)
@@ -909,7 +1234,7 @@ def update_file(
 ) -> FileItem:
     """Update file info"""
     try:
-        _EDITABLE_FIELDS = {"fileName", "scope", "tags", "description", "neutralize"}
+        _EDITABLE_FIELDS = {"fileName", "folderId", "scope", "tags", "description", "neutralize"}
         safeData = {k: v for k, v in file_info.items() if k in _EDITABLE_FIELDS}
         if not safeData:
             raise HTTPException(status_code=400, detail=routeApiMsg("No editable fields provided"))
@@ -128,7 +128,7 @@ async def getSharepointFolderOptionsByReference(
         # Set access token on SharePoint service
         if not services.sharepoint.setAccessTokenFromConnection(connection):
             raise HTTPException(
-                status_code=status.HTTP_401_UNAUTHORIZED,
+                status_code=status.HTTP_502_BAD_GATEWAY,
                 detail=routeApiMsg("Failed to set SharePoint access token. Connection may be expired or invalid.")
             )
@@ -3,7 +3,7 @@
 """ActionToolAdapter: wraps existing workflow actions (dynamicMode=True) as agent tools."""
 
 import logging
-from typing import Dict, Any, List
+from typing import Dict, Any, List, Optional
 
 from modules.serviceCenter.services.serviceAgent.datamodelAgent import (
     ToolDefinition, ToolResult
@@ -44,7 +44,7 @@ class ActionToolAdapter:
             compoundName = f"{shortName}_{actionName}"
             toolDef = _buildToolDefinition(compoundName, actionDef, actionInfo)
 
-            handler = _createDispatchHandler(self._actionExecutor, shortName, actionName)
+            handler = _createDispatchHandler(self._actionExecutor, shortName, actionName, self._actionExecutor.services)
             toolRegistry.registerFromDefinition(toolDef, handler)
             self._registeredTools.append(compoundName)
             registered += 1
@@ -186,7 +186,7 @@ def _catalogTypeToJsonSchema(typeStr: str, _depth: int = 0) -> Dict[str, Any]:
     return {"type": "string", "description": f"unknown type '{typeStr}' (defaulted to string)"}
 
 
-def _createDispatchHandler(actionExecutor, methodName: str, actionName: str):
+def _createDispatchHandler(actionExecutor, methodName: str, actionName: str, services=None):
     """Create an async handler that dispatches to the ActionExecutor.
 
     Parameter validation and Ref-payload normalization (collapsing
@@ -204,7 +204,7 @@ def _createDispatchHandler(actionExecutor, methodName: str, actionName: str, services=None):
         if "mandateId" not in args and context.get("mandateId"):
             args["mandateId"] = context["mandateId"]
         result = await actionExecutor.executeAction(methodName, actionName, args)
-        data = _formatActionResult(result)
+        data = _formatActionResult(result, services, context)
         return ToolResult(
             toolCallId="",
             toolName=f"{methodName}_{actionName}",
@@ -223,9 +223,65 @@ def _createDispatchHandler(actionExecutor, methodName: str, actionName: str, services=None):
     return _handler
 
 
-def _formatActionResult(result) -> str:
-    """Format an ActionResult into a text representation for the agent."""
+_INLINE_CONTENT_LIMIT = 2000
+
+
+def _persistLargeDocument(doc, services, context: Dict[str, Any]) -> Optional[str]:
+    """Save an ActionDocument with large content as a workspace file.
+
+    Returns a formatted result line (with file id + docItem ref) or None
+    if persistence is not possible.
+    """
+    if not services:
+        return None
+    chatService = getattr(services, "chat", None)
+    if not chatService:
+        return None
+    docData = getattr(doc, "documentData", None)
+    if not docData or not isinstance(docData, str):
+        return None
+    docName = getattr(doc, "documentName", "unnamed")
+    docBytes = docData.encode("utf-8")
+    try:
+        fileItem, _ = chatService.interfaceDbComponent.saveUploadedFile(docBytes, docName)
+        fiId = context.get("featureInstanceId") or getattr(services, "featureInstanceId", "")
+        if fiId:
+            chatService.interfaceDbComponent.updateFile(fileItem.id, {"featureInstanceId": fiId})
+
+        from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
+            _attachFileAsChatDocument,
+            _formatToolFileResult,
+            _getOrCreateTempFolder,
+        )
+        tempFolderId = _getOrCreateTempFolder(chatService)
+        if tempFolderId:
+            chatService.interfaceDbComponent.updateFile(fileItem.id, {"folderId": tempFolderId})
+
+        chatDocId = _attachFileAsChatDocument(
+            services, fileItem,
+            label=f"action_doc:{docName}",
+            userMessage=f"Action document: {docName}",
+        )
+        return _formatToolFileResult(
+            fileItem=fileItem,
+            chatDocId=chatDocId,
+            actionLabel="Produced",
+            extraInfo="Use readFile to read the content.",
+        )
+    except Exception as e:
+        logger.warning(f"_persistLargeDocument failed for {docName}: {e}")
+        return None
+
+
+def _formatActionResult(result, services=None, context: Optional[Dict[str, Any]] = None) -> str:
+    """Format an ActionResult into a text representation for the agent.
+
+    Documents whose content exceeds the inline limit are persisted as
+    workspace files so the agent can access them via readFile /
+    ai_process / searchInFileContent.
+    """
     parts = []
+    ctx = context or {}
+
     if result.resultLabel:
         parts.append(f"Result: {result.resultLabel}")
@@ -238,10 +294,19 @@ def _formatActionResult(result) -> str:
    for doc in result.documents:
        docName = getattr(doc, "documentName", "unnamed")
        docType = getattr(doc, "mimeType", "unknown")
-       parts.append(f" - {docName} ({docType})")
        docData = getattr(doc, "documentData", None)
-       if docData and isinstance(docData, str) and len(docData) < 2000:
-           parts.append(f" Content: {docData[:2000]}")
+       isLarge = docData and isinstance(docData, str) and len(docData) >= _INLINE_CONTENT_LIMIT
+       if isLarge:
+           persistedLine = _persistLargeDocument(doc, services, ctx)
+           if persistedLine:
+               parts.append(f" - {docName} ({docType})")
+               parts.append(f" {persistedLine}")
+               continue
+
+       parts.append(f" - {docName} ({docType})")
+       if docData and isinstance(docData, str) and len(docData) < _INLINE_CONTENT_LIMIT:
+           parts.append(f" Content: {docData[:_INLINE_CONTENT_LIMIT]}")

    if not parts:
        parts.append("Action completed successfully." if result.success else "Action failed.")

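Review note: the new inline-vs-persist routing above can be exercised in isolation. A minimal sketch, assuming a stand-in document class and a stubbed persister (FakeDoc and the lambda are illustrative, not part of this commit):

# Hypothetical stand-ins to exercise the routing logic on its own.
_INLINE_CONTENT_LIMIT = 2000

class FakeDoc:
    def __init__(self, name, data):
        self.documentName = name
        self.mimeType = "text/plain"
        self.documentData = data

def route(doc, persist):
    """Return ('inline', text) or ('persisted', line), mirroring _formatActionResult."""
    docData = getattr(doc, "documentData", None)
    isLarge = docData and isinstance(docData, str) and len(docData) >= _INLINE_CONTENT_LIMIT
    if isLarge:
        line = persist(doc)  # stands in for _persistLargeDocument
        if line:
            return ("persisted", line)
    if docData and isinstance(docData, str) and len(docData) < _INLINE_CONTENT_LIMIT:
        return ("inline", docData[:_INLINE_CONTENT_LIMIT])
    return ("listed", None)

print(route(FakeDoc("small.txt", "x" * 10), lambda d: None))          # ('inline', 'xxxxxxxxxx')
print(route(FakeDoc("big.txt", "x" * 5000), lambda d: "file id 1"))   # ('persisted', 'file id 1')
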
@@ -198,7 +198,10 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
        if isinstance(result, _DR):
            fileBytes = result.data
-           fileName = result.fileName or fileName
+           resolvedName = result.fileName or fileName
+           if resolvedName != fileName:
+               logger.debug(f"downloadFromDataSource: connector fileName={result.fileName!r} overrides arg fileName={fileName!r}")
+           fileName = resolvedName
        else:
            fileBytes = result

@@ -836,7 +836,7 @@ def _registerMediaTools(registry: ToolRegistry, services):
            return ToolResult(toolCallId="", toolName="executeCode", success=False, error=f"Language '{language}' not supported. Only 'python' is available.")
        try:
            from modules.serviceCenter.services.serviceAgent.sandboxExecutor import executePython
-           result = await executePython(code)
+           result = await executePython(code, services=services)
            if result.get("success"):
                output = result.get("output", "(no output)")
                return ToolResult(toolCallId="", toolName="executeCode", success=True, data=output)

@@ -886,12 +886,17 @@ def _registerMediaTools(registry: ToolRegistry, services):
        readOnly=True
    )

+   from modules.serviceCenter.services.serviceAgent.sandboxExecutor import SANDBOX_ALLOWED_MODULES
+   moduleList = ", ".join(sorted(SANDBOX_ALLOWED_MODULES | {"io"}))
    registry.register(
        "executeCode", _executeCode,
        description=(
-           "Execute Python code in a sandboxed environment for calculations and data analysis. "
-           "Available modules: math, statistics, json, csv, re, datetime, collections, itertools, functools, decimal, fractions, random. "
-           "No file system, network, or OS access. Max 30s execution time. "
+           f"Execute Python code in a sandboxed environment for calculations and data analysis. "
+           f"Available modules: {moduleList}. "
+           "io is restricted to StringIO and BytesIO only (no file access). "
+           "Built-in readFile(fileId) returns UTF-8 content of a workspace file by its file ID "
+           "(use the 'file id' from tool outputs, e.g. data = readFile('019af...')). "
+           "No other file system, network, or OS access. Max 30s execution time. "
            "Use print() to produce output."
        ),
        parameters={

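Review note: the tool description is now derived from the single source of truth. A quick, self-contained check of what moduleList renders to (the set literal below mirrors SANDBOX_ALLOWED_MODULES from this commit):

SANDBOX_ALLOWED_MODULES = {
    "math", "statistics", "json", "csv", "re", "datetime", "time",
    "collections", "itertools", "functools", "decimal", "fractions",
    "random", "string", "textwrap", "operator", "copy",
}
moduleList = ", ".join(sorted(SANDBOX_ALLOWED_MODULES | {"io"}))
print(moduleList)
# collections, copy, csv, datetime, decimal, fractions, functools, io, itertools,
# json, math, operator, random, re, statistics, string, textwrap, time
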
@@ -69,7 +69,15 @@ class _ServicesAdapter:
    @property
    def workflow(self):
-       return self._context.workflow
+       return getattr(self, "_workflow_override", None) or self._context.workflow
+
+   @workflow.setter
+   def workflow(self, value):
+       self._workflow_override = value
+       try:
+           self._context.workflow = value
+       except (AttributeError, TypeError):
+           pass

    @property
    def ai(self):

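Review note: a stripped-down sketch of the override pattern introduced here (the _Ctx class is hypothetical). The setter records a local override and best-effort mirrors it into the wrapped context; the getter prefers the override:

class _Ctx:
    workflow = "ctx-workflow"

class Adapter:
    def __init__(self, context):
        self._context = context

    @property
    def workflow(self):
        return getattr(self, "_workflow_override", None) or self._context.workflow

    @workflow.setter
    def workflow(self, value):
        self._workflow_override = value
        try:
            self._context.workflow = value  # may be read-only on some contexts
        except (AttributeError, TypeError):
            pass

a = Adapter(_Ctx())
print(a.workflow)       # ctx-workflow
a.workflow = "override"
print(a.workflow)       # override
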
@@ -95,6 +103,13 @@ class _ServicesAdapter:
    def extraction(self):
        return self._getService("extraction")

+   @property
+   def interfaceDbComponent(self):
+       try:
+           return self.chat.interfaceDbComponent
+       except Exception:
+           return None
+
    @property
    def rbac(self):
        """Same RbacClass as workflow hub (MethodBase permission checks during discoverMethods)."""

@@ -10,8 +10,8 @@ from typing import Dict, Any
logger = logging.getLogger(__name__)

-_PYTHON_ALLOWED_MODULES = {
-    "math", "statistics", "json", "csv", "re", "datetime",
+SANDBOX_ALLOWED_MODULES = {
+    "math", "statistics", "json", "csv", "re", "datetime", "time",
    "collections", "itertools", "functools", "decimal", "fractions",
    "random", "string", "textwrap", "operator", "copy",
}

@@ -19,17 +19,33 @@ _PYTHON_ALLOWED_MODULES = {
_PYTHON_BLOCKED_BUILTINS = {
    "open", "exec", "eval", "compile", "__import__", "globals", "locals",
    "getattr", "setattr", "delattr", "breakpoint", "exit", "quit",
-   "input", "memoryview", "type",
+   "input", "memoryview",
}

_MAX_EXECUTION_TIME_S = 30
_MAX_OUTPUT_CHARS = 50000


+_RESTRICTED_IO = None
+
+
+def _getRestrictedIo():
+    """Return a restricted ``io`` module exposing only StringIO/BytesIO."""
+    global _RESTRICTED_IO
+    if _RESTRICTED_IO is None:
+        import types
+        m = types.ModuleType("io")
+        m.StringIO = io.StringIO
+        m.BytesIO = io.BytesIO
+        _RESTRICTED_IO = m
+    return _RESTRICTED_IO
+
+
def _safeImport(name, *args, **kwargs):
    """Restricted import that only allows whitelisted modules."""
-   if name not in _PYTHON_ALLOWED_MODULES:
-       raise ImportError(f"Module '{name}' is not allowed. Permitted: {', '.join(sorted(_PYTHON_ALLOWED_MODULES))}")
+   if name == "io":
+       return _getRestrictedIo()
+   if name not in SANDBOX_ALLOWED_MODULES:
+       raise ImportError(f"Module '{name}' is not allowed. Permitted: io (StringIO/BytesIO only), {', '.join(sorted(SANDBOX_ALLOWED_MODULES))}")
    return __builtins__["__import__"](name, *args, **kwargs) if isinstance(__builtins__, dict) else __import__(name, *args, **kwargs)

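Review note: to see what the restricted module actually exposes, a standalone demo mirroring _getRestrictedIo (assumes only StringIO/BytesIO are wanted):

import io
import types

m = types.ModuleType("io")
m.StringIO = io.StringIO
m.BytesIO = io.BytesIO

buf = m.StringIO()
buf.write("sandboxed")
print(buf.getvalue())        # sandboxed
print(hasattr(m, "open"))    # False: no file access through this module
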
@@ -48,7 +64,7 @@ def _buildRestrictedGlobals() -> Dict[str, Any]:
    safeBuiltins["__name__"] = "__sandbox__"
    safeBuiltins["__builtins__"] = safeBuiltins

-   for modName in _PYTHON_ALLOWED_MODULES:
+   for modName in SANDBOX_ALLOWED_MODULES:
        try:
            safeBuiltins[modName] = __import__(modName)
        except ImportError:

@@ -57,12 +73,27 @@ def _buildRestrictedGlobals() -> Dict[str, Any]:
    return {"__builtins__": safeBuiltins}


-async def executePython(code: str) -> Dict[str, Any]:
+def _makeReadFile(services):
+    """Create a readFile(fileId) closure bound to the current services context."""
+    def readFile(fileId: str) -> str:
+        mgmt = getattr(services, 'interfaceDbComponent', None) if services else None
+        if not mgmt:
+            raise RuntimeError("readFile: no file store available in this session")
+        data = mgmt.getFileData(str(fileId))
+        if data is None:
+            raise FileNotFoundError(f"File '{fileId}' not found in workspace")
+        return data.decode("utf-8")
+    return readFile
+
+
+async def executePython(code: str, *, services=None) -> Dict[str, Any]:
    """Execute Python code in a restricted sandbox. Returns {success, output, error}."""
    import asyncio

    def _run():
        restrictedGlobals = _buildRestrictedGlobals()
+       if services:
+           restrictedGlobals["__builtins__"]["readFile"] = _makeReadFile(services)
        capturedOutput = io.StringIO()
        oldStdout = sys.stdout
        oldStderr = sys.stderr

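Review note: assuming the signatures above, a caller exercises the new services keyword like this; the _FakeStore/_FakeServices stubs are illustrative only, and the printed result shape is the {success, output, error} dict from the docstring:

import asyncio
from modules.serviceCenter.services.serviceAgent.sandboxExecutor import executePython

class _FakeStore:
    def getFileData(self, fileId):
        # Pretend '019af' is a stored CSV; anything else is missing.
        return b"col1,col2\n1,2\n" if fileId == "019af" else None

class _FakeServices:
    interfaceDbComponent = _FakeStore()

async def main():
    # readFile() is injected into the sandbox because services is passed.
    result = await executePython(
        "data = readFile('019af')\nprint(len(data.splitlines()))",
        services=_FakeServices(),
    )
    print(result)  # expected something like {'success': True, 'output': '2\n'}

asyncio.run(main())
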
@@ -166,12 +166,28 @@ class ClickupService:
        page: int = 0,
        include_closed: bool = False,
        subtasks: bool = True,
+       dateCreatedGt: Optional[int] = None,
+       dateCreatedLt: Optional[int] = None,
+       dateUpdatedGt: Optional[int] = None,
+       dateUpdatedLt: Optional[int] = None,
+       customFields: Optional[List[Dict[str, Any]]] = None,
    ) -> Dict[str, Any]:
        params: Dict[str, Any] = {
            "page": page,
            "subtasks": str(subtasks).lower(),
            "include_closed": str(include_closed).lower(),
        }
+       if dateCreatedGt is not None:
+           params["date_created_gt"] = dateCreatedGt
+       if dateCreatedLt is not None:
+           params["date_created_lt"] = dateCreatedLt
+       if dateUpdatedGt is not None:
+           params["date_updated_gt"] = dateUpdatedGt
+       if dateUpdatedLt is not None:
+           params["date_updated_lt"] = dateUpdatedLt
+       if customFields:
+           import json as _json
+           params["custom_fields"] = _json.dumps(customFields)
        return await self._request("GET", f"/list/{list_id}/task", params=params)

    async def getTask(self, task_id: str, *, include_subtasks: bool = True) -> Dict[str, Any]:

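Review note: a hedged usage sketch of the extended method (the list id and timestamps are made up; values are Unix epoch milliseconds per the ClickUp API):

async def _demo(svc):
    # svc is a ClickupService instance; '901234567' is a hypothetical list id.
    return await svc.getTasksInList(
        "901234567",
        page=0,
        include_closed=False,
        dateUpdatedGt=1700000000000,  # maps to query param date_updated_gt
        customFields=[{"field_id": "abc", "operator": "=", "value": "123"}],
    )
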
@@ -79,7 +79,7 @@ class RendererCodeCsv(BaseCodeRenderer):
        return renderedDocs

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """
        Render method for document generation compatibility.
        Delegates to document renderer if needed, or handles code files directly.

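Review note: this signature change repeats across all renderers below. Making style keyword-only keeps every existing positional call site working while letting new callers opt in; a tiny self-contained illustration:

import asyncio

async def render(extractedContent, title, userPrompt=None, aiService=None, *, style=None):
    # Keyword-only `style` cannot be hit by an old positional argument list.
    return (title, style)

async def main():
    print(await render({}, "T", None, None))               # old positional call: ('T', None)
    print(await render({}, "T", style={"font": "mono"}))   # new keyword call: ('T', {'font': 'mono'})

asyncio.run(main())
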
@@ -91,7 +91,7 @@ class RendererCodeJson(BaseCodeRenderer):
        return renderedDocs

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """
        Render method for document generation compatibility.
        Delegates to document renderer if needed, or handles code files directly.

@@ -78,7 +78,7 @@ class RendererCodeXml(BaseCodeRenderer):
        return renderedDocs

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """
        Render method for document generation compatibility.
        For XML, we only support code generation (no document renderer exists yet).

@@ -39,7 +39,7 @@ class RendererCsv(BaseRenderer):
        """
        return ["table", "code_block"]

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """Render extracted JSON content to CSV format. Produces one CSV file per table section."""
        try:
            # Validate JSON structure

@@ -43,7 +43,7 @@ class RendererImage(BaseRenderer):
        """
        return ["image"]

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """Render extracted JSON content to image format using AI image generation."""
        try:
            # Generate AI image from content

@@ -42,7 +42,7 @@ class RendererJson(BaseRenderer):
        # Return all types except image
        return [st for st in supportedSectionTypes if st != "image"]

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """Render extracted JSON content to JSON format."""
        try:
            # The extracted content should already be JSON from the AI

@@ -40,7 +40,7 @@ class RendererMarkdown(BaseRenderer):
        from modules.datamodels.datamodelJson import supportedSectionTypes
        return [st for st in supportedSectionTypes if st != "image"]

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """Render extracted JSON content to Markdown format."""
        try:
            # Generate markdown from JSON structure

@@ -76,7 +76,7 @@ class RendererText(BaseRenderer):
        # Text renderer accepts all types except images
        return [st for st in supportedSectionTypes if st != "image"]

-   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None) -> List[RenderedDocument]:
+   async def render(self, extractedContent: Dict[str, Any], title: str, userPrompt: str = None, aiService=None, *, style: Dict[str, Any] = None) -> List[RenderedDocument]:
        """Render extracted JSON content to plain text format."""
        try:
            # Generate text from JSON structure

@@ -75,8 +75,10 @@ def _action_docs_to_content_parts(services, docs: List[Any]) -> List[ContentPart

def _resolve_file_refs_to_content_parts(services, fileIdRefs) -> List[ContentPart]:
    """Fetch files by ID from the file store and extract content.
-   Used for automation2 workflows where documents are file-store references,
-   not chat message attachments."""
+   Used ONLY for automation2 workflows where documents are file-store
+   references, not chat message attachments. In the agent/chat context,
+   ``DocumentItemReference`` holds ChatDocument IDs that must be resolved
+   via ``getChatDocumentsFromDocumentList`` instead."""
    from modules.datamodels.datamodelExtraction import ExtractionOptions, MergeStrategy

    mgmt = getattr(services, 'interfaceDbComponent', None)

@@ -171,16 +173,24 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
        f"to DocumentReferenceList with {len(documentList.references)} references"
    )

-   # Resolve DocumentItemReferences (file-ID refs from automation2) directly
-   # from the file store. These cannot be resolved via chat messages.
+   # DocumentItemReferences carry either file-store IDs (automation2)
+   # or ChatDocument IDs (agent context with docItem: refs).
+   # Route based on context: if a chat workflow with messages exists,
+   # let getChatDocumentsFromDocumentList handle them (it resolves
+   # docItem:uuid via workflow.messages). Otherwise fall through to
+   # the file-store path for automation2.
    from modules.datamodels.datamodelDocref import DocumentItemReference
    fileIdRefs = [r for r in documentList.references if isinstance(r, DocumentItemReference)]
    if fileIdRefs:
-       extractedParts = _resolve_file_refs_to_content_parts(self.services, fileIdRefs)
-       if extractedParts:
-           inline_content_parts = (inline_content_parts or []) + extractedParts
-       remaining = [r for r in documentList.references if not isinstance(r, DocumentItemReference)]
-       documentList = DocumentReferenceList(references=remaining)
+       chatService = getattr(self.services, 'chat', None)
+       workflow = getattr(chatService, '_workflow', None) if chatService else None
+       hasChatContext = workflow and getattr(workflow, 'messages', None)
+       if not hasChatContext:
+           extractedParts = _resolve_file_refs_to_content_parts(self.services, fileIdRefs)
+           if extractedParts:
+               inline_content_parts = (inline_content_parts or []) + extractedParts
+           remaining = [r for r in documentList.references if not isinstance(r, DocumentItemReference)]
+           documentList = DocumentReferenceList(references=remaining)

    # Optional: if omitted, formats determined from prompt. Default "txt" is validation fallback only.
    resultType = parameters.get("resultType")

@@ -31,8 +31,30 @@ async def list_tasks(self, parameters: Dict[str, Any]) -> ActionResult:
    page = int(parameters.get("page") or 0)
    include_closed = bool(parameters.get("includeClosed", False))

+   dateFilters = {}
+   for key in ("dateCreatedGt", "dateCreatedLt", "dateUpdatedGt", "dateUpdatedLt"):
+       val = parameters.get(key)
+       if val is not None and str(val).strip():
+           try:
+               dateFilters[key] = int(val)
+           except (ValueError, TypeError):
+               pass
+
+   rawCustomFields = parameters.get("customFields")
+   customFields = None
+   if rawCustomFields:
+       if isinstance(rawCustomFields, str):
+           try:
+               customFields = json.loads(rawCustomFields)
+           except json.JSONDecodeError:
+               return ActionResult.isFailure(error="customFields must be valid JSON array")
+       elif isinstance(rawCustomFields, list):
+           customFields = rawCustomFields
+
    data = await self.services.clickup.getTasksInList(
-       list_id, page=page, include_closed=include_closed, subtasks=True
+       list_id, page=page, include_closed=include_closed, subtasks=True,
+       **dateFilters, customFields=customFields,
    )
    if isinstance(data, dict) and data.get("error"):
        return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))

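Review note: the customFields parameter arrives either as a JSON string (from the form) or as an already-parsed list; both normalize to the same structure. A quick standalone check:

import json

raw = '[{"field_id":"abc","operator":"=","value":"123"}]'
customFields = json.loads(raw) if isinstance(raw, str) else raw
print(customFields[0]["operator"])  # =
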
@@ -66,6 +66,41 @@ class MethodClickup(MethodBase):
                default=False,
                description="Include closed tasks",
            ),
+           "dateCreatedGt": WorkflowActionParameter(
+               name="dateCreatedGt",
+               type="int",
+               frontendType=FrontendType.NUMBER,
+               required=False,
+               description="Filter: created after this Unix ms timestamp",
+           ),
+           "dateCreatedLt": WorkflowActionParameter(
+               name="dateCreatedLt",
+               type="int",
+               frontendType=FrontendType.NUMBER,
+               required=False,
+               description="Filter: created before this Unix ms timestamp",
+           ),
+           "dateUpdatedGt": WorkflowActionParameter(
+               name="dateUpdatedGt",
+               type="int",
+               frontendType=FrontendType.NUMBER,
+               required=False,
+               description="Filter: updated after this Unix ms timestamp",
+           ),
+           "dateUpdatedLt": WorkflowActionParameter(
+               name="dateUpdatedLt",
+               type="int",
+               frontendType=FrontendType.NUMBER,
+               required=False,
+               description="Filter: updated before this Unix ms timestamp",
+           ),
+           "customFields": WorkflowActionParameter(
+               name="customFields",
+               type="str",
+               frontendType=FrontendType.TEXTAREA,
+               required=False,
+               description='JSON array of custom field filters per ClickUp API, e.g. [{"field_id":"abc","operator":"=","value":"123"}]',
+           ),
        },
        execute=list_tasks.__get__(self, self.__class__),
    ),

58  scripts/stage0_filefolder_schema_check.py  Normal file

@@ -0,0 +1,58 @@
"""Stage 0: verify FileFolder table + FileItem.folderId column in management DB.

Run from the gateway directory (same as uvicorn):
    python -m scripts.stage0_filefolder_schema_check
"""
from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.shared.configuration import APP_CONFIG

managementDatabase = "poweron_management"

dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

c = getCachedConnector(
    dbHost=dbHost,
    dbDatabase=managementDatabase,
    dbUser=dbUser,
    dbPassword=dbPassword,
    dbPort=dbPort,
    userId=None,
)
if not c or not c.connection:
    print("STAGE0: DB_CONNECTION=none (check config.ini / .env)")
    raise SystemExit(2)

cur = c.connection.cursor()


def _scalar(cur):
    row = cur.fetchone()
    if row is None:
        return None
    if isinstance(row, dict):
        return next(iter(row.values()))
    return row[0]


cur.execute(
    """
    SELECT EXISTS (
        SELECT 1 FROM information_schema.tables
        WHERE table_name = 'FileFolder'
    ) AS ok
    """
)
print("STAGE0: FileFolder_table=", _scalar(cur))
cur.execute(
    """
    SELECT EXISTS (
        SELECT 1 FROM information_schema.columns
        WHERE table_name = 'FileItem' AND column_name = 'folderId'
    ) AS ok
    """
)
print("STAGE0: FileItem_folderId_column=", _scalar(cur))
cur.close()

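Review note: on a correctly migrated database both checks should print True; the exact formatting follows the print calls above, so the expected output looks roughly like:

STAGE0: FileFolder_table= True
STAGE0: FileItem_folderId_column= True
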
327  tests/unit/interfaces/test_folderRbac.py  Normal file

@@ -0,0 +1,327 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for folder RBAC two-user matrix (ownership & scope visibility)."""

import uuid
import pytest
from unittest.mock import Mock, patch, MagicMock
from typing import Dict, Any, List, Optional

from modules.datamodels.datamodelFiles import FileFolder, FileItem
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.interfaces.interfaceDbManagement import ComponentObjects, FileNotFoundError


_MANDATE_ID = "mandate-test-1"
_FEATURE_INSTANCE_ID = "fi-test-1"
_USER_A = "user-a-id"
_USER_B = "user-b-id"


# ── Fakes & helpers ──────────────────────────────────────────────────────────

class _FakeDb:
    """In-memory database mock."""

    def __init__(self):
        self._tables: Dict[str, Dict[str, Dict[str, Any]]] = {}
        self.connection = MagicMock()

    def getRecordset(self, modelClass, recordFilter=None):
        tableName = modelClass.__name__
        records = list(self._tables.get(tableName, {}).values())
        if not recordFilter:
            return records
        return [
            r for r in records
            if all(r.get(k) == v for k, v in recordFilter.items())
        ]

    def recordCreate(self, modelClass, data):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        rec = data.model_dump() if hasattr(data, "model_dump") else dict(data)
        rec.setdefault("id", str(uuid.uuid4()))
        self._tables[tableName][rec["id"]] = rec
        return rec

    def recordModify(self, modelClass, recordId, updates):
        tbl = self._tables.get(modelClass.__name__, {})
        if recordId in tbl:
            tbl[recordId].update(updates)
            return True
        return False

    def recordDelete(self, modelClass, recordId):
        tbl = self._tables.get(modelClass.__name__, {})
        if recordId in tbl:
            del tbl[recordId]
            return True
        return False

    def updateContext(self, userId):
        pass

    def _ensure_connection(self):
        pass

    def _ensureTableExists(self, modelClass):
        return True

    def seed(self, modelClass, record: Dict[str, Any]):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        self._tables[tableName][record["id"]] = dict(record)


def _makeUser(userId, username="testuser"):
    return User(id=userId, username=username, language="en")


def _makeRbac(
    createLevel=AccessLevel.ALL,
    readLevel=AccessLevel.ALL,
    updateLevel=AccessLevel.MY,
    deleteLevel=AccessLevel.MY,
):
    """Default: regular user can read all, but write only own records."""
    rbac = Mock()
    perms = UserPermissions(
        view=True,
        read=readLevel,
        create=createLevel,
        update=updateLevel,
        delete=deleteLevel,
    )
    rbac.getUserPermissions.return_value = perms
    return rbac


def _buildComponent(userId, fakeDb, rbac=None):
    with patch.object(ComponentObjects, "__init__", lambda self: None):
        comp = ComponentObjects()
        comp.db = fakeDb
        comp.currentUser = _makeUser(userId)
        comp.userId = userId
        comp.mandateId = _MANDATE_ID
        comp.featureInstanceId = _FEATURE_INSTANCE_ID
        comp.rbac = rbac or _makeRbac()
        comp.userLanguage = "en"
        return comp


def _makeFolder(
    folderId=None, name="Folder", parentId=None,
    userId=_USER_A, scope="personal", neutralize=False,
):
    return {
        "id": folderId or str(uuid.uuid4()),
        "name": name,
        "parentId": parentId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": neutralize,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
    }


def _makeFile(fileId=None, folderId=None, userId=_USER_A, scope="personal"):
    return {
        "id": fileId or str(uuid.uuid4()),
        "fileName": "test.txt",
        "mimeType": "text/plain",
        "fileHash": "abc123",
        "fileSize": 100,
        "folderId": folderId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": False,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
        "tags": None,
        "description": None,
        "status": None,
    }


def _scopeAwareMock(fakeDb):
    """Side-effect for getRecordsetWithRBAC that simulates scope-based visibility.

    Visibility rules:
    - Owner (sysCreatedBy == currentUser.id) always sees the record
    - scope='global' -> visible to everyone
    - scope='mandate' -> visible when mandateId matches
    - scope='featureInstance' -> visible when featureInstanceId matches
    - scope='personal' -> owner only (already covered above)
    """
    def _fn(connector, modelClass, currentUser, recordFilter=None, **kwargs):
        requestMandateId = kwargs.get("mandateId", _MANDATE_ID)
        requestFiId = kwargs.get("featureInstanceId", _FEATURE_INSTANCE_ID)
        allRecords = fakeDb.getRecordset(modelClass, recordFilter=recordFilter)
        visible = []
        for rec in allRecords:
            if rec.get("sysCreatedBy") == currentUser.id:
                visible.append(rec)
                continue
            scope = rec.get("scope", "personal")
            if scope == "global":
                visible.append(rec)
            elif scope == "mandate" and rec.get("mandateId") == requestMandateId:
                visible.append(rec)
            elif scope == "featureInstance" and rec.get("featureInstanceId") == requestFiId:
                visible.append(rec)
        return visible
    return _fn


# ── Test class ───────────────────────────────────────────────────────────────

@patch("modules.interfaces.interfaceDbManagement.getRecordsetWithRBAC")
class TestFolderRbac:
    """Two-user matrix: ownership, scope visibility, and write-access guards."""

    # ── 1. Ownership visibility ───────────────────────────────────────────

    def testUserAFolderInOwnTreeNotInUserBOwnTree(self, mockRbacGet):
        """User A's personal folder appears in A's own tree, not in B's."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", name="A-Folder", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compA = _buildComponent(_USER_A, fakeDb)
        ownA = compA.getOwnFolderTree()
        assert any(f["id"] == "fa-1" for f in ownA)

        compB = _buildComponent(_USER_B, fakeDb)
        ownB = compB.getOwnFolderTree()
        assert not any(f["id"] == "fa-1" for f in ownB)

    # ── 2. Scope change -> shared visibility ──────────────────────────────

    def testScopeChangeToMandateMakesVisibleToUserB(self, mockRbacGet):
        """Changing scope from personal to mandate makes the folder appear
        in User B's shared tree."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="personal", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        sharedBefore = compB.getSharedFolderTree()
        assert not any(f["id"] == "fa-1" for f in sharedBefore)

        fakeDb.recordModify(FileFolder, "fa-1", {"scope": "mandate"})

        sharedAfter = compB.getSharedFolderTree()
        assert any(f["id"] == "fa-1" for f in sharedAfter)

    # ── 3-7. Non-owner cannot mutate ──────────────────────────────────────

    def testUserBCannotRenameFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.renameFolder("fa-1", "Hijacked")

    def testUserBCannotMoveFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        fakeDb.seed(FileFolder, _makeFolder(folderId="fb-1", scope="mandate", userId=_USER_B))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.moveFolder("fa-1", "fb-1")

    def testUserBCannotDeleteFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.deleteFolderCascade("fa-1")

    def testUserBCannotPatchScopeOnFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.patchFolderScope("fa-1", "personal")

    def testUserBCannotPatchNeutralizeOnFolderOfUserA(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="fa-1", scope="mandate", userId=_USER_A))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        with pytest.raises(PermissionError):
            compB.patchFolderNeutralize("fa-1", True)

    # ── 8. contextOrphan ──────────────────────────────────────────────────

    def testContextOrphanWhenParentFolderNotShared(self, mockRbacGet):
        """User A's parent folder is personal, child folder is mandate.
        User B sees only the child, flagged as contextOrphan."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="parent-f", name="Private Parent", userId=_USER_A, scope="personal",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="child-f", name="Shared Child", userId=_USER_A,
            parentId="parent-f", scope="mandate",
        ))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        shared = compB.getSharedFolderTree()

        assert len(shared) == 1
        assert shared[0]["id"] == "child-f"
        assert shared[0]["contextOrphan"] is True

    # ── 9. Shared folder children visible ─────────────────────────────────

    def testSharedFolderMakesChildrenVisible(self, mockRbacGet):
        """When User A shares a folder tree (scope=mandate), all child folders
        become visible in User B's shared tree."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="root-f", name="Root", userId=_USER_A, scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="child1-f", name="Child 1", userId=_USER_A,
            parentId="root-f", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="child2-f", name="Child 2", userId=_USER_A,
            parentId="root-f", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="grandchild-f", name="Grandchild", userId=_USER_A,
            parentId="child1-f", scope="mandate",
        ))
        mockRbacGet.side_effect = _scopeAwareMock(fakeDb)

        compB = _buildComponent(_USER_B, fakeDb)
        shared = compB.getSharedFolderTree()

        sharedIds = {f["id"] for f in shared}
        assert sharedIds == {"root-f", "child1-f", "child2-f", "grandchild-f"}

        byId = {f["id"]: f for f in shared}
        assert byId["root-f"]["contextOrphan"] is False
        assert byId["child1-f"]["contextOrphan"] is False
        assert byId["child2-f"]["contextOrphan"] is False
        assert byId["grandchild-f"]["contextOrphan"] is False

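Review note: the visibility rules that _scopeAwareMock encodes reduce to a single predicate. A standalone restatement (record and viewer are plain dicts here, purely illustrative):

def isVisible(record, viewerId, mandateId, featureInstanceId):
    """Mirror of _scopeAwareMock's rules: owner always sees; otherwise scope decides."""
    if record.get("sysCreatedBy") == viewerId:
        return True
    scope = record.get("scope", "personal")
    if scope == "global":
        return True
    if scope == "mandate":
        return record.get("mandateId") == mandateId
    if scope == "featureInstance":
        return record.get("featureInstanceId") == featureInstanceId
    return False  # 'personal' and unknown scopes stay owner-only

rec = {"sysCreatedBy": "user-a-id", "scope": "mandate", "mandateId": "m1"}
print(isVisible(rec, "user-b-id", "m1", "fi1"))  # True: shared within the mandate
print(isVisible(rec, "user-b-id", "m2", "fi1"))  # False: different mandate
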
392  tests/unit/routes/test_folder_crud.py  Normal file

@@ -0,0 +1,392 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for folder CRUD operations in ComponentObjects."""

import uuid
import pytest
from unittest.mock import Mock, patch, MagicMock
from typing import Dict, Any, List, Optional

from modules.datamodels.datamodelFiles import FileFolder, FileItem
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.interfaces.interfaceDbManagement import ComponentObjects, FileNotFoundError


_MANDATE_ID = "mandate-test-1"
_FEATURE_INSTANCE_ID = "fi-test-1"
_USER_ID = "user-a-id"


# ── Fakes & helpers ──────────────────────────────────────────────────────────

class _FakeDb:
    """In-memory database mock that mimics DatabaseConnector for unit tests."""

    def __init__(self):
        self._tables: Dict[str, Dict[str, Dict[str, Any]]] = {}
        self.connection = MagicMock()

    def getRecordset(self, modelClass, recordFilter=None):
        tableName = modelClass.__name__
        records = list(self._tables.get(tableName, {}).values())
        if not recordFilter:
            return records
        return [
            r for r in records
            if all(r.get(k) == v for k, v in recordFilter.items())
        ]

    def recordCreate(self, modelClass, data):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        rec = data.model_dump() if hasattr(data, "model_dump") else dict(data)
        rec.setdefault("id", str(uuid.uuid4()))
        self._tables[tableName][rec["id"]] = rec
        return rec

    def recordModify(self, modelClass, recordId, updates):
        tableName = modelClass.__name__
        tbl = self._tables.get(tableName, {})
        if recordId in tbl:
            tbl[recordId].update(updates)
            return True
        return False

    def recordDelete(self, modelClass, recordId):
        tableName = modelClass.__name__
        tbl = self._tables.get(tableName, {})
        if recordId in tbl:
            del tbl[recordId]
            return True
        return False

    def updateContext(self, userId):
        pass

    def _ensure_connection(self):
        pass

    def _ensureTableExists(self, modelClass):
        return True

    def seed(self, modelClass, record: Dict[str, Any]):
        tableName = modelClass.__name__
        self._tables.setdefault(tableName, {})
        self._tables[tableName][record["id"]] = dict(record)


def _makeUser(userId=_USER_ID, username="testuser"):
    return User(id=userId, username=username, language="en")


def _makeRbac(
    createLevel=AccessLevel.ALL,
    readLevel=AccessLevel.ALL,
    updateLevel=AccessLevel.ALL,
    deleteLevel=AccessLevel.ALL,
):
    rbac = Mock()
    perms = UserPermissions(
        view=True,
        read=readLevel,
        create=createLevel,
        update=updateLevel,
        delete=deleteLevel,
    )
    rbac.getUserPermissions.return_value = perms
    return rbac


def _buildComponent(
    userId=_USER_ID,
    fakeDb=None,
    rbac=None,
    mandateId=_MANDATE_ID,
    featureInstanceId=_FEATURE_INSTANCE_ID,
):
    """Construct a ComponentObjects with mocked internals (no real DB)."""
    with patch.object(ComponentObjects, "__init__", lambda self: None):
        comp = ComponentObjects()
        comp.db = fakeDb or _FakeDb()
        comp.currentUser = _makeUser(userId)
        comp.userId = userId
        comp.mandateId = mandateId
        comp.featureInstanceId = featureInstanceId
        comp.rbac = rbac or _makeRbac()
        comp.userLanguage = "en"
        return comp


def _rbacFromFakeDb(fakeDb):
    """Side-effect for getRecordsetWithRBAC that delegates to _FakeDb."""
    def _fn(connector, modelClass, currentUser, recordFilter=None, **kwargs):
        return fakeDb.getRecordset(modelClass, recordFilter=recordFilter)
    return _fn


def _makeFolder(
    folderId=None, name="Folder", parentId=None,
    userId=_USER_ID, scope="personal", neutralize=False,
):
    return {
        "id": folderId or str(uuid.uuid4()),
        "name": name,
        "parentId": parentId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": neutralize,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
    }


def _makeFile(fileId=None, folderId=None, userId=_USER_ID, scope="personal"):
    return {
        "id": fileId or str(uuid.uuid4()),
        "fileName": "test.txt",
        "mimeType": "text/plain",
        "fileHash": "abc123",
        "fileSize": 100,
        "folderId": folderId,
        "mandateId": _MANDATE_ID,
        "featureInstanceId": _FEATURE_INSTANCE_ID,
        "scope": scope,
        "neutralize": False,
        "sysCreatedBy": userId,
        "sysCreatedAt": 1700000000.0,
        "sysModifiedAt": 1700000000.0,
        "sysModifiedBy": None,
        "tags": None,
        "description": None,
        "status": None,
    }


# ── Test class ───────────────────────────────────────────────────────────────

@patch("modules.interfaces.interfaceDbManagement.getRecordsetWithRBAC")
class TestFolderCrud:
    """Tests for folder create / rename / move / delete / patch operations."""

    # ── Create ────────────────────────────────────────────────────────────

    def testCreateFolderHappyPath(self, mockRbacGet):
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.createFolder("Test Folder")

        assert result["name"] == "Test Folder"
        assert result["scope"] == "personal"
        assert result["parentId"] is None
        assert result["mandateId"] == _MANDATE_ID

    def testCreateFolderWithParent(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="parent-1", name="Parent"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.createFolder("Child Folder", parentId="parent-1")

        assert result["name"] == "Child Folder"
        assert result["parentId"] == "parent-1"

    def testCreateFolderMissingNameNoInterfaceValidation(self, mockRbacGet):
        """Interface does not validate empty name; the route layer returns 400."""
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.createFolder("")
        assert result["name"] == ""

    # ── Rename ────────────────────────────────────────────────────────────

    def testRenameFolderHappyPath(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", name="Old Name"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.renameFolder("f-1", "New Name")

        assert result["name"] == "New Name"
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["name"] == "New Name"

    def testRenameFolderNotFound(self, mockRbacGet):
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(FileNotFoundError):
            comp.renameFolder("nonexistent", "New Name")

    # ── Move ──────────────────────────────────────────────────────────────

    def testMoveFolderHappyPath(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", name="Movable"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="t-1", name="Target"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.moveFolder("f-1", "t-1")

        assert result["parentId"] == "t-1"
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["parentId"] == "t-1"

    def testMoveFolderToRoot(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", name="Nested", parentId="old"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.moveFolder("f-1", None)

        assert result["parentId"] is None

    def testMoveFolderCircularReference(self, mockRbacGet):
        """A -> B -> C: moving A under C creates a cycle."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="a", name="A", parentId=None))
        fakeDb.seed(FileFolder, _makeFolder(folderId="b", name="B", parentId="a"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="c", name="C", parentId="b"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(ValueError, match="circular reference"):
            comp.moveFolder("a", "c")

    # ── Delete cascade ────────────────────────────────────────────────────

    def testDeleteFolderCascade(self, mockRbacGet):
        """Deleting root folder removes root + child + their files."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="root", name="Root"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="child", name="Child", parentId="root"))
        fakeDb.seed(FileItem, _makeFile(fileId="file-1", folderId="root"))
        fakeDb.seed(FileItem, _makeFile(fileId="file-2", folderId="child"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.deleteFolderCascade("root")

        assert result["deletedFolders"] == 2
        assert result["deletedFiles"] == 2

    def testDeleteFolderNotFound(self, mockRbacGet):
        fakeDb = _FakeDb()
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(FileNotFoundError):
            comp.deleteFolderCascade("nonexistent")

    # ── Patch scope ───────────────────────────────────────────────────────

    def testPatchScopeNoCascade(self, mockRbacGet):
        """Change folder scope without cascading to files."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", scope="personal"))
        fakeDb.seed(FileItem, _makeFile(fileId="file-1", folderId="f-1"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.patchFolderScope("f-1", "mandate", cascadeToFiles=False)

        assert result["scope"] == "mandate"
        assert result["filesUpdated"] == 0
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["scope"] == "mandate"
        assert fakeDb.getRecordset(FileItem, {"id": "file-1"})[0]["scope"] == "personal"

    def testPatchScopeWithCascade(self, mockRbacGet):
        """cascadeToFiles=True updates only owned files in the folder."""
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", scope="personal"))
        fakeDb.seed(FileItem, _makeFile(fileId="own-file", folderId="f-1"))
        fakeDb.seed(FileItem, _makeFile(fileId="other-file", folderId="f-1", userId="user-b"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.patchFolderScope("f-1", "mandate", cascadeToFiles=True)

        assert result["filesUpdated"] == 1
        assert fakeDb.getRecordset(FileItem, {"id": "own-file"})[0]["scope"] == "mandate"
        assert fakeDb.getRecordset(FileItem, {"id": "other-file"})[0]["scope"] == "personal"

    def testPatchScopeInvalid(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        with pytest.raises(ValueError, match="Invalid scope"):
            comp.patchFolderScope("f-1", "invalid_scope")

    # ── Patch neutralize ──────────────────────────────────────────────────

    def testPatchNeutralizeToggle(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="f-1", neutralize=False))
        fakeDb.seed(FileItem, _makeFile(fileId="file-1", folderId="f-1"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        resultOn = comp.patchFolderNeutralize("f-1", True)
        assert resultOn["neutralize"] is True
        assert resultOn["filesUpdated"] == 1
        assert fakeDb.getRecordset(FileFolder, {"id": "f-1"})[0]["neutralize"] is True
        assert fakeDb.getRecordset(FileItem, {"id": "file-1"})[0]["neutralize"] is True

        resultOff = comp.patchFolderNeutralize("f-1", False)
        assert resultOff["neutralize"] is False
        assert fakeDb.getRecordset(FileItem, {"id": "file-1"})[0]["neutralize"] is False

    # ── Tree queries ──────────────────────────────────────────────────────

    def testGetOwnFolderTree(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="own-1", name="Mine"))
        fakeDb.seed(FileFolder, _makeFolder(folderId="other-1", name="Theirs", userId="user-b"))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.getOwnFolderTree()

        assert len(result) == 1
        assert result[0]["id"] == "own-1"

    def testGetSharedFolderTreeWithContextOrphan(self, mockRbacGet):
        fakeDb = _FakeDb()
        fakeDb.seed(FileFolder, _makeFolder(folderId="own", name="Own"))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="shared-root", name="Shared Root", userId="user-b", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="shared-child", name="Shared Child", userId="user-b",
            parentId="shared-root", scope="mandate",
        ))
        fakeDb.seed(FileFolder, _makeFolder(
            folderId="orphan", name="Orphan", userId="user-b",
            parentId="invisible-parent", scope="mandate",
        ))
        comp = _buildComponent(fakeDb=fakeDb)
        mockRbacGet.side_effect = _rbacFromFakeDb(fakeDb)

        result = comp.getSharedFolderTree()

        ids = {r["id"] for r in result}
        assert "own" not in ids
        assert "shared-root" in ids
        assert "shared-child" in ids
        assert "orphan" in ids

        byId = {r["id"]: r for r in result}
        assert byId["shared-root"]["contextOrphan"] is False
        assert byId["shared-child"]["contextOrphan"] is False
        assert byId["orphan"]["contextOrphan"] is True