fix: completely fixed grouping to be like clickup grouping, removed wrong mechanisms
This commit is contained in:
parent
9ae2ffc415
commit
5455e09367
15 changed files with 1227 additions and 810 deletions
3
app.py
3
app.py
|
|
@ -600,6 +600,9 @@ app.include_router(promptRouter)
|
|||
from modules.routes.routeDataConnections import router as connectionsRouter
|
||||
app.include_router(connectionsRouter)
|
||||
|
||||
from modules.routes.routeTableViews import router as tableViewsRouter
|
||||
app.include_router(tableViewsRouter)
|
||||
|
||||
from modules.routes.routeSecurityLocal import router as localRouter
|
||||
app.include_router(localRouter)
|
||||
|
||||
|
|
|
|||
|
|
@ -9,50 +9,95 @@ All models use camelStyle naming convention for consistency with frontend.
|
|||
from typing import List, Dict, Any, Optional, Generic, TypeVar
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
import math
|
||||
import uuid
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Table Grouping models
|
||||
# Group layout models (Strategy B — derived from Views, purely presentational)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TableGroupNode(BaseModel):
|
||||
class GroupByLevel(BaseModel):
|
||||
"""One level of a multi-level grouping definition, stored inside a TableListView config."""
|
||||
field: str = Field(..., description="Field key to group by")
|
||||
nullLabel: str = Field(default="—", description="Display label for null/empty values")
|
||||
direction: str = Field(
|
||||
default="asc",
|
||||
description="Order of group bands at this level: 'asc' or 'desc'",
|
||||
)
|
||||
|
||||
|
||||
class GroupBand(BaseModel):
|
||||
"""
|
||||
A single node in a user-defined group tree for a FormGeneratorTable.
|
||||
A contiguous block of rows that share the same group path, intersecting the current page.
|
||||
|
||||
Items belong to exactly one group (no multi-membership).
|
||||
Groups can be nested to arbitrary depth via subGroups.
|
||||
startRowIndex and rowCount are 0-based indices relative to the current page's items[].
|
||||
"""
|
||||
id: str
|
||||
name: str
|
||||
itemIds: List[str] = Field(default_factory=list)
|
||||
subGroups: List['TableGroupNode'] = Field(default_factory=list)
|
||||
order: int = 0
|
||||
isExpanded: bool = True
|
||||
|
||||
TableGroupNode.model_rebuild()
|
||||
path: List[str] = Field(..., description="Hierarchical group key (one entry per level)")
|
||||
label: str = Field(..., description="Display label for this band (last path element)")
|
||||
startRowIndex: int = Field(..., description="0-based start index within items[] on this page")
|
||||
rowCount: int = Field(..., description="Number of items in this band on this page")
|
||||
|
||||
|
||||
class TableGrouping(BaseModel):
|
||||
class GroupLayout(BaseModel):
|
||||
"""
|
||||
Persisted grouping configuration for one (user, contextKey) pair.
|
||||
Stored in table_groupings in poweron_app (auto-created).
|
||||
Grouping structure for the current response page.
|
||||
Included only when the effective view has groupByLevels configured.
|
||||
The frontend renders group header rows by iterating bands and inserting
|
||||
headers before each startRowIndex.
|
||||
"""
|
||||
levels: List[str] = Field(..., description="Ordered field keys that define the grouping hierarchy")
|
||||
bands: List[GroupBand] = Field(..., description="Bands intersecting the current page, in order")
|
||||
|
||||
|
||||
class AppliedViewMeta(BaseModel):
|
||||
"""Minimal metadata about the view that was applied to this response."""
|
||||
viewKey: Optional[str] = None
|
||||
displayName: Optional[str] = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Persisted view model
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TableListView(BaseModel):
|
||||
"""
|
||||
A saved table view for one (userId, contextKey) pair.
|
||||
|
||||
config schema (schemaVersion=1):
|
||||
{
|
||||
"schemaVersion": 1,
|
||||
"filters": {}, # same structure as PaginationParams.filters
|
||||
"sort": [], # same structure as PaginationParams.sort
|
||||
"groupByLevels": [ # ordered grouping levels
|
||||
{"field": "scope", "nullLabel": "—", "direction": "asc"}
|
||||
],
|
||||
"collapsedSectionKeys": [], # optional: section UI (stable group keys)
|
||||
"collapsedGroupKeys": [], # optional: inline group bands (path.join('///'))
|
||||
}
|
||||
|
||||
contextKey convention: API path without /api/ prefix and without trailing slash.
|
||||
Examples: "connections", "prompts", "admin/users", "trustee/{instanceId}/documents"
|
||||
Examples: "connections", "prompts", "admin/users", "files/list"
|
||||
|
||||
viewKey is a user-defined slug, unique per (userId, mandateId, contextKey).
|
||||
"""
|
||||
id: str
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
userId: str
|
||||
mandateId: Optional[str] = None
|
||||
contextKey: str
|
||||
rootGroups: List[TableGroupNode] = Field(default_factory=list)
|
||||
viewKey: str
|
||||
displayName: str
|
||||
config: Dict[str, Any] = Field(default_factory=dict)
|
||||
updatedAt: Optional[float] = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Sort and pagination models
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class SortField(BaseModel):
|
||||
"""
|
||||
Single sort field configuration.
|
||||
"""
|
||||
"""Single sort field configuration."""
|
||||
field: str = Field(..., description="Field name to sort by")
|
||||
direction: str = Field(..., description="Sort direction: 'asc' or 'desc'")
|
||||
|
||||
|
|
@ -61,16 +106,13 @@ class PaginationParams(BaseModel):
|
|||
"""
|
||||
Complete pagination state including page, sorting, and filters.
|
||||
|
||||
Grouping extensions (both optional — omit when not using grouping):
|
||||
groupId — Scope the request to items belonging to this group.
|
||||
The backend resolves it to an itemIds IN-filter before
|
||||
applying normal pagination/search/filter logic.
|
||||
Also applied for mode=ids and mode=filterValues so that
|
||||
bulk-select and filter-dropdowns respect the group scope.
|
||||
saveGroupTree — If present the backend persists this tree for the current
|
||||
(user, contextKey) pair *before* fetching, then returns
|
||||
the confirmed tree in the response groupTree field.
|
||||
Omit on every request that does not change the group tree.
|
||||
View extension (optional):
|
||||
viewKey — Slug of a saved TableListView for this (user, contextKey) pair.
|
||||
The server loads the view, merges its filters/sort/groupByLevels
|
||||
into the effective query (request fields take priority over view
|
||||
defaults for explicitly provided fields), and returns groupLayout
|
||||
in the response when groupByLevels is non-empty.
|
||||
Omit or set to None for the default (ungrouped) view.
|
||||
"""
|
||||
page: int = Field(ge=1, description="Current page number (1-based)")
|
||||
pageSize: int = Field(ge=1, le=1000, description="Number of items per page")
|
||||
|
|
@ -85,13 +127,16 @@ class PaginationParams(BaseModel):
|
|||
- Supported operators: equals/eq, contains, startsWith, endsWith, gt, gte, lt, lte, in, notIn
|
||||
- Multiple filters are combined with AND logic"""
|
||||
)
|
||||
groupId: Optional[str] = Field(
|
||||
viewKey: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Scope request to items of this group (resolved server-side to itemIds IN-filter)",
|
||||
description="Slug of a saved view to load; server merges view config into effective query",
|
||||
)
|
||||
saveGroupTree: Optional[List[Dict[str, Any]]] = Field(
|
||||
groupByLevels: Optional[List[GroupByLevel]] = Field(
|
||||
default=None,
|
||||
description="If set, persist this group tree before fetching (optimistic save)",
|
||||
description=(
|
||||
"When set (including an empty list), replaces the saved view's groupByLevels for this request. "
|
||||
"Omit entirely to use grouping from the view only."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -130,16 +175,22 @@ class PaginatedResponse(BaseModel, Generic[T]):
|
|||
"""
|
||||
Response containing paginated data and metadata.
|
||||
|
||||
groupTree is included when the endpoint supports table grouping and the
|
||||
current user has a saved group tree for the requested contextKey.
|
||||
It is None when grouping is not configured for the endpoint or the user
|
||||
has not created any groups yet. Frontend must treat None as an empty tree.
|
||||
groupLayout is included when the effective view has groupByLevels configured.
|
||||
It describes how to render group header rows in the current page's items[].
|
||||
Omitted (None) when no grouping is active.
|
||||
|
||||
appliedView describes which saved view was merged into this response,
|
||||
allowing the frontend to synchronise its view selector.
|
||||
"""
|
||||
items: List[T] = Field(..., description="Array of items for current page")
|
||||
pagination: Optional[PaginationMetadata] = Field(..., description="Pagination metadata (None if pagination not applied)")
|
||||
groupTree: Optional[List[TableGroupNode]] = Field(
|
||||
groupLayout: Optional[GroupLayout] = Field(
|
||||
default=None,
|
||||
description="Current group tree for this (user, contextKey) pair — None if no grouping configured",
|
||||
description="Group band structure for this page (None if no grouping active)",
|
||||
)
|
||||
appliedView: Optional[AppliedViewMeta] = Field(
|
||||
default=None,
|
||||
description="Metadata about the view applied to this response",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||
|
|
@ -148,34 +199,30 @@ class PaginatedResponse(BaseModel, Generic[T]):
|
|||
def normalize_pagination_dict(pagination_dict: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Normalize pagination dictionary to handle frontend variations.
|
||||
Moves top-level "search" field into filters if present.
|
||||
Grouping fields (groupId, saveGroupTree) are passed through as-is.
|
||||
|
||||
Args:
|
||||
pagination_dict: Raw pagination dictionary from frontend
|
||||
|
||||
Returns:
|
||||
Normalized pagination dictionary ready for PaginationParams parsing
|
||||
- Moves top-level "search" field into filters if present.
|
||||
- Silently drops legacy fields (groupId, saveGroupTree) that were part of the
|
||||
old tree-grouping implementation so old clients do not cause validation errors.
|
||||
- Passes viewKey through unchanged.
|
||||
"""
|
||||
if not pagination_dict:
|
||||
return pagination_dict
|
||||
|
||||
# Create a copy to avoid modifying the original
|
||||
normalized = dict(pagination_dict)
|
||||
|
||||
# Ensure required fields have sensible defaults
|
||||
if "page" not in normalized:
|
||||
normalized["page"] = 1
|
||||
if "pageSize" not in normalized:
|
||||
normalized["pageSize"] = 25
|
||||
|
||||
# Move top-level "search" into filters if present
|
||||
# Move top-level "search" into filters
|
||||
if "search" in normalized:
|
||||
if "filters" not in normalized or normalized["filters"] is None:
|
||||
normalized["filters"] = {}
|
||||
normalized["filters"]["search"] = normalized.pop("search")
|
||||
|
||||
# groupId / saveGroupTree are valid PaginationParams fields — pass through unchanged.
|
||||
# No transformation needed; Pydantic will validate them.
|
||||
# Drop legacy tree-grouping fields — harmless if already absent
|
||||
normalized.pop("groupId", None)
|
||||
normalized.pop("saveGroupTree", None)
|
||||
|
||||
return normalized
|
||||
|
|
|
|||
|
|
@ -4028,58 +4028,92 @@ class AppObjects:
|
|||
raise
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Table Grouping (user-defined groups for FormGeneratorTable instances)
|
||||
# Table List Views (saved display presets: filters, sort, groupByLevels)
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
def getTableGrouping(self, contextKey: str):
|
||||
"""
|
||||
Load the group tree for the current user and the given contextKey.
|
||||
|
||||
Returns a TableGrouping instance or None if no grouping has been saved yet.
|
||||
contextKey identifies the table instance, e.g. "connections", "prompts",
|
||||
"admin/users", "trustee/{instanceId}/documents".
|
||||
"""
|
||||
from modules.datamodels.datamodelPagination import TableGrouping
|
||||
def getTableListViews(self, contextKey: str) -> list:
|
||||
"""Return all saved views for the current user and contextKey."""
|
||||
from modules.datamodels.datamodelPagination import TableListView
|
||||
try:
|
||||
records = self.db.getRecordset(
|
||||
TableGrouping,
|
||||
rows = self.db.getRecordset(
|
||||
TableListView,
|
||||
recordFilter={"userId": str(self.userId), "contextKey": contextKey},
|
||||
)
|
||||
if not records:
|
||||
return None
|
||||
row = records[0]
|
||||
return TableGrouping.model_validate(row) if isinstance(row, dict) else row
|
||||
result = []
|
||||
for row in (rows or []):
|
||||
try:
|
||||
result.append(TableListView.model_validate(row) if isinstance(row, dict) else row)
|
||||
except Exception:
|
||||
pass
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error(f"getTableGrouping failed for user={self.userId} key={contextKey}: {e}")
|
||||
logger.error(f"getTableListViews failed for user={self.userId} context={contextKey}: {e}")
|
||||
return []
|
||||
|
||||
def getTableListView(self, contextKey: str, viewKey: str):
|
||||
"""Return one view by viewKey or None if not found."""
|
||||
from modules.datamodels.datamodelPagination import TableListView
|
||||
try:
|
||||
rows = self.db.getRecordset(
|
||||
TableListView,
|
||||
recordFilter={"userId": str(self.userId), "contextKey": contextKey, "viewKey": viewKey},
|
||||
)
|
||||
if not rows:
|
||||
return None
|
||||
row = rows[0]
|
||||
return TableListView.model_validate(row) if isinstance(row, dict) else row
|
||||
except Exception as e:
|
||||
logger.error(f"getTableListView failed for user={self.userId} key={viewKey}: {e}")
|
||||
return None
|
||||
|
||||
def upsertTableGrouping(self, contextKey: str, rootGroups: list):
|
||||
"""
|
||||
Create or replace the group tree for the current user and contextKey.
|
||||
def createTableListView(self, contextKey: str, viewKey: str, displayName: str, config: dict):
|
||||
"""Create a new view. Raises ValueError if viewKey already exists for this context."""
|
||||
from modules.datamodels.datamodelPagination import TableListView
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
if self.getTableListView(contextKey=contextKey, viewKey=viewKey) is not None:
|
||||
raise ValueError(f"View '{viewKey}' already exists for context '{contextKey}'")
|
||||
data = {
|
||||
"id": str(uuid.uuid4()),
|
||||
"userId": str(self.userId),
|
||||
"contextKey": contextKey,
|
||||
"viewKey": viewKey,
|
||||
"displayName": displayName,
|
||||
"config": config,
|
||||
"updatedAt": getUtcTimestamp(),
|
||||
}
|
||||
try:
|
||||
self.db.recordCreate(TableListView, data)
|
||||
return TableListView.model_validate(data)
|
||||
except Exception as e:
|
||||
logger.error(f"createTableListView failed: {e}")
|
||||
raise
|
||||
|
||||
rootGroups is a list of TableGroupNode-compatible dicts (the full tree).
|
||||
Returns the saved TableGrouping instance.
|
||||
"""
|
||||
from modules.datamodels.datamodelPagination import TableGrouping
|
||||
def updateTableListView(self, viewId: str, updates: dict):
|
||||
"""Update an existing view by its primary key id."""
|
||||
from modules.datamodels.datamodelPagination import TableListView
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
try:
|
||||
existing = self.getTableGrouping(contextKey)
|
||||
data = {
|
||||
"id": existing.id if existing else str(uuid.uuid4()),
|
||||
"userId": str(self.userId),
|
||||
"contextKey": contextKey,
|
||||
"rootGroups": rootGroups,
|
||||
"updatedAt": getUtcTimestamp(),
|
||||
}
|
||||
if existing:
|
||||
self.db.recordModify(TableGrouping, existing.id, data)
|
||||
else:
|
||||
self.db.recordCreate(TableGrouping, data)
|
||||
return TableGrouping.model_validate(data)
|
||||
updates = {**updates, "updatedAt": getUtcTimestamp()}
|
||||
self.db.recordModify(TableListView, viewId, updates)
|
||||
rows = self.db.getRecordset(TableListView, recordFilter={"id": viewId})
|
||||
if rows:
|
||||
row = rows[0]
|
||||
return TableListView.model_validate(row) if isinstance(row, dict) else row
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"upsertTableGrouping failed for user={self.userId} key={contextKey}: {e}")
|
||||
logger.error(f"updateTableListView failed for id={viewId}: {e}")
|
||||
raise
|
||||
|
||||
def deleteTableListView(self, viewId: str) -> bool:
|
||||
"""Delete a view by primary key id. Returns True on success."""
|
||||
from modules.datamodels.datamodelPagination import TableListView
|
||||
try:
|
||||
self.db.recordDelete(TableListView, viewId)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"deleteTableListView failed for id={viewId}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
# Public Methods
|
||||
|
||||
|
|
|
|||
|
|
@ -1532,44 +1532,8 @@ class ComponentObjects:
|
|||
raise FileDeletionError(f"Error deleting files in batch: {str(e)}")
|
||||
|
||||
def _ensureFeatureInstanceGroup(self, featureInstanceId: str, contextKey: str = "files/list") -> Optional[str]:
|
||||
"""Return the groupId of the default group for a feature instance.
|
||||
Creates the group if it doesn't exist yet."""
|
||||
try:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
appInterface = _appIface.getInterface(self._currentUser)
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
nodes = [n.model_dump() if hasattr(n, 'model_dump') else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
|
||||
# Look for group with name matching featureInstanceId
|
||||
def _find(nds):
|
||||
for nd in nds:
|
||||
nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
|
||||
nmeta = nd.get("meta", {}) if isinstance(nd, dict) else getattr(nd, "meta", {})
|
||||
if (nmeta or {}).get("featureInstanceId") == featureInstanceId:
|
||||
return nid
|
||||
subs = nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", [])
|
||||
result = _find(subs)
|
||||
if result:
|
||||
return result
|
||||
return None
|
||||
found = _find(nodes)
|
||||
if found:
|
||||
return found
|
||||
# Create new group
|
||||
import uuid
|
||||
newId = str(uuid.uuid4())
|
||||
newGroup = {
|
||||
"id": newId,
|
||||
"name": featureInstanceId,
|
||||
"itemIds": [],
|
||||
"subGroups": [],
|
||||
"meta": {"featureInstanceId": featureInstanceId},
|
||||
}
|
||||
nodes.append(newGroup)
|
||||
appInterface.upsertTableGrouping(contextKey, nodes)
|
||||
return newId
|
||||
except Exception as e:
|
||||
logger.error(f"_ensureFeatureInstanceGroup failed: {e}")
|
||||
return None
|
||||
"""Stub — file group tree removed. Returns None."""
|
||||
return None
|
||||
|
||||
def copyFile(self, sourceFileId: str, newFileName: Optional[str] = None) -> FileItem:
|
||||
"""Create a full duplicate of a file (FileItem + FileData)."""
|
||||
|
|
|
|||
|
|
@ -9,9 +9,9 @@ Features:
|
|||
- Admin endpoints: Manage settings, add credits, view all accounts
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response, Query, Header
|
||||
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response, Query, Header, status
|
||||
from fastapi.responses import JSONResponse
|
||||
from typing import List, Dict, Any, Optional
|
||||
from fastapi import status
|
||||
import logging
|
||||
from datetime import date, datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
|
|
@ -24,7 +24,13 @@ from modules.interfaces.interfaceDbBilling import getInterface as getBillingInte
|
|||
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import getService as getBillingService
|
||||
import json
|
||||
import math
|
||||
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
|
||||
from modules.datamodels.datamodelPagination import (
|
||||
PaginationParams,
|
||||
PaginatedResponse,
|
||||
PaginationMetadata,
|
||||
normalize_pagination_dict,
|
||||
AppliedViewMeta,
|
||||
)
|
||||
from modules.datamodels.datamodelBilling import (
|
||||
BillingAccount,
|
||||
BillingTransaction,
|
||||
|
|
@ -478,50 +484,193 @@ def getBalanceForMandate(
|
|||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/transactions", response_model=List[TransactionResponse])
|
||||
def _normalize_billing_tx_dict(t: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Make billing transaction rows JSON/grouping-safe (datetimes → str, enums → str)."""
|
||||
from datetime import date as date_cls, datetime as dt_cls
|
||||
|
||||
r = dict(t)
|
||||
for k, v in list(r.items()):
|
||||
if isinstance(v, dt_cls):
|
||||
r[k] = v.isoformat()
|
||||
elif isinstance(v, date_cls):
|
||||
r[k] = v.isoformat()
|
||||
for ek in ("transactionType", "referenceType"):
|
||||
if ek in r and r[ek] is not None and not isinstance(r[ek], str):
|
||||
ev = r[ek]
|
||||
r[ek] = getattr(ev, "value", None) or str(ev)
|
||||
return r
|
||||
|
||||
|
||||
def _load_billing_user_transactions_normalized(billingService) -> List[Dict[str, Any]]:
|
||||
raw = billingService.getTransactionHistory(limit=5000)
|
||||
return [_normalize_billing_tx_dict(t) for t in raw]
|
||||
|
||||
|
||||
def _view_user_transactions_filtered_list(
|
||||
billing_interface,
|
||||
load_mandate_ids: Optional[List[str]],
|
||||
effective_scope: str,
|
||||
personal_user_id: Optional[str],
|
||||
pagination_params: PaginationParams,
|
||||
ctx_user,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Up to 5000 rows: SQL window + in-memory filters/sort (incl. enriched columns)."""
|
||||
from modules.interfaces.interfaceDbManagement import ComponentObjects
|
||||
|
||||
bulk_params = pagination_params.model_copy(deep=True)
|
||||
bulk_params.page = 1
|
||||
bulk_params.pageSize = 5000
|
||||
bulk_result = billing_interface.getTransactionsForMandatesPaginated(
|
||||
mandateIds=load_mandate_ids,
|
||||
pagination=bulk_params,
|
||||
scope=effective_scope,
|
||||
userId=personal_user_id,
|
||||
)
|
||||
all_items = [_normalize_billing_tx_dict(dict(x)) for x in bulk_result.items]
|
||||
comp = ComponentObjects()
|
||||
comp.setUserContext(ctx_user)
|
||||
if pagination_params.filters:
|
||||
all_items = comp._applyFilters(all_items, pagination_params.filters)
|
||||
if pagination_params.sort:
|
||||
all_items = comp._applySorting(all_items, pagination_params.sort)
|
||||
return all_items
|
||||
|
||||
|
||||
@router.get("/transactions")
|
||||
@limiter.limit("30/minute")
|
||||
def getTransactions(
|
||||
request: Request,
|
||||
limit: int = Query(default=50, ge=1, le=500),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
ctx: RequestContext = Depends(getRequestContext)
|
||||
pagination: Optional[str] = Query(
|
||||
None,
|
||||
description="JSON PaginationParams for table UI (filters, sort, viewKey, groupByLevels).",
|
||||
),
|
||||
mode: Optional[str] = Query(None, description="'filterValues' | 'ids' with pagination"),
|
||||
column: Optional[str] = Query(None, description="Column for mode=filterValues"),
|
||||
ctx: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""
|
||||
Get transaction history across all mandates the user belongs to.
|
||||
|
||||
Without ``pagination`` query: legacy behaviour — returns a JSON array of
|
||||
transactions (`limit`/`offset` window).
|
||||
|
||||
With ``pagination`` JSON: returns ``{ items, pagination, groupLayout?, appliedView? }``.
|
||||
Table list views use contextKey ``billing/transactions``.
|
||||
"""
|
||||
try:
|
||||
billingService = getBillingService(
|
||||
ctx.user,
|
||||
ctx.mandateId,
|
||||
featureCode="billing"
|
||||
featureCode="billing",
|
||||
)
|
||||
|
||||
# Fetch enough transactions for pagination
|
||||
|
||||
if pagination:
|
||||
from modules.routes.routeHelpers import (
|
||||
applyViewToParams,
|
||||
buildGroupLayout,
|
||||
effective_group_by_levels,
|
||||
handleFilterValuesInMemory,
|
||||
handleIdsInMemory,
|
||||
resolveView,
|
||||
)
|
||||
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
|
||||
from modules.interfaces.interfaceDbManagement import ComponentObjects
|
||||
|
||||
CONTEXT_KEY = "billing/transactions"
|
||||
|
||||
try:
|
||||
paginationDict = json.loads(pagination)
|
||||
if not paginationDict:
|
||||
raise ValueError("empty pagination")
|
||||
paginationDict = normalize_pagination_dict(paginationDict)
|
||||
paginationParams = PaginationParams(**paginationDict)
|
||||
except (json.JSONDecodeError, ValueError, TypeError) as e:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||
|
||||
appInterface = getAppInterface(ctx.user)
|
||||
viewKey = paginationParams.viewKey
|
||||
viewConfig, viewDisplayName = resolveView(appInterface, CONTEXT_KEY, viewKey)
|
||||
viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
|
||||
paginationParams = applyViewToParams(paginationParams, viewConfig)
|
||||
groupByLevels = effective_group_by_levels(paginationParams, viewConfig)
|
||||
|
||||
all_items = _load_billing_user_transactions_normalized(billingService)
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
return handleFilterValuesInMemory(all_items, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
return handleIdsInMemory(all_items, pagination)
|
||||
|
||||
comp = ComponentObjects()
|
||||
comp.setUserContext(ctx.user)
|
||||
if paginationParams.filters:
|
||||
all_items = comp._applyFilters(all_items, paginationParams.filters)
|
||||
if paginationParams.sort:
|
||||
all_items = comp._applySorting(all_items, paginationParams.sort)
|
||||
|
||||
totalItems = len(all_items)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
|
||||
if not groupByLevels:
|
||||
pstart = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
page_items = all_items[pstart : pstart + paginationParams.pageSize]
|
||||
group_layout = None
|
||||
else:
|
||||
page_items, group_layout = buildGroupLayout(
|
||||
all_items,
|
||||
groupByLevels,
|
||||
paginationParams.page,
|
||||
paginationParams.pageSize,
|
||||
)
|
||||
|
||||
resp: Dict[str, Any] = {
|
||||
"items": page_items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=totalItems,
|
||||
totalPages=totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters,
|
||||
).model_dump(),
|
||||
}
|
||||
if group_layout:
|
||||
resp["groupLayout"] = group_layout.model_dump()
|
||||
if viewMeta:
|
||||
resp["appliedView"] = viewMeta.model_dump()
|
||||
return JSONResponse(content=resp)
|
||||
|
||||
transactions = billingService.getTransactionHistory(limit=offset + limit)
|
||||
|
||||
# Convert to response model
|
||||
result = []
|
||||
for t in transactions[offset:offset + limit]:
|
||||
result.append(TransactionResponse(
|
||||
id=t.get("id"),
|
||||
accountId=t.get("accountId"),
|
||||
transactionType=TransactionTypeEnum(t.get("transactionType", "DEBIT")),
|
||||
amount=t.get("amount", 0.0),
|
||||
description=t.get("description", ""),
|
||||
referenceType=ReferenceTypeEnum(t["referenceType"]) if t.get("referenceType") else None,
|
||||
workflowId=t.get("workflowId"),
|
||||
featureCode=t.get("featureCode"),
|
||||
featureInstanceId=t.get("featureInstanceId"),
|
||||
aicoreProvider=t.get("aicoreProvider"),
|
||||
aicoreModel=t.get("aicoreModel"),
|
||||
createdByUserId=t.get("createdByUserId"),
|
||||
sysCreatedAt=t.get("sysCreatedAt"),
|
||||
mandateId=t.get("mandateId"),
|
||||
mandateName=t.get("mandateName")
|
||||
))
|
||||
|
||||
result: List[TransactionResponse] = []
|
||||
for t in transactions[offset : offset + limit]:
|
||||
result.append(
|
||||
TransactionResponse(
|
||||
id=t.get("id"),
|
||||
accountId=t.get("accountId"),
|
||||
transactionType=TransactionTypeEnum(t.get("transactionType", "DEBIT")),
|
||||
amount=t.get("amount", 0.0),
|
||||
description=t.get("description", ""),
|
||||
referenceType=ReferenceTypeEnum(t["referenceType"]) if t.get("referenceType") else None,
|
||||
workflowId=t.get("workflowId"),
|
||||
featureCode=t.get("featureCode"),
|
||||
featureInstanceId=t.get("featureInstanceId"),
|
||||
aicoreProvider=t.get("aicoreProvider"),
|
||||
aicoreModel=t.get("aicoreModel"),
|
||||
createdByUserId=t.get("createdByUserId"),
|
||||
sysCreatedAt=t.get("sysCreatedAt"),
|
||||
mandateId=t.get("mandateId"),
|
||||
mandateName=t.get("mandateName"),
|
||||
)
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting billing transactions: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
|
@ -1757,7 +1906,7 @@ def getUserViewStatistics(
|
|||
|
||||
|
||||
@router.get("/view/users/transactions", response_model=PaginatedResponse[UserTransactionResponse])
|
||||
@limiter.limit("30/minute")
|
||||
@limiter.limit("120/minute")
|
||||
def getUserViewTransactions(
|
||||
request: Request,
|
||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
||||
|
|
@ -1808,7 +1957,6 @@ def getUserViewTransactions(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from fastapi.responses import JSONResponse
|
||||
crossFilterParams = parseCrossFilterPagination(column, pagination)
|
||||
values = billingInterface.getTransactionDistinctValues(
|
||||
mandateIds=loadMandateIds,
|
||||
|
|
@ -1820,7 +1968,6 @@ def getUserViewTransactions(
|
|||
return JSONResponse(content=values)
|
||||
|
||||
if mode == "ids":
|
||||
from fastapi.responses import JSONResponse
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
import json as _json
|
||||
|
|
@ -1835,6 +1982,66 @@ def getUserViewTransactions(
|
|||
) if hasattr(billingInterface, 'getTransactionIds') else []
|
||||
return JSONResponse(content=ids)
|
||||
|
||||
if mode == "groupSummary":
|
||||
if not pagination:
|
||||
raise HTTPException(status_code=400, detail="pagination required for groupSummary")
|
||||
import json as _json
|
||||
from collections import defaultdict
|
||||
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
|
||||
from modules.routes.routeHelpers import (
|
||||
applyViewToParams,
|
||||
effective_group_by_levels,
|
||||
resolveView,
|
||||
)
|
||||
|
||||
pagination_dict = _json.loads(pagination)
|
||||
pagination_dict = normalize_pagination_dict(pagination_dict)
|
||||
summary_params = PaginationParams(**pagination_dict)
|
||||
CONTEXT_KEY = "billing/view/users/transactions"
|
||||
app_interface = getAppInterface(ctx.user)
|
||||
summary_vk = summary_params.viewKey
|
||||
summary_view_cfg, _ = resolveView(app_interface, CONTEXT_KEY, summary_vk)
|
||||
summary_params = applyViewToParams(summary_params, summary_view_cfg)
|
||||
levels = effective_group_by_levels(summary_params, summary_view_cfg)
|
||||
if not levels or not levels[0].get("field"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="groupByLevels[0].field required for groupSummary",
|
||||
)
|
||||
field = levels[0]["field"]
|
||||
null_label = str(levels[0].get("nullLabel") or "—")
|
||||
all_rows = _view_user_transactions_filtered_list(
|
||||
billingInterface,
|
||||
loadMandateIds,
|
||||
scope,
|
||||
personalUserId,
|
||||
summary_params,
|
||||
ctx.user,
|
||||
)
|
||||
counts: Dict[str, int] = defaultdict(int)
|
||||
labels: Dict[str, str] = {}
|
||||
null_key = "\x00NULL"
|
||||
for item in all_rows:
|
||||
raw = item.get(field)
|
||||
if raw is None or raw == "":
|
||||
nk = null_key
|
||||
labels[nk] = null_label
|
||||
else:
|
||||
nk = str(raw)
|
||||
if nk not in labels:
|
||||
labels[nk] = nk
|
||||
counts[nk] += 1
|
||||
groups_out: List[Dict[str, Any]] = []
|
||||
for nk in sorted(counts.keys(), key=lambda x: (x == null_key, labels.get(x, x).lower())):
|
||||
groups_out.append(
|
||||
{
|
||||
"value": None if nk == null_key else nk,
|
||||
"label": labels.get(nk, nk),
|
||||
"totalCount": counts[nk],
|
||||
}
|
||||
)
|
||||
return JSONResponse(content={"groups": groups_out})
|
||||
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
import json as _json
|
||||
|
|
@ -1847,15 +2054,21 @@ def getUserViewTransactions(
|
|||
if not paginationParams:
|
||||
paginationParams = PaginationParams(page=1, pageSize=50)
|
||||
|
||||
result = billingInterface.getTransactionsForMandatesPaginated(
|
||||
mandateIds=loadMandateIds,
|
||||
pagination=paginationParams,
|
||||
scope=effectiveScope,
|
||||
userId=personalUserId,
|
||||
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
|
||||
from modules.routes.routeHelpers import (
|
||||
applyViewToParams,
|
||||
buildGroupLayout,
|
||||
effective_group_by_levels,
|
||||
resolveView,
|
||||
)
|
||||
|
||||
logger.debug(f"SQL-paginated {result.totalItems} transactions for user {ctx.user.id} "
|
||||
f"(scope={scope}, mandateId={mandateId}, page={paginationParams.page})")
|
||||
CONTEXT_KEY = "billing/view/users/transactions"
|
||||
appInterface = getAppInterface(ctx.user)
|
||||
viewKey = paginationParams.viewKey
|
||||
viewConfig, viewDisplayName = resolveView(appInterface, CONTEXT_KEY, viewKey)
|
||||
viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
|
||||
paginationParams = applyViewToParams(paginationParams, viewConfig)
|
||||
groupByLevels = effective_group_by_levels(paginationParams, viewConfig)
|
||||
|
||||
def _toResponse(d):
|
||||
return UserTransactionResponse(
|
||||
|
|
@ -1875,9 +2088,56 @@ def getUserViewTransactions(
|
|||
mandateId=d.get("mandateId"),
|
||||
mandateName=d.get("mandateName"),
|
||||
userId=d.get("userId"),
|
||||
userName=d.get("userName")
|
||||
userName=d.get("userName"),
|
||||
)
|
||||
|
||||
if groupByLevels:
|
||||
all_items = _view_user_transactions_filtered_list(
|
||||
billingInterface,
|
||||
loadMandateIds,
|
||||
effectiveScope,
|
||||
personalUserId,
|
||||
paginationParams,
|
||||
ctx.user,
|
||||
)
|
||||
|
||||
totalItems = len(all_items)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
page_items, group_layout = buildGroupLayout(
|
||||
all_items,
|
||||
groupByLevels,
|
||||
paginationParams.page,
|
||||
paginationParams.pageSize,
|
||||
)
|
||||
resp: Dict[str, Any] = {
|
||||
"items": [_toResponse(d).model_dump(mode="json") for d in page_items],
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=totalItems,
|
||||
totalPages=totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters,
|
||||
).model_dump(mode="json"),
|
||||
}
|
||||
if group_layout:
|
||||
resp["groupLayout"] = group_layout.model_dump(mode="json")
|
||||
if viewMeta:
|
||||
resp["appliedView"] = viewMeta.model_dump(mode="json")
|
||||
return JSONResponse(content=resp)
|
||||
|
||||
result = billingInterface.getTransactionsForMandatesPaginated(
|
||||
mandateIds=loadMandateIds,
|
||||
pagination=paginationParams,
|
||||
scope=effectiveScope,
|
||||
userId=personalUserId,
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
f"SQL-paginated {result.totalItems} transactions for user {ctx.user.id} "
|
||||
f"(scope={scope}, mandateId={mandateId}, page={paginationParams.page})"
|
||||
)
|
||||
|
||||
return PaginatedResponse(
|
||||
items=[_toResponse(d) for d in result.items],
|
||||
pagination=PaginationMetadata(
|
||||
|
|
@ -1887,7 +2147,7 @@ def getUserViewTransactions(
|
|||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters,
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ import logging
|
|||
import json
|
||||
import math
|
||||
from urllib.parse import quote
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from modules.datamodels.datamodelUam import User, UserConnection, AuthAuthority, ConnectionStatus
|
||||
from modules.datamodels.datamodelSecurity import Token
|
||||
|
|
@ -154,12 +155,12 @@ async def get_connections(
|
|||
"""
|
||||
from modules.routes.routeHelpers import (
|
||||
handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels,
|
||||
handleGroupingInRequest, applyGroupScopeFilter,
|
||||
resolveView, applyViewToParams, buildGroupLayout, effective_group_by_levels,
|
||||
)
|
||||
from modules.datamodels.datamodelPagination import AppliedViewMeta
|
||||
|
||||
CONTEXT_KEY = "connections"
|
||||
|
||||
# Parse pagination params early — needed for grouping in all modes
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
try:
|
||||
|
|
@ -171,7 +172,13 @@ async def get_connections(
|
|||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||
|
||||
interface = getInterface(currentUser)
|
||||
groupCtx = handleGroupingInRequest(paginationParams, interface, CONTEXT_KEY)
|
||||
|
||||
# Resolve view and merge config into params
|
||||
viewKey = paginationParams.viewKey if paginationParams else None
|
||||
viewConfig, viewDisplayName = resolveView(interface, CONTEXT_KEY, viewKey)
|
||||
viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
|
||||
paginationParams = applyViewToParams(paginationParams, viewConfig)
|
||||
groupByLevels = effective_group_by_levels(paginationParams, viewConfig)
|
||||
|
||||
def _buildEnhancedItems():
|
||||
connections = interface.getUserConnections(currentUser.id)
|
||||
|
|
@ -200,7 +207,6 @@ async def get_connections(
|
|||
try:
|
||||
items = _buildEnhancedItems()
|
||||
enrichRowsWithFkLabels(items, UserConnection)
|
||||
items = applyGroupScopeFilter(items, groupCtx.itemIds)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting filter values for connections: {str(e)}")
|
||||
|
|
@ -208,19 +214,60 @@ async def get_connections(
|
|||
|
||||
if mode == "ids":
|
||||
try:
|
||||
items = applyGroupScopeFilter(_buildEnhancedItems(), groupCtx.itemIds)
|
||||
return handleIdsInMemory(items, pagination)
|
||||
return handleIdsInMemory(_buildEnhancedItems(), pagination)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting IDs for connections: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
try:
|
||||
# NOTE: Cannot use db.getRecordsetPaginated() here because each connection
|
||||
# is enriched with computed tokenStatus/tokenExpiresAt (requires per-row DB lookup).
|
||||
# Token refresh also may trigger re-fetch. Connections per user are typically < 10,
|
||||
# so in-memory pagination is acceptable.
|
||||
if mode == "groupSummary":
|
||||
if not pagination:
|
||||
raise HTTPException(status_code=400, detail="pagination required for groupSummary")
|
||||
from modules.routes.routeHelpers import (
|
||||
apply_strategy_b_filters_and_sort,
|
||||
build_group_summary_groups,
|
||||
)
|
||||
if not groupByLevels or not groupByLevels[0].get("field"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="groupByLevels[0].field required for groupSummary",
|
||||
)
|
||||
field = groupByLevels[0]["field"]
|
||||
null_label = str(groupByLevels[0].get("nullLabel") or "—")
|
||||
connections = interface.getUserConnections(currentUser.id)
|
||||
try:
|
||||
refresh_result = await token_refresh_service.refresh_expired_tokens(currentUser.id)
|
||||
if refresh_result.get("refreshed", 0) > 0:
|
||||
logger.info(
|
||||
"Silently refreshed %s tokens for user %s (groupSummary)",
|
||||
refresh_result["refreshed"],
|
||||
currentUser.id,
|
||||
)
|
||||
connections = interface.getUserConnections(currentUser.id)
|
||||
except Exception as e:
|
||||
logger.warning(f"Silent token refresh failed for user {currentUser.id}: {str(e)}")
|
||||
enhanced_connections_dict = []
|
||||
for connection in connections:
|
||||
tokenStatus, tokenExpiresAt = getTokenStatusForConnection(interface, connection.id)
|
||||
enhanced_connections_dict.append({
|
||||
"id": connection.id,
|
||||
"userId": connection.userId,
|
||||
"authority": connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority),
|
||||
"externalId": connection.externalId,
|
||||
"externalUsername": connection.externalUsername or "",
|
||||
"externalEmail": connection.externalEmail,
|
||||
"status": connection.status.value if hasattr(connection.status, 'value') else str(connection.status),
|
||||
"connectedAt": connection.connectedAt,
|
||||
"lastChecked": connection.lastChecked,
|
||||
"expiresAt": connection.expiresAt,
|
||||
"tokenStatus": tokenStatus,
|
||||
"tokenExpiresAt": tokenExpiresAt
|
||||
})
|
||||
enrichRowsWithFkLabels(enhanced_connections_dict, UserConnection)
|
||||
filtered = apply_strategy_b_filters_and_sort(enhanced_connections_dict, paginationParams, currentUser)
|
||||
groups_out = build_group_summary_groups(filtered, field, null_label)
|
||||
return JSONResponse(content={"groups": groups_out})
|
||||
|
||||
# SECURITY FIX: All users (including admins) can only see their own connections
|
||||
try:
|
||||
connections = interface.getUserConnections(currentUser.id)
|
||||
|
||||
# Perform silent token refresh for expired OAuth connections
|
||||
|
|
@ -235,7 +282,7 @@ async def get_connections(
|
|||
enhanced_connections_dict = []
|
||||
for connection in connections:
|
||||
tokenStatus, tokenExpiresAt = getTokenStatusForConnection(interface, connection.id)
|
||||
connection_dict = {
|
||||
enhanced_connections_dict.append({
|
||||
"id": connection.id,
|
||||
"userId": connection.userId,
|
||||
"authority": connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority),
|
||||
|
|
@ -248,46 +295,31 @@ async def get_connections(
|
|||
"expiresAt": connection.expiresAt,
|
||||
"tokenStatus": tokenStatus,
|
||||
"tokenExpiresAt": tokenExpiresAt
|
||||
}
|
||||
enhanced_connections_dict.append(connection_dict)
|
||||
})
|
||||
|
||||
enrichRowsWithFkLabels(enhanced_connections_dict, UserConnection)
|
||||
enhanced_connections_dict = applyGroupScopeFilter(enhanced_connections_dict, groupCtx.itemIds)
|
||||
|
||||
if paginationParams is None:
|
||||
return {
|
||||
"items": enhanced_connections_dict,
|
||||
"pagination": None,
|
||||
"groupTree": groupCtx.groupTree,
|
||||
}
|
||||
return {"items": enhanced_connections_dict, "pagination": None}
|
||||
|
||||
# Apply filtering if provided
|
||||
# Apply filtering and sorting over full list (Strategy B)
|
||||
component_interface = ComponentObjects()
|
||||
component_interface.setUserContext(currentUser)
|
||||
if paginationParams.filters:
|
||||
component_interface = ComponentObjects()
|
||||
component_interface.setUserContext(currentUser)
|
||||
enhanced_connections_dict = component_interface._applyFilters(
|
||||
enhanced_connections_dict,
|
||||
paginationParams.filters
|
||||
)
|
||||
|
||||
# Apply sorting if provided
|
||||
enhanced_connections_dict = component_interface._applyFilters(enhanced_connections_dict, paginationParams.filters)
|
||||
if paginationParams.sort:
|
||||
component_interface = ComponentObjects()
|
||||
component_interface.setUserContext(currentUser)
|
||||
enhanced_connections_dict = component_interface._applySorting(
|
||||
enhanced_connections_dict,
|
||||
paginationParams.sort
|
||||
)
|
||||
enhanced_connections_dict = component_interface._applySorting(enhanced_connections_dict, paginationParams.sort)
|
||||
|
||||
totalItems = len(enhanced_connections_dict)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
|
||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
endIdx = startIdx + paginationParams.pageSize
|
||||
paged_connections = enhanced_connections_dict[startIdx:endIdx]
|
||||
# Strategy B grouping: operates on full filtered+sorted list, then slices
|
||||
page_items, groupLayout = buildGroupLayout(
|
||||
enhanced_connections_dict, groupByLevels, paginationParams.page, paginationParams.pageSize
|
||||
)
|
||||
|
||||
return {
|
||||
"items": paged_connections,
|
||||
response: dict = {
|
||||
"items": page_items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
|
|
@ -296,9 +328,13 @@ async def get_connections(
|
|||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
"groupTree": groupCtx.groupTree,
|
||||
}
|
||||
|
||||
if groupLayout:
|
||||
response["groupLayout"] = groupLayout.model_dump()
|
||||
if viewMeta:
|
||||
response["appliedView"] = viewMeta.model_dump()
|
||||
return response
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from fastapi.responses import JSONResponse
|
|||
from typing import List, Dict, Any, Optional
|
||||
import logging
|
||||
import json
|
||||
import math
|
||||
|
||||
# Import auth module
|
||||
from modules.auth import limiter, getCurrentUser, getRequestContext, RequestContext
|
||||
|
|
@ -500,9 +501,10 @@ def get_files(
|
|||
from modules.routes.routeHelpers import (
|
||||
handleIdsMode,
|
||||
handleFilterValuesInMemory,
|
||||
handleGroupingInRequest, applyGroupScopeFilter,
|
||||
resolveView, applyViewToParams, buildGroupLayout, effective_group_by_levels,
|
||||
)
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
from modules.datamodels.datamodelPagination import AppliedViewMeta
|
||||
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
|
|
@ -510,11 +512,40 @@ def get_files(
|
|||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
|
||||
)
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
groupCtx = handleGroupingInRequest(paginationParams, appInterface, "files/list")
|
||||
|
||||
# Resolve view and merge config into params
|
||||
viewKey = paginationParams.viewKey if paginationParams else None
|
||||
viewConfig, viewDisplayName = resolveView(appInterface, "files/list", viewKey)
|
||||
viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
|
||||
paginationParams = applyViewToParams(paginationParams, viewConfig)
|
||||
groupByLevels = effective_group_by_levels(paginationParams, viewConfig)
|
||||
|
||||
def _filesToDicts(fileItems):
|
||||
return [f.model_dump() if hasattr(f, "model_dump") else (dict(f) if not isinstance(f, dict) else f) for f in fileItems]
|
||||
|
||||
if mode == "groupSummary":
|
||||
if not pagination:
|
||||
raise HTTPException(status_code=400, detail="pagination required for groupSummary")
|
||||
from modules.routes.routeHelpers import (
|
||||
apply_strategy_b_filters_and_sort,
|
||||
build_group_summary_groups,
|
||||
)
|
||||
if not groupByLevels or not groupByLevels[0].get("field"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="groupByLevels[0].field required for groupSummary",
|
||||
)
|
||||
field = groupByLevels[0]["field"]
|
||||
null_label = str(groupByLevels[0].get("nullLabel") or "—")
|
||||
allFiles = managementInterface.getAllFiles()
|
||||
allItems = enrichRowsWithFkLabels(
|
||||
_filesToDicts(allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])),
|
||||
FileItem,
|
||||
)
|
||||
filtered = apply_strategy_b_filters_and_sort(allItems, paginationParams, currentUser)
|
||||
groups_out = build_group_summary_groups(filtered, field, null_label)
|
||||
return JSONResponse(content={"groups": groups_out})
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
|
|
@ -522,33 +553,72 @@ def get_files(
|
|||
items = allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])
|
||||
itemDicts = _filesToDicts(items)
|
||||
enrichRowsWithFkLabels(itemDicts, FileItem)
|
||||
itemDicts = applyGroupScopeFilter(itemDicts, groupCtx.itemIds)
|
||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
recordFilter = {"sysCreatedBy": managementInterface.userId}
|
||||
return handleIdsMode(managementInterface.db, FileItem, pagination, recordFilter)
|
||||
|
||||
result = managementInterface.getAllFiles(pagination=paginationParams)
|
||||
if not groupByLevels:
|
||||
# No grouping: let DB handle pagination directly (fastest path)
|
||||
result = managementInterface.getAllFiles(pagination=paginationParams)
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
enriched = enrichRowsWithFkLabels(_filesToDicts(result.items), FileItem)
|
||||
resp: dict = {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
}
|
||||
else:
|
||||
items = result if isinstance(result, list) else (result.items if hasattr(result, "items") else [result])
|
||||
resp = {"items": enrichRowsWithFkLabels(_filesToDicts(items), FileItem), "pagination": None}
|
||||
if viewMeta:
|
||||
resp["appliedView"] = viewMeta.model_dump()
|
||||
return resp
|
||||
|
||||
if paginationParams:
|
||||
enriched = applyGroupScopeFilter(enrichRowsWithFkLabels(_filesToDicts(result.items), FileItem), groupCtx.itemIds)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
"groupTree": groupCtx.groupTree,
|
||||
}
|
||||
else:
|
||||
items = result if isinstance(result, list) else (result.items if hasattr(result, "items") else [result])
|
||||
enriched = applyGroupScopeFilter(enrichRowsWithFkLabels(_filesToDicts(items), FileItem), groupCtx.itemIds)
|
||||
return {"items": enriched, "pagination": None, "groupTree": groupCtx.groupTree}
|
||||
# Strategy B grouping: load full list, group, then slice
|
||||
allFiles = managementInterface.getAllFiles()
|
||||
allItems = enrichRowsWithFkLabels(
|
||||
_filesToDicts(allFiles if isinstance(allFiles, list) else (allFiles.items if hasattr(allFiles, "items") else [])),
|
||||
FileItem,
|
||||
)
|
||||
|
||||
from modules.routes.routeHelpers import apply_strategy_b_filters_and_sort
|
||||
if paginationParams.filters or paginationParams.sort:
|
||||
allItems = apply_strategy_b_filters_and_sort(allItems, paginationParams, currentUser)
|
||||
|
||||
if not paginationParams:
|
||||
resp = {"items": allItems, "pagination": None}
|
||||
if viewMeta:
|
||||
resp["appliedView"] = viewMeta.model_dump()
|
||||
return resp
|
||||
|
||||
totalItems = len(allItems)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
page_items, groupLayout = buildGroupLayout(allItems, groupByLevels, paginationParams.page, paginationParams.pageSize)
|
||||
|
||||
resp = {
|
||||
"items": page_items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=totalItems,
|
||||
totalPages=totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
}
|
||||
if groupLayout:
|
||||
resp["groupLayout"] = groupLayout.model_dump()
|
||||
if viewMeta:
|
||||
resp["appliedView"] = viewMeta.model_dump()
|
||||
return resp
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
|
|
@ -559,34 +629,11 @@ def get_files(
|
|||
)
|
||||
|
||||
|
||||
def _addFileToGroup(appInterface, fileId: str, groupId: str, contextKey: str = "files/list"):
|
||||
"""Add a file to a group in the persisted groupTree (upsert)."""
|
||||
from modules.routes.routeHelpers import _collectItemIds
|
||||
try:
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
if not existing:
|
||||
return
|
||||
nodes = [n.model_dump() if hasattr(n, 'model_dump') else n for n in existing.rootGroups]
|
||||
def _add(nds):
|
||||
for nd in nds:
|
||||
nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
|
||||
if nid == groupId:
|
||||
itemIds = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
|
||||
if fileId not in itemIds:
|
||||
itemIds.append(fileId)
|
||||
if isinstance(nd, dict):
|
||||
nd["itemIds"] = itemIds
|
||||
else:
|
||||
nd.itemIds = itemIds
|
||||
return True
|
||||
subs = nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", [])
|
||||
if _add(subs):
|
||||
return True
|
||||
return False
|
||||
_add(nodes)
|
||||
appInterface.upsertTableGrouping(contextKey, nodes)
|
||||
except Exception as e:
|
||||
logger.warning(f"_addFileToGroup failed: {e}")
|
||||
def _LEGACY_addFileToGroup_REMOVED():
|
||||
"""Removed — file-group tree no longer exists. Use multi-select bulk operations."""
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
@router.post("/upload", status_code=status.HTTP_201_CREATED)
|
||||
|
|
@ -596,7 +643,6 @@ async def upload_file(
|
|||
file: UploadFile = File(...),
|
||||
workflowId: Optional[str] = Form(None),
|
||||
featureInstanceId: Optional[str] = Form(None),
|
||||
groupId: Optional[str] = Form(None),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> JSONResponse:
|
||||
|
|
@ -630,12 +676,6 @@ async def upload_file(
|
|||
managementInterface.updateFile(fileItem.id, {"featureInstanceId": featureInstanceId})
|
||||
fileItem.featureInstanceId = featureInstanceId
|
||||
|
||||
# Add to group if groupId was provided
|
||||
if groupId:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
_addFileToGroup(appInterface, fileItem.id, groupId)
|
||||
|
||||
# Determine response message based on duplicate type
|
||||
if duplicateType == "exact_duplicate":
|
||||
message = f"File '{file.filename}' already exists with identical content. Reusing existing file."
|
||||
|
|
@ -843,82 +883,68 @@ def batchDownload(
|
|||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# ── Group bulk endpoints ──────────────────────────────────────────────────────
|
||||
# ── Bulk file operations (replace former group-based bulk routes) ─────────────
|
||||
|
||||
def _get_group_item_ids(contextKey: str, groupId: str, appInterface) -> set:
|
||||
"""Collect all file IDs in a group and its sub-groups from the stored groupTree."""
|
||||
from modules.routes.routeHelpers import _collectItemIds
|
||||
try:
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
if not existing:
|
||||
return set()
|
||||
nodes = [n.model_dump() if hasattr(n, 'model_dump') else n for n in existing.rootGroups]
|
||||
result = _collectItemIds(nodes, groupId)
|
||||
return result or set()
|
||||
except Exception as e:
|
||||
logger.error(f"_get_group_item_ids failed for groupId={groupId}: {e}")
|
||||
return set()
|
||||
|
||||
|
||||
@router.patch("/groups/{groupId}/scope")
|
||||
@limiter.limit("60/minute")
|
||||
def patch_group_scope(
|
||||
@router.post("/bulk/scope")
|
||||
@limiter.limit("30/minute")
|
||||
def bulk_set_scope(
|
||||
request: Request,
|
||||
groupId: str = Path(..., description="Group ID"),
|
||||
body: dict = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Set scope for all files in a group (recursive)."""
|
||||
scope = body.get("scope")
|
||||
if not scope:
|
||||
raise HTTPException(status_code=400, detail="scope is required")
|
||||
"""Set scope for a list of files by their IDs."""
|
||||
fileIds: list = body.get("fileIds") or []
|
||||
scope: str = body.get("scope") or ""
|
||||
if not fileIds:
|
||||
raise HTTPException(status_code=400, detail="fileIds is required")
|
||||
validScopes = {"personal", "featureInstance", "mandate", "global"}
|
||||
if scope not in validScopes:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid scope. Must be one of {validScopes}")
|
||||
if scope == "global" and not context.isSysAdmin:
|
||||
raise HTTPException(status_code=403, detail="Only sysadmins can set global scope")
|
||||
try:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
|
||||
updated = 0
|
||||
for fid in fileIds:
|
||||
try:
|
||||
managementInterface.updateFile(fid, {"scope": scope})
|
||||
updated += 1
|
||||
except Exception as e:
|
||||
logger.error(f"patch_group_scope: failed to update file {fid}: {e}")
|
||||
return {"groupId": groupId, "scope": scope, "filesUpdated": updated}
|
||||
logger.error(f"bulk_set_scope: failed for file {fid}: {e}")
|
||||
return {"scope": scope, "filesUpdated": updated}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"patch_group_scope error: {e}")
|
||||
logger.error(f"bulk_set_scope error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.patch("/groups/{groupId}/neutralize")
|
||||
@limiter.limit("60/minute")
|
||||
def patch_group_neutralize(
|
||||
@router.post("/bulk/neutralize")
|
||||
@limiter.limit("30/minute")
|
||||
def bulk_set_neutralize(
|
||||
request: Request,
|
||||
groupId: str = Path(..., description="Group ID"),
|
||||
body: dict = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Toggle neutralize for all files in a group (recursive, incl. knowledge purge/reindex)."""
|
||||
"""Set neutralize flag for a list of files by their IDs (incl. knowledge purge/reindex)."""
|
||||
fileIds: list = body.get("fileIds") or []
|
||||
neutralize = body.get("neutralize")
|
||||
if not fileIds:
|
||||
raise HTTPException(status_code=400, detail="fileIds is required")
|
||||
if neutralize is None:
|
||||
raise HTTPException(status_code=400, detail="neutralize is required")
|
||||
try:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
|
||||
updated = 0
|
||||
for fid in fileIds:
|
||||
try:
|
||||
|
|
@ -929,39 +955,37 @@ def patch_group_neutralize(
|
|||
kIface = interfaceDbKnowledge.getInterface(currentUser)
|
||||
kIface.purgeFileKnowledge(fid)
|
||||
except Exception as ke:
|
||||
logger.warning(f"patch_group_neutralize: knowledge purge failed for {fid}: {ke}")
|
||||
logger.warning(f"bulk_set_neutralize: knowledge purge failed for {fid}: {ke}")
|
||||
updated += 1
|
||||
except Exception as e:
|
||||
logger.error(f"patch_group_neutralize: failed for file {fid}: {e}")
|
||||
return {"groupId": groupId, "neutralize": neutralize, "filesUpdated": updated}
|
||||
logger.error(f"bulk_set_neutralize: failed for file {fid}: {e}")
|
||||
return {"neutralize": neutralize, "filesUpdated": updated}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"patch_group_neutralize error: {e}")
|
||||
logger.error(f"bulk_set_neutralize error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/groups/{groupId}/download")
|
||||
@limiter.limit("20/minute")
|
||||
async def download_group_zip(
|
||||
@router.post("/bulk/download-zip")
|
||||
@limiter.limit("10/minute")
|
||||
async def bulk_download_zip(
|
||||
request: Request,
|
||||
groupId: str = Path(..., description="Group ID"),
|
||||
body: dict = Body(...),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Download all files in a group as a ZIP archive."""
|
||||
"""Download a list of files as a ZIP archive."""
|
||||
import io, zipfile
|
||||
fileIds: list = body.get("fileIds") or []
|
||||
if not fileIds:
|
||||
raise HTTPException(status_code=400, detail="fileIds is required")
|
||||
try:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
|
||||
if not fileIds:
|
||||
raise HTTPException(status_code=404, detail="Group not found or empty")
|
||||
buf = io.BytesIO()
|
||||
with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf:
|
||||
for fid in fileIds:
|
||||
|
|
@ -969,63 +993,21 @@ async def download_group_zip(
|
|||
fileMeta = managementInterface.getFile(fid)
|
||||
fileData = managementInterface.getFileData(fid)
|
||||
if fileMeta and fileData:
|
||||
name = (fileMeta.get("fileName") if isinstance(fileMeta, dict) else getattr(fileMeta, "fileName", fid)) or fid
|
||||
name = (getattr(fileMeta, "fileName", None) or fid)
|
||||
zf.writestr(name, fileData)
|
||||
except Exception as fe:
|
||||
logger.warning(f"download_group_zip: skipping file {fid}: {fe}")
|
||||
logger.warning(f"bulk_download_zip: skipping file {fid}: {fe}")
|
||||
buf.seek(0)
|
||||
from fastapi.responses import StreamingResponse
|
||||
return StreamingResponse(
|
||||
buf,
|
||||
media_type="application/zip",
|
||||
headers={"Content-Disposition": f'attachment; filename="group-{groupId}.zip"'},
|
||||
headers={"Content-Disposition": 'attachment; filename="files.zip"'},
|
||||
)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"download_group_zip error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.delete("/groups/{groupId}")
|
||||
@limiter.limit("30/minute")
|
||||
def delete_group(
|
||||
request: Request,
|
||||
groupId: str = Path(..., description="Group ID"),
|
||||
deleteItems: bool = Query(False, description="If true, also delete all files in the group"),
|
||||
currentUser: User = Depends(getCurrentUser),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Remove a group from the groupTree. Optionally delete all its files."""
|
||||
try:
|
||||
import modules.interfaces.interfaceDbApp as _appIface
|
||||
appInterface = _appIface.getInterface(currentUser)
|
||||
fileIds = _get_group_item_ids("files/list", groupId, appInterface)
|
||||
# Remove group from tree
|
||||
existing = appInterface.getTableGrouping("files/list")
|
||||
if existing:
|
||||
from modules.routes.routeHelpers import _removeGroupFromTree
|
||||
newRoots = _removeGroupFromTree([n.model_dump() if hasattr(n, 'model_dump') else n for n in existing.rootGroups], groupId)
|
||||
appInterface.upsertTableGrouping("files/list", newRoots)
|
||||
# Optionally delete files
|
||||
deletedFiles = 0
|
||||
if deleteItems:
|
||||
managementInterface = interfaceDbManagement.getInterface(
|
||||
currentUser,
|
||||
mandateId=str(context.mandateId) if context.mandateId else None,
|
||||
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
|
||||
)
|
||||
for fid in fileIds:
|
||||
try:
|
||||
managementInterface.deleteFile(fid)
|
||||
deletedFiles += 1
|
||||
except Exception as e:
|
||||
logger.error(f"delete_group: failed to delete file {fid}: {e}")
|
||||
return {"groupId": groupId, "deletedFiles": deletedFiles}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"delete_group error: {e}")
|
||||
logger.error(f"bulk_download_zip error: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -131,11 +131,9 @@ def get_mandates(
|
|||
handleFilterValuesInMemory, handleIdsInMemory,
|
||||
handleFilterValuesMode, handleIdsMode,
|
||||
parseCrossFilterPagination,
|
||||
handleGroupingInRequest, applyGroupScopeFilter,
|
||||
)
|
||||
|
||||
appInterface = interfaceDbApp.getRootInterface()
|
||||
groupCtx = handleGroupingInRequest(paginationParams, appInterface, "mandates")
|
||||
|
||||
def _mandateItemsForAdmin():
|
||||
items = []
|
||||
|
|
@ -154,23 +152,18 @@ def get_mandates(
|
|||
values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
else:
|
||||
mandateItems = applyGroupScopeFilter(_mandateItemsForAdmin(), groupCtx.itemIds)
|
||||
return handleFilterValuesInMemory(mandateItems, column, pagination)
|
||||
return handleFilterValuesInMemory(_mandateItemsForAdmin(), column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
if isPlatformAdmin:
|
||||
return handleIdsMode(appInterface.db, Mandate, pagination)
|
||||
else:
|
||||
mandateItems = applyGroupScopeFilter(_mandateItemsForAdmin(), groupCtx.itemIds)
|
||||
return handleIdsInMemory(mandateItems, pagination)
|
||||
return handleIdsInMemory(_mandateItemsForAdmin(), pagination)
|
||||
|
||||
if isPlatformAdmin:
|
||||
result = appInterface.getAllMandates(pagination=paginationParams)
|
||||
items = result.items if hasattr(result, 'items') else (result if isinstance(result, list) else [])
|
||||
items = applyGroupScopeFilter(
|
||||
[i.model_dump() if hasattr(i, 'model_dump') else (i if isinstance(i, dict) else vars(i)) for i in items],
|
||||
groupCtx.itemIds,
|
||||
)
|
||||
items = [i.model_dump() if hasattr(i, 'model_dump') else (i if isinstance(i, dict) else vars(i)) for i in items]
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=items,
|
||||
|
|
@ -182,13 +175,11 @@ def get_mandates(
|
|||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
),
|
||||
groupTree=groupCtx.groupTree,
|
||||
)
|
||||
else:
|
||||
return PaginatedResponse(items=items, pagination=None, groupTree=groupCtx.groupTree)
|
||||
return PaginatedResponse(items=items, pagination=None)
|
||||
else:
|
||||
mandateItems = applyGroupScopeFilter(_mandateItemsForAdmin(), groupCtx.itemIds)
|
||||
return PaginatedResponse(items=mandateItems, pagination=None, groupTree=groupCtx.groupTree)
|
||||
return PaginatedResponse(items=_mandateItemsForAdmin(), pagination=None)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
|
|
|
|||
|
|
@ -3,8 +3,10 @@
|
|||
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Query
|
||||
from typing import List, Dict, Any, Optional
|
||||
from fastapi import status
|
||||
from fastapi.responses import JSONResponse
|
||||
import logging
|
||||
import json
|
||||
import math
|
||||
|
||||
# Import auth module
|
||||
from modules.auth import limiter, getCurrentUser
|
||||
|
|
@ -46,13 +48,13 @@ def get_prompts(
|
|||
"""
|
||||
from modules.routes.routeHelpers import (
|
||||
handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels,
|
||||
handleGroupingInRequest, applyGroupScopeFilter,
|
||||
resolveView, applyViewToParams, buildGroupLayout, effective_group_by_levels,
|
||||
)
|
||||
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
|
||||
from modules.datamodels.datamodelPagination import AppliedViewMeta
|
||||
|
||||
CONTEXT_KEY = "prompts"
|
||||
|
||||
# Parse pagination params early — needed for grouping in all modes
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
try:
|
||||
|
|
@ -64,7 +66,13 @@ def get_prompts(
|
|||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||
|
||||
appInterface = getAppInterface(currentUser)
|
||||
groupCtx = handleGroupingInRequest(paginationParams, appInterface, CONTEXT_KEY)
|
||||
|
||||
# Resolve view and merge config into params
|
||||
viewKey = paginationParams.viewKey if paginationParams else None
|
||||
viewConfig, viewDisplayName = resolveView(appInterface, CONTEXT_KEY, viewKey)
|
||||
viewMeta = AppliedViewMeta(viewKey=viewKey, displayName=viewDisplayName) if viewKey else None
|
||||
paginationParams = applyViewToParams(paginationParams, viewConfig)
|
||||
groupByLevels = effective_group_by_levels(paginationParams, viewConfig)
|
||||
|
||||
def _promptsToEnrichedDicts(promptItems):
|
||||
dicts = [r.model_dump() if hasattr(r, 'model_dump') else (dict(r) if not isinstance(r, dict) else r) for r in promptItems]
|
||||
|
|
@ -73,43 +81,98 @@ def get_prompts(
|
|||
|
||||
managementInterface = interfaceDbManagement.getInterface(currentUser)
|
||||
|
||||
if mode == "groupSummary":
|
||||
if not pagination:
|
||||
raise HTTPException(status_code=400, detail="pagination required for groupSummary")
|
||||
from modules.routes.routeHelpers import (
|
||||
apply_strategy_b_filters_and_sort,
|
||||
build_group_summary_groups,
|
||||
)
|
||||
if not groupByLevels or not groupByLevels[0].get("field"):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="groupByLevels[0].field required for groupSummary",
|
||||
)
|
||||
field = groupByLevels[0]["field"]
|
||||
null_label = str(groupByLevels[0].get("nullLabel") or "—")
|
||||
result = managementInterface.getAllPrompts(pagination=None)
|
||||
allItems = _promptsToEnrichedDicts(
|
||||
result if isinstance(result, list) else (result.items if hasattr(result, "items") else [])
|
||||
)
|
||||
filtered = apply_strategy_b_filters_and_sort(allItems, paginationParams, currentUser)
|
||||
groups_out = build_group_summary_groups(filtered, field, null_label)
|
||||
return JSONResponse(content={"groups": groups_out})
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
result = managementInterface.getAllPrompts(pagination=None)
|
||||
items = _promptsToEnrichedDicts(result)
|
||||
items = applyGroupScopeFilter(items, groupCtx.itemIds)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
return handleFilterValuesInMemory(_promptsToEnrichedDicts(result), column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
result = managementInterface.getAllPrompts(pagination=None)
|
||||
items = _promptsToEnrichedDicts(result)
|
||||
items = applyGroupScopeFilter(items, groupCtx.itemIds)
|
||||
return handleIdsInMemory(items, pagination)
|
||||
return handleIdsInMemory(_promptsToEnrichedDicts(result), pagination)
|
||||
|
||||
result = managementInterface.getAllPrompts(pagination=paginationParams)
|
||||
if not groupByLevels:
|
||||
# No grouping: let DB handle pagination directly
|
||||
result = managementInterface.getAllPrompts(pagination=paginationParams)
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
response: dict = {
|
||||
"items": _promptsToEnrichedDicts(result.items),
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
}
|
||||
else:
|
||||
response = {"items": _promptsToEnrichedDicts(result if isinstance(result, list) else [result]), "pagination": None}
|
||||
if viewMeta:
|
||||
response["appliedView"] = viewMeta.model_dump()
|
||||
return response
|
||||
|
||||
if paginationParams:
|
||||
items = applyGroupScopeFilter(_promptsToEnrichedDicts(result.items), groupCtx.itemIds)
|
||||
return {
|
||||
"items": items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
"groupTree": groupCtx.groupTree,
|
||||
}
|
||||
else:
|
||||
items = applyGroupScopeFilter(_promptsToEnrichedDicts(result), groupCtx.itemIds)
|
||||
return {
|
||||
"items": items,
|
||||
"pagination": None,
|
||||
"groupTree": groupCtx.groupTree,
|
||||
}
|
||||
# Strategy B grouping: load all, filter+sort in-memory, group, then slice
|
||||
result = managementInterface.getAllPrompts(pagination=None)
|
||||
allItems = _promptsToEnrichedDicts(result if isinstance(result, list) else (result.items if hasattr(result, 'items') else []))
|
||||
|
||||
if not paginationParams:
|
||||
response = {"items": allItems, "pagination": None}
|
||||
if viewMeta:
|
||||
response["appliedView"] = viewMeta.model_dump()
|
||||
return response
|
||||
|
||||
if paginationParams.filters or paginationParams.sort:
|
||||
from modules.interfaces.interfaceDbManagement import ComponentObjects
|
||||
comp = ComponentObjects()
|
||||
comp.setUserContext(currentUser)
|
||||
if paginationParams.filters:
|
||||
allItems = comp._applyFilters(allItems, paginationParams.filters)
|
||||
if paginationParams.sort:
|
||||
allItems = comp._applySorting(allItems, paginationParams.sort)
|
||||
|
||||
totalItems = len(allItems)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
page_items, groupLayout = buildGroupLayout(allItems, groupByLevels, paginationParams.page, paginationParams.pageSize)
|
||||
|
||||
response = {
|
||||
"items": page_items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page,
|
||||
pageSize=paginationParams.pageSize,
|
||||
totalItems=totalItems,
|
||||
totalPages=totalPages,
|
||||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
}
|
||||
if groupLayout:
|
||||
response["groupLayout"] = groupLayout.model_dump()
|
||||
if viewMeta:
|
||||
response["appliedView"] = viewMeta.model_dump()
|
||||
return response
|
||||
|
||||
|
||||
@router.post("", response_model=Prompt)
|
||||
|
|
|
|||
|
|
@ -208,7 +208,6 @@ def get_users(
|
|||
- GET /api/users/ (no pagination - returns all users in mandate)
|
||||
- GET /api/users/?pagination={"page":1,"pageSize":10,"sort":[]}
|
||||
"""
|
||||
# Parse pagination early — needed for grouping in all modes
|
||||
_paginationParams = None
|
||||
if pagination:
|
||||
try:
|
||||
|
|
@ -219,10 +218,6 @@ def get_users(
|
|||
except (json.JSONDecodeError, ValueError) as e:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||
|
||||
from modules.routes.routeHelpers import handleGroupingInRequest as _handleGrouping, applyGroupScopeFilter as _applyGroupScope
|
||||
_appInterfaceForGrouping = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)
|
||||
_groupCtx = _handleGrouping(_paginationParams, _appInterfaceForGrouping, "users")
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
|
|
@ -233,14 +228,12 @@ def get_users(
|
|||
|
||||
try:
|
||||
paginationParams = _paginationParams
|
||||
appInterface = _appInterfaceForGrouping
|
||||
|
||||
if context.mandateId:
|
||||
# Get users for specific mandate using getUsersByMandate
|
||||
result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)
|
||||
appInterface = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)
|
||||
|
||||
if context.mandateId:
|
||||
result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(result.items), User), _groupCtx.itemIds)
|
||||
enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
|
|
@ -251,18 +244,14 @@ def get_users(
|
|||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
"groupTree": _groupCtx.groupTree,
|
||||
}
|
||||
else:
|
||||
users = result if isinstance(result, list) else result.items if hasattr(result, 'items') else []
|
||||
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(users), User), _groupCtx.itemIds)
|
||||
return {"items": enriched, "pagination": None, "groupTree": _groupCtx.groupTree}
|
||||
return {"items": enrichRowsWithFkLabels(_usersToDicts(users), User), "pagination": None}
|
||||
elif context.isPlatformAdmin:
|
||||
# PlatformAdmin without mandateId — DB-level pagination via interface
|
||||
result = appInterface.getAllUsers(paginationParams)
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(result.items), User), _groupCtx.itemIds)
|
||||
enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
|
|
@ -273,18 +262,13 @@ def get_users(
|
|||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
"groupTree": _groupCtx.groupTree,
|
||||
}
|
||||
else:
|
||||
users = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else [])
|
||||
enriched = _applyGroupScope(enrichRowsWithFkLabels(_usersToDicts(users), User), _groupCtx.itemIds)
|
||||
return {"items": enriched, "pagination": None, "groupTree": _groupCtx.groupTree}
|
||||
return {"items": enrichRowsWithFkLabels(_usersToDicts(users), User), "pagination": None}
|
||||
else:
|
||||
# Non-SysAdmin without mandateId: aggregate users across all admin mandates
|
||||
rootInterface = getRootInterface()
|
||||
userMandates = rootInterface.getUserMandates(str(context.user.id))
|
||||
|
||||
# Find mandates where user has admin role
|
||||
adminMandateIds = []
|
||||
for um in userMandates:
|
||||
umId = getattr(um, 'id', None)
|
||||
|
|
@ -297,13 +281,10 @@ def get_users(
|
|||
if role and role.roleLabel == "admin" and not role.featureInstanceId:
|
||||
adminMandateIds.append(str(mandateId))
|
||||
break
|
||||
|
||||
|
||||
if not adminMandateIds:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=routeApiMsg("No admin access to any mandate")
|
||||
)
|
||||
|
||||
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("No admin access to any mandate"))
|
||||
|
||||
from modules.datamodels.datamodelMembership import UserMandate as UserMandateModel
|
||||
allUM = rootInterface.db.getRecordset(UserMandateModel, recordFilter={"mandateId": adminMandateIds})
|
||||
uniqueUserIds = list({
|
||||
|
|
@ -312,13 +293,10 @@ def get_users(
|
|||
if (um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None))
|
||||
})
|
||||
batchUsers = rootInterface.getUsersByIds(uniqueUserIds) if uniqueUserIds else {}
|
||||
allUsers = [
|
||||
u.model_dump() if hasattr(u, 'model_dump') else vars(u)
|
||||
for u in batchUsers.values()
|
||||
]
|
||||
|
||||
allUsers = [u.model_dump() if hasattr(u, 'model_dump') else vars(u) for u in batchUsers.values()]
|
||||
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort as _applyFiltersAndSortHelper
|
||||
filteredUsers = _applyGroupScope(_applyFiltersAndSortHelper(allUsers, paginationParams), _groupCtx.itemIds)
|
||||
filteredUsers = _applyFiltersAndSortHelper(allUsers, paginationParams)
|
||||
enriched = enrichRowsWithFkLabels(filteredUsers, User)
|
||||
|
||||
if paginationParams:
|
||||
|
|
@ -327,7 +305,6 @@ def get_users(
|
|||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
endIdx = startIdx + paginationParams.pageSize
|
||||
|
||||
return {
|
||||
"items": enriched[startIdx:endIdx],
|
||||
"pagination": PaginationMetadata(
|
||||
|
|
@ -338,10 +315,9 @@ def get_users(
|
|||
sort=paginationParams.sort,
|
||||
filters=paginationParams.filters
|
||||
).model_dump(),
|
||||
"groupTree": _groupCtx.groupTree,
|
||||
}
|
||||
else:
|
||||
return {"items": enriched, "pagination": None, "groupTree": _groupCtx.groupTree}
|
||||
return {"items": enriched, "pagination": None}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@ -704,154 +704,260 @@ def paginateInMemory(
|
|||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Table Grouping helpers
|
||||
# View resolution and Strategy B grouping engine
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
from dataclasses import dataclass, field as dc_field
|
||||
|
||||
|
||||
@dataclass
|
||||
class GroupingContext:
|
||||
def resolveView(interface, contextKey: str, viewKey: Optional[str]):
|
||||
"""
|
||||
Result of handleGroupingInRequest.
|
||||
Carries the group tree for the response and the resolved item-ID set for
|
||||
group-scope filtering (None = no active group scope).
|
||||
Load a TableListView for the current user and contextKey.
|
||||
|
||||
Returns (config_dict, display_name):
|
||||
- (None, None) when viewKey is None / empty
|
||||
- (config, str | None) otherwise — config may be {}; display_name from the row
|
||||
|
||||
Raises HTTPException(404) when viewKey is explicitly set but the view
|
||||
does not exist (prevents silent fallback to ungrouped behaviour).
|
||||
"""
|
||||
groupTree: Optional[list] # List[TableGroupNode] serialised as dicts — for response
|
||||
itemIds: Optional[set] # Set[str] when groupId was set, else None
|
||||
from fastapi import HTTPException
|
||||
if not viewKey:
|
||||
return None, None
|
||||
try:
|
||||
view = interface.getTableListView(contextKey=contextKey, viewKey=viewKey)
|
||||
except Exception as e:
|
||||
logger.warning(f"resolveView: store lookup failed for key={viewKey!r} context={contextKey!r}: {e}")
|
||||
view = None
|
||||
if view is None:
|
||||
raise HTTPException(status_code=404, detail=f"View '{viewKey}' not found for context '{contextKey}'")
|
||||
cfg = view.config or {}
|
||||
dname = getattr(view, "displayName", None) or None
|
||||
return cfg, dname
|
||||
|
||||
|
||||
def _collectItemIds(nodes: list, groupId: str) -> Optional[set]:
|
||||
def effective_group_by_levels(
|
||||
pagination_params: Optional["PaginationParams"],
|
||||
view_config: Optional[dict],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Recursively search *nodes* for a node whose id == groupId and collect
|
||||
all itemIds from it and all its descendant subGroups.
|
||||
Returns None if the group is not found.
|
||||
Choose grouping levels for this request.
|
||||
|
||||
If the client sends ``groupByLevels`` (including ``[]``), it wins over the
|
||||
saved view. If the key is omitted (``None``), use the view's levels.
|
||||
"""
|
||||
for node in nodes:
|
||||
nodeId = node.get("id") if isinstance(node, dict) else getattr(node, "id", None)
|
||||
if nodeId == groupId:
|
||||
ids: set = set()
|
||||
_collectAllIds(node, ids)
|
||||
return ids
|
||||
subGroups = node.get("subGroups", []) if isinstance(node, dict) else getattr(node, "subGroups", [])
|
||||
result = _collectItemIds(subGroups, groupId)
|
||||
if result is not None:
|
||||
return result
|
||||
return None
|
||||
if pagination_params is not None:
|
||||
req = getattr(pagination_params, "groupByLevels", None)
|
||||
if req is not None:
|
||||
out: List[Dict[str, Any]] = []
|
||||
for lvl in req:
|
||||
if hasattr(lvl, "model_dump"):
|
||||
out.append(lvl.model_dump())
|
||||
elif isinstance(lvl, dict):
|
||||
out.append(dict(lvl))
|
||||
else:
|
||||
out.append(dict(lvl)) # type: ignore[arg-type]
|
||||
return out
|
||||
vc = (view_config or {}).get("groupByLevels") if view_config else None
|
||||
return list(vc or [])
|
||||
|
||||
|
||||
def _collectAllIds(node, ids: set) -> None:
|
||||
"""Collect itemIds from a node and all its descendants into ids."""
|
||||
nodeItemIds = node.get("itemIds", []) if isinstance(node, dict) else getattr(node, "itemIds", [])
|
||||
for iid in nodeItemIds:
|
||||
ids.add(str(iid))
|
||||
subGroups = node.get("subGroups", []) if isinstance(node, dict) else getattr(node, "subGroups", [])
|
||||
for child in subGroups:
|
||||
_collectAllIds(child, ids)
|
||||
|
||||
|
||||
def _removeGroupFromTree(nodes: list, groupId: str) -> list:
|
||||
"""Remove a group node (and all descendants) from the tree by id."""
|
||||
result = []
|
||||
for node in nodes:
|
||||
nodeId = node.get("id") if isinstance(node, dict) else getattr(node, "id", None)
|
||||
if nodeId == groupId:
|
||||
continue # skip this node (remove it)
|
||||
subGroups = node.get("subGroups", []) if isinstance(node, dict) else getattr(node, "subGroups", [])
|
||||
filtered_sub = _removeGroupFromTree(subGroups, groupId)
|
||||
if isinstance(node, dict):
|
||||
node = {**node, "subGroups": filtered_sub}
|
||||
result.append(node)
|
||||
return result
|
||||
|
||||
|
||||
def handleGroupingInRequest(
|
||||
paginationParams: Optional[PaginationParams],
|
||||
interface,
|
||||
contextKey: str,
|
||||
) -> GroupingContext:
|
||||
def applyViewToParams(params: Optional["PaginationParams"], viewConfig: Optional[dict]) -> Optional["PaginationParams"]:
|
||||
"""
|
||||
Central grouping handler — call at the start of every list route that
|
||||
supports table grouping.
|
||||
Merge a view's saved configuration into PaginationParams.
|
||||
|
||||
Steps (in order):
|
||||
1. If paginationParams.saveGroupTree is set:
|
||||
persist the new tree via interface.upsertTableGrouping, then clear
|
||||
saveGroupTree from paginationParams so it is not treated as a filter.
|
||||
2. Load the current group tree from the DB (used in step 3 and response).
|
||||
3. If paginationParams.groupId is set:
|
||||
resolve it to a Set[str] of itemIds (including all sub-groups),
|
||||
then clear groupId from paginationParams so it is not treated as a
|
||||
normal filter field.
|
||||
4. Return a GroupingContext with groupTree (for the response) and itemIds
|
||||
(for applyGroupScopeFilter).
|
||||
Priority: explicit request fields win over view defaults.
|
||||
- sort: use request sort if non-empty, otherwise view sort
|
||||
- filters: deep-merge (request filters win per-key)
|
||||
- pageSize: use request value (already set by normalize_pagination_dict)
|
||||
|
||||
The caller does NOT need to handle any grouping logic itself — just call
|
||||
applyGroupScopeFilter(items, groupCtx.itemIds) and embed groupCtx.groupTree
|
||||
in the response dict.
|
||||
Returns the (mutated) params, or a new minimal PaginationParams when
|
||||
params is None (so callers always get a valid object).
|
||||
"""
|
||||
from modules.datamodels.datamodelPagination import TableGroupNode
|
||||
from modules.datamodels.datamodelPagination import PaginationParams, SortField
|
||||
if not viewConfig:
|
||||
return params
|
||||
|
||||
groupTree = None
|
||||
itemIds = None
|
||||
if params is None:
|
||||
params = PaginationParams(page=1, pageSize=25)
|
||||
|
||||
if paginationParams is None:
|
||||
# Sort: request wins if non-empty
|
||||
if not params.sort and viewConfig.get("sort"):
|
||||
try:
|
||||
existing = interface.getTableGrouping(contextKey)
|
||||
if existing:
|
||||
groupTree = [n.model_dump() if hasattr(n, "model_dump") else n for n in existing.rootGroups]
|
||||
params.sort = [
|
||||
SortField(**s) if isinstance(s, dict) else s
|
||||
for s in viewConfig["sort"]
|
||||
]
|
||||
except Exception as e:
|
||||
logger.warning(f"handleGroupingInRequest: getTableGrouping failed: {e}")
|
||||
return GroupingContext(groupTree=groupTree, itemIds=None)
|
||||
logger.warning(f"applyViewToParams: could not parse view sort: {e}")
|
||||
|
||||
# Step 1: persist saveGroupTree if present
|
||||
if paginationParams.saveGroupTree is not None:
|
||||
try:
|
||||
saved = interface.upsertTableGrouping(contextKey, paginationParams.saveGroupTree)
|
||||
groupTree = [n.model_dump() if hasattr(n, "model_dump") else n for n in saved.rootGroups]
|
||||
except Exception as e:
|
||||
logger.error(f"handleGroupingInRequest: upsertTableGrouping failed: {e}")
|
||||
paginationParams.saveGroupTree = None
|
||||
# Filters: deep-merge (request filters take priority per-key)
|
||||
viewFilters = viewConfig.get("filters") or {}
|
||||
if viewFilters:
|
||||
merged = dict(viewFilters)
|
||||
if params.filters:
|
||||
merged.update(params.filters)
|
||||
params.filters = merged
|
||||
|
||||
# Step 2: load current tree (only if not already set from save above)
|
||||
if groupTree is None:
|
||||
try:
|
||||
existing = interface.getTableGrouping(contextKey)
|
||||
if existing:
|
||||
groupTree = [n.model_dump() if hasattr(n, "model_dump") else n for n in existing.rootGroups]
|
||||
except Exception as e:
|
||||
logger.warning(f"handleGroupingInRequest: getTableGrouping failed: {e}")
|
||||
return params
|
||||
|
||||
# Step 3: resolve groupId to itemIds set
|
||||
if paginationParams.groupId is not None:
|
||||
targetGroupId = paginationParams.groupId
|
||||
paginationParams.groupId = None # remove so it is not treated as a normal filter
|
||||
if groupTree:
|
||||
itemIds = _collectItemIds(groupTree, targetGroupId)
|
||||
if itemIds is None:
|
||||
logger.warning(
|
||||
f"handleGroupingInRequest: groupId={targetGroupId!r} not found in tree "
|
||||
f"for contextKey={contextKey!r} — returning empty set"
|
||||
)
|
||||
itemIds = set() # unknown group → show nothing rather than everything
|
||||
|
||||
def apply_strategy_b_filters_and_sort(
|
||||
items: List[Dict[str, Any]],
|
||||
pagination_params: Optional[PaginationParams],
|
||||
current_user: Any,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Shared in-memory filter + sort pass for Strategy B (files/prompts/connections lists).
|
||||
"""
|
||||
if not pagination_params:
|
||||
return list(items)
|
||||
from modules.interfaces.interfaceDbManagement import ComponentObjects
|
||||
|
||||
comp = ComponentObjects()
|
||||
comp.setUserContext(current_user)
|
||||
out = list(items)
|
||||
if pagination_params.filters:
|
||||
out = comp._applyFilters(out, pagination_params.filters)
|
||||
if pagination_params.sort:
|
||||
out = comp._applySorting(out, pagination_params.sort)
|
||||
return out
|
||||
|
||||
|
||||
def build_group_summary_groups(
|
||||
items: List[Dict[str, Any]],
|
||||
field: str,
|
||||
null_label: str = "—",
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Build {"value", "label", "totalCount"} for mode=groupSummary (single grouping level).
|
||||
"""
|
||||
from collections import defaultdict
|
||||
|
||||
counts: Dict[str, int] = defaultdict(int)
|
||||
display_by_key: Dict[str, str] = {}
|
||||
null_key = "\x00NULL"
|
||||
label_attr = f"{field}Label"
|
||||
|
||||
for item in items:
|
||||
raw = item.get(field)
|
||||
if raw is None or raw == "":
|
||||
nk = null_key
|
||||
display = null_label
|
||||
else:
|
||||
# groupId sent but no tree saved yet → return empty (nothing belongs to any group)
|
||||
logger.warning(
|
||||
f"handleGroupingInRequest: groupId={targetGroupId!r} set but no tree exists "
|
||||
f"for contextKey={contextKey!r} — returning empty set"
|
||||
)
|
||||
itemIds = set()
|
||||
nk = str(raw)
|
||||
display = None
|
||||
lbl = item.get(label_attr)
|
||||
if lbl is not None and lbl != "":
|
||||
display = str(lbl)
|
||||
if display is None:
|
||||
display = nk
|
||||
counts[nk] += 1
|
||||
if nk not in display_by_key:
|
||||
display_by_key[nk] = display
|
||||
|
||||
return GroupingContext(groupTree=groupTree, itemIds=itemIds)
|
||||
ordered_keys = sorted(
|
||||
counts.keys(),
|
||||
key=lambda x: (x == null_key, str(display_by_key.get(x, x)).lower()),
|
||||
)
|
||||
return [
|
||||
{
|
||||
"value": None if nk == null_key else nk,
|
||||
"label": display_by_key.get(nk, nk),
|
||||
"totalCount": counts[nk],
|
||||
}
|
||||
for nk in ordered_keys
|
||||
]
|
||||
|
||||
|
||||
def applyGroupScopeFilter(items: List[Dict[str, Any]], itemIds: Optional[set]) -> List[Dict[str, Any]]:
    """
    Filter items to those whose "id" field is in itemIds.

    Returns items unchanged when itemIds is None (no active group scope).
    Works for both normal list items and for mode=ids / mode=filterValues flows
    — call it before handleIdsInMemory / handleFilterValuesInMemory.
    """
    if itemIds is None:
        return items
    return [item for item in items if str(item.get("id", "")) in itemIds]


def buildGroupLayout(
    all_items: List[Dict[str, Any]],
    groupByLevels: List[Dict[str, Any]],
    page: int,
    pageSize: int,
) -> tuple:
    """
    Apply multi-level grouping to all_items, slice to the requested page,
    and return (page_items, GroupLayout | None).

    Strategy B: grouping operates on the full filtered+sorted candidate list.
    Items are stably re-sorted **in place** by their group path so that members
    of the same group are always contiguous. Python's sort is guaranteed
    stable, so the caller's per-group sort order is preserved within each band.

    Parameters
    ----------
    all_items: fully filtered and user-sorted list of row dicts (sorted in place).
    groupByLevels: list of {"field": str, "nullLabel": str, "direction": "asc"|"desc"} dicts.
    page, pageSize: 1-based page index and page size.

    Returns
    -------
    (page_items, GroupLayout | None)
        GroupLayout is None when no grouping is configured.
    """
    offset = (page - 1) * pageSize

    # No grouping configured, or only entries without a usable field:
    # plain page slice, no layout.
    levels = [lvl.get("field", "") for lvl in (groupByLevels or []) if lvl.get("field")]
    if not levels:
        return all_items[offset:offset + pageSize], None

    # Imported lazily so the ungrouped fast path above carries no model
    # dependency (and to sidestep potential import cycles with the datamodel).
    from functools import cmp_to_key
    from modules.datamodels.datamodelPagination import GroupBand, GroupLayout

    nullLabels = {lvl.get("field", ""): lvl.get("nullLabel", "—") for lvl in groupByLevels}

    def _path_key(item: dict) -> tuple:
        """Tuple of one display string per grouping level for this item."""
        parts = []
        for f in levels:
            v = item.get(f)
            # Only a true None maps to the null label. Falsy non-None values
            # (0, False, "") keep their own string form — the previous
            # `str(item.get(f) or "")` collapsed them all to "".
            parts.append(nullLabels.get(f, "—") if v is None else str(v))
        return tuple(parts)

    def _item_cmp(a: dict, b: dict) -> int:
        """Compare two items by group path, honouring per-level asc/desc."""
        pa, pb = _path_key(a), _path_key(b)
        for i, (va, vb) in enumerate(zip(pa, pb)):
            if va != vb:
                asc = (groupByLevels[i].get("direction") or "asc").lower() != "desc"
                if va < vb:
                    return -1 if asc else 1
                return 1 if asc else -1
        return 0

    # Stable in-place sort by group path; rows sharing a path keep their
    # relative order from the caller.
    all_items.sort(key=cmp_to_key(_item_cmp))

    # Build the global band list: contiguous runs of identical group paths
    # over the full sorted list, as [startIdx, endIdx) index pairs.
    bands_global: List[dict] = []
    current_path: Optional[tuple] = None
    current_start = 0
    for i, item in enumerate(all_items):
        path = _path_key(item)
        if path != current_path:
            if current_path is not None:
                bands_global.append({"path": list(current_path), "startIdx": current_start, "endIdx": i})
            current_path = path
            current_start = i
    if current_path is not None:
        bands_global.append({"path": list(current_path), "startIdx": current_start, "endIdx": len(all_items)})

    # Slice to the requested page.
    page_end = offset + pageSize
    page_items = all_items[offset:page_end]

    # Keep only bands intersecting this page, re-based to page-local rows.
    bands_on_page: List[GroupBand] = []
    for band in bands_global:
        inter_start = max(band["startIdx"], offset)
        inter_end = min(band["endIdx"], page_end)
        if inter_start >= inter_end:
            continue
        path_list = band["path"]
        bands_on_page.append(GroupBand(
            path=path_list,
            label=path_list[-1] if path_list else "—",
            startRowIndex=inter_start - offset,
            rowCount=inter_end - inter_start,
        ))

    # An empty bands_on_page is represented the same way (GroupLayout with an
    # empty band list), so no conditional is needed here.
    return page_items, GroupLayout(levels=levels, bands=bands_on_page)
|
||||
|
|
|
|||
177
modules/routes/routeTableViews.py
Normal file
177
modules/routes/routeTableViews.py
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
CRUD endpoints for saved table views (TableListView).
|
||||
|
||||
A view stores a named preset of filters, sort order, and groupByLevels for a
|
||||
specific table (identified by contextKey). Views are per-user and optionally
|
||||
per-mandate.
|
||||
|
||||
Route prefix: /api/table-views
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Request
|
||||
from fastapi import status
|
||||
|
||||
from modules.auth import limiter, getCurrentUser
|
||||
from modules.datamodels.datamodelUam import User
|
||||
from modules.datamodels.datamodelPagination import TableListView
|
||||
import modules.interfaces.interfaceDbApp as interfaceDbApp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/table-views",
|
||||
tags=["Table Views"],
|
||||
responses={404: {"description": "Not found"}},
|
||||
)
|
||||
|
||||
|
||||
def _ownedOrRaise(view: Optional[TableListView], viewId: str, userId: str):
    """Raise 404 when view is missing; ownership is implicitly guaranteed by the
    interface layer (views are always queried with the current userId)."""
    # Guard-clause form: the happy path returns immediately.
    if view is not None:
        return view
    raise HTTPException(status_code=404, detail=f"View '{viewId}' not found")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# List views for a context
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("")
@limiter.limit("60/minute")
def list_views(
    request: Request,
    contextKey: str = Query(..., description="Table context key, e.g. 'connections', 'files/list'"),
    currentUser: User = Depends(getCurrentUser),
):
    """List all saved views for the current user and contextKey."""
    # Resolve the per-user interface, then serialize every view for the client.
    appIface = interfaceDbApp.getInterface(currentUser)

    def _dump(v):
        # Pydantic models expose model_dump(); plain dicts pass through.
        return v.model_dump() if hasattr(v, "model_dump") else v

    return [_dump(v) for v in appIface.getTableListViews(contextKey=contextKey)]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Get one view
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/{viewKey}")
@limiter.limit("60/minute")
def get_view(
    request: Request,
    viewKey: str = Path(..., description="View slug"),
    contextKey: str = Query(..., description="Table context key"),
    currentUser: User = Depends(getCurrentUser),
):
    """Return a single saved view by its viewKey."""
    appIface = interfaceDbApp.getInterface(currentUser)
    found = appIface.getTableListView(contextKey=contextKey, viewKey=viewKey)
    # Missing view -> 404; otherwise serialize (Pydantic model or plain dict).
    if found is None:
        raise HTTPException(status_code=404, detail=f"View '{viewKey}' not found for context '{contextKey}'")
    return found.model_dump() if hasattr(found, "model_dump") else found
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Create a view
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.post("", status_code=status.HTTP_201_CREATED)
@limiter.limit("30/minute")
def create_view(
    request: Request,
    body: dict = Body(...),
    currentUser: User = Depends(getCurrentUser),
):
    """
    Create a new saved view.

    Body fields:
    - contextKey (required): table context key
    - viewKey (required): short slug, unique per (user, contextKey)
    - displayName (required): human-readable label
    - config (optional): view config dict with keys:
        schemaVersion, filters, sort, groupByLevels

    Raises 400 on missing required fields, 409 when the interface rejects the
    view (ValueError, e.g. duplicate viewKey), 500 on any other failure.
    """
    contextKey = body.get("contextKey")
    viewKey = body.get("viewKey")
    displayName = body.get("displayName")
    config = body.get("config") or {}

    # Validate required fields up front so the interface layer only ever
    # sees well-formed requests.
    if not contextKey:
        raise HTTPException(status_code=400, detail="contextKey is required")
    if not viewKey:
        raise HTTPException(status_code=400, detail="viewKey is required")
    if not displayName:
        raise HTTPException(status_code=400, detail="displayName is required")

    iface = interfaceDbApp.getInterface(currentUser)
    try:
        view = iface.createTableListView(
            contextKey=contextKey,
            viewKey=viewKey,
            displayName=displayName,
            config=config,
        )
        return view.model_dump() if hasattr(view, "model_dump") else view
    except ValueError as e:
        # Constraint violation from the interface (e.g. duplicate viewKey).
        # `from e` preserves the root cause in tracebacks/logs.
        raise HTTPException(status_code=409, detail=str(e)) from e
    except Exception as e:
        logger.error(f"create_view failed: {e}")
        raise HTTPException(status_code=500, detail="Failed to create view") from e
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Update a view (by id)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.put("/{viewId}")
@limiter.limit("30/minute")
def update_view(
    request: Request,
    viewId: str = Path(..., description="View primary-key id (not viewKey)"),
    body: dict = Body(...),
    currentUser: User = Depends(getCurrentUser),
):
    """
    Update an existing view.

    Updatable fields: displayName, viewKey, config.
    The contextKey cannot be changed after creation.

    Raises 400 when no updatable field is present, 404 when the id is unknown,
    500 on interface failure.
    """
    allowed = {"displayName", "viewKey", "config"}
    updates = {k: v for k, v in body.items() if k in allowed}
    if not updates:
        # sorted() keeps the error message deterministic — interpolating the
        # raw set would render its members in arbitrary order.
        raise HTTPException(status_code=400, detail=f"No updatable fields provided. Allowed: {sorted(allowed)}")

    iface = interfaceDbApp.getInterface(currentUser)
    try:
        updated = iface.updateTableListView(viewId=viewId, updates=updates)
    except Exception as e:
        logger.error(f"update_view failed: {e}")
        # `from e` preserves the root cause in tracebacks/logs.
        raise HTTPException(status_code=500, detail="Failed to update view") from e

    if updated is None:
        raise HTTPException(status_code=404, detail=f"View id='{viewId}' not found")
    return updated.model_dump() if hasattr(updated, "model_dump") else updated
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Delete a view (by id)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.delete("/{viewId}", status_code=status.HTTP_204_NO_CONTENT)
@limiter.limit("30/minute")
def delete_view(
    request: Request,
    viewId: str = Path(..., description="View primary-key id"),
    currentUser: User = Depends(getCurrentUser),
):
    """Delete a saved view by its primary-key id."""
    appIface = interfaceDbApp.getInterface(currentUser)
    # 204 on success (implicit None return); 404 when nothing was deleted.
    if not appIface.deleteTableListView(viewId=viewId):
        raise HTTPException(status_code=404, detail=f"View id='{viewId}' not found or could not be deleted")
|
||||
|
|
@ -61,34 +61,8 @@ async def _getOrCreateInstanceGroup(
|
|||
featureInstanceId: str,
|
||||
contextKey: str = "files/list",
|
||||
) -> Optional[str]:
|
||||
"""Return groupId of the default group for a feature instance; create if needed."""
|
||||
try:
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
nodes = [
|
||||
n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n))
|
||||
for n in (existing.rootGroups if existing else [])
|
||||
]
|
||||
|
||||
def _find(nds):
|
||||
for nd in nds:
|
||||
meta = nd.get("meta", {}) if isinstance(nd, dict) else getattr(nd, "meta", {})
|
||||
if (meta or {}).get("featureInstanceId") == featureInstanceId:
|
||||
return nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
|
||||
found = _find(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", []))
|
||||
if found:
|
||||
return found
|
||||
return None
|
||||
|
||||
found = _find(nodes)
|
||||
if found:
|
||||
return found
|
||||
newId = str(uuid.uuid4())
|
||||
nodes.append({"id": newId, "name": featureInstanceId, "itemIds": [], "subGroups": [], "meta": {"featureInstanceId": featureInstanceId}})
|
||||
appInterface.upsertTableGrouping(contextKey, nodes)
|
||||
return newId
|
||||
except Exception as e:
|
||||
logger.error(f"_getOrCreateInstanceGroup: {e}")
|
||||
return None
|
||||
"""Stub — file group tree removed. Returns None; callers that checked the result will skip group assignment."""
|
||||
return None
|
||||
|
||||
|
||||
async def _getOrCreateTempGroup(
|
||||
|
|
@ -96,8 +70,8 @@ async def _getOrCreateTempGroup(
|
|||
sessionId: str,
|
||||
contextKey: str = "files/list",
|
||||
) -> Optional[str]:
|
||||
"""Return groupId of a temporary group for a session; create if needed."""
|
||||
return await _getOrCreateInstanceGroup(appInterface, f"_temp_{sessionId}", contextKey)
|
||||
"""Stub — file group tree removed. Returns None."""
|
||||
return None
|
||||
|
||||
|
||||
def _attachFileAsChatDocument(
|
||||
|
|
|
|||
|
|
@ -312,52 +312,7 @@ def _registerWorkspaceTools(registry: ToolRegistry, services):
|
|||
fiId = context.get("featureInstanceId") or (services.featureInstanceId if services else "")
|
||||
if fiId:
|
||||
dbMgmt.updateFile(fileItem.id, {"featureInstanceId": fiId})
|
||||
if args.get("groupId"):
|
||||
try:
|
||||
appIface = chatService.interfaceDbApp
|
||||
existing = appIface.getTableGrouping("files/list")
|
||||
nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
|
||||
def _addToGroup(nds, gid, fid):
|
||||
for nd in nds:
|
||||
nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
|
||||
if nid == gid:
|
||||
ids = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
|
||||
if fid not in ids:
|
||||
ids.append(fid)
|
||||
if isinstance(nd, dict):
|
||||
nd["itemIds"] = ids
|
||||
return True
|
||||
if _addToGroup(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", []), gid, fid):
|
||||
return True
|
||||
return False
|
||||
_addToGroup(nodes, args["groupId"], fileItem.id)
|
||||
appIface.upsertTableGrouping("files/list", nodes)
|
||||
except Exception as _ge:
|
||||
logger.warning(f"writeFile: failed to add file to group {args['groupId']}: {_ge}")
|
||||
elif fiId:
|
||||
try:
|
||||
appIface = chatService.interfaceDbApp
|
||||
instanceGroupId = await _getOrCreateInstanceGroup(appIface, fiId)
|
||||
if instanceGroupId:
|
||||
existing = appIface.getTableGrouping("files/list")
|
||||
nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
|
||||
def _addToGroup2(nds, gid, fid):
|
||||
for nd in nds:
|
||||
nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
|
||||
if nid == gid:
|
||||
ids = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
|
||||
if fid not in ids:
|
||||
ids.append(fid)
|
||||
if isinstance(nd, dict):
|
||||
nd["itemIds"] = ids
|
||||
return True
|
||||
if _addToGroup2(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", []), gid, fid):
|
||||
return True
|
||||
return False
|
||||
_addToGroup2(nodes, instanceGroupId, fileItem.id)
|
||||
appIface.upsertTableGrouping("files/list", nodes)
|
||||
except Exception as _ge:
|
||||
logger.warning(f"writeFile: failed to add file to instance group for {fiId}: {_ge}")
|
||||
# File group tree removed — groupId arg and instance-group assignment no longer apply
|
||||
if args.get("tags"):
|
||||
dbMgmt.updateFile(fileItem.id, {"tags": args["tags"]})
|
||||
|
||||
|
|
@ -746,136 +701,7 @@ def _registerWorkspaceTools(registry: ToolRegistry, services):
|
|||
readOnly=False
|
||||
)
|
||||
|
||||
# ---- Group tools (replaces folder-based tools) ----
|
||||
|
||||
async def _listGroups(args: Dict[str, Any], context: Dict[str, Any]):
|
||||
contextKey = args.get("contextKey", "files/list")
|
||||
try:
|
||||
chatService = services.chat
|
||||
appInterface = chatService.interfaceDbApp
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
if not existing:
|
||||
return ToolResult(toolCallId="", toolName="listGroups", success=True, data="No groups found.")
|
||||
|
||||
def _flatten(nodes, depth=0):
|
||||
result = []
|
||||
for n in nodes:
|
||||
nd = n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n))
|
||||
result.append({"id": nd.get("id"), "name": nd.get("name"), "depth": depth, "itemCount": len(nd.get("itemIds", []))})
|
||||
result.extend(_flatten(nd.get("subGroups", []), depth + 1))
|
||||
return result
|
||||
|
||||
groups = _flatten(existing.rootGroups)
|
||||
lines = "\n".join(
|
||||
f"{' ' * g['depth']}- {g['name']} (id: {g['id']}, items: {g['itemCount']})"
|
||||
for g in groups
|
||||
) if groups else "No groups found."
|
||||
return ToolResult(toolCallId="", toolName="listGroups", success=True, data=lines)
|
||||
except Exception as e:
|
||||
return ToolResult(toolCallId="", toolName="listGroups", success=False, error=str(e))
|
||||
|
||||
async def _listItemsInGroup(args: Dict[str, Any], context: Dict[str, Any]):
|
||||
groupId = args.get("groupId", "")
|
||||
contextKey = args.get("contextKey", "files/list")
|
||||
if not groupId:
|
||||
return ToolResult(toolCallId="", toolName="listItemsInGroup", success=False, error="groupId is required")
|
||||
try:
|
||||
from modules.routes.routeHelpers import _collectItemIds
|
||||
chatService = services.chat
|
||||
appInterface = chatService.interfaceDbApp
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
if not existing:
|
||||
return ToolResult(toolCallId="", toolName="listItemsInGroup", success=True, data="No groups found.")
|
||||
nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in existing.rootGroups]
|
||||
ids = _collectItemIds(nodes, groupId)
|
||||
itemList = list(ids) if ids else []
|
||||
return ToolResult(
|
||||
toolCallId="", toolName="listItemsInGroup", success=True,
|
||||
data="\n".join(f"- {fid}" for fid in itemList) if itemList else "No items in group.",
|
||||
)
|
||||
except Exception as e:
|
||||
return ToolResult(toolCallId="", toolName="listItemsInGroup", success=False, error=str(e))
|
||||
|
||||
async def _addItemsToGroup(args: Dict[str, Any], context: Dict[str, Any]):
|
||||
groupId = args.get("groupId", "")
|
||||
itemIds = args.get("itemIds", [])
|
||||
contextKey = args.get("contextKey", "files/list")
|
||||
if not groupId:
|
||||
return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error="groupId is required")
|
||||
if not itemIds:
|
||||
return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error="itemIds is required")
|
||||
try:
|
||||
chatService = services.chat
|
||||
appInterface = chatService.interfaceDbApp
|
||||
existing = appInterface.getTableGrouping(contextKey)
|
||||
nodes = [n.model_dump() if hasattr(n, "model_dump") else (n if isinstance(n, dict) else vars(n)) for n in (existing.rootGroups if existing else [])]
|
||||
|
||||
def _add(nds):
|
||||
for nd in nds:
|
||||
nid = nd.get("id") if isinstance(nd, dict) else getattr(nd, "id", None)
|
||||
if nid == groupId:
|
||||
existing_ids = list(nd.get("itemIds", []) if isinstance(nd, dict) else getattr(nd, "itemIds", []))
|
||||
for fid in itemIds:
|
||||
if fid not in existing_ids:
|
||||
existing_ids.append(fid)
|
||||
if isinstance(nd, dict):
|
||||
nd["itemIds"] = existing_ids
|
||||
return True
|
||||
if _add(nd.get("subGroups", []) if isinstance(nd, dict) else getattr(nd, "subGroups", [])):
|
||||
return True
|
||||
return False
|
||||
|
||||
found = _add(nodes)
|
||||
if not found:
|
||||
return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error=f"Group {groupId} not found")
|
||||
appInterface.upsertTableGrouping(contextKey, nodes)
|
||||
return ToolResult(
|
||||
toolCallId="", toolName="addItemsToGroup", success=True,
|
||||
data=f"Added {len(itemIds)} item(s) to group {groupId}",
|
||||
)
|
||||
except Exception as e:
|
||||
return ToolResult(toolCallId="", toolName="addItemsToGroup", success=False, error=str(e))
|
||||
|
||||
registry.register(
|
||||
"listGroups", _listGroups,
|
||||
description="List all groups in the file grouping tree. Groups replace folders for organising files.",
|
||||
parameters={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"contextKey": {"type": "string", "description": "Grouping context key (default: 'files/list')"},
|
||||
}
|
||||
},
|
||||
readOnly=True
|
||||
)
|
||||
|
||||
registry.register(
|
||||
"listItemsInGroup", _listItemsInGroup,
|
||||
description="List all file IDs assigned to a specific group (includes sub-groups recursively).",
|
||||
parameters={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"groupId": {"type": "string", "description": "The group ID to inspect"},
|
||||
"contextKey": {"type": "string", "description": "Grouping context key (default: 'files/list')"},
|
||||
},
|
||||
"required": ["groupId"]
|
||||
},
|
||||
readOnly=True
|
||||
)
|
||||
|
||||
registry.register(
|
||||
"addItemsToGroup", _addItemsToGroup,
|
||||
description="Add one or more file IDs to an existing group.",
|
||||
parameters={
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"groupId": {"type": "string", "description": "The group ID to add files to"},
|
||||
"itemIds": {"type": "array", "items": {"type": "string"}, "description": "List of file IDs to add"},
|
||||
"contextKey": {"type": "string", "description": "Grouping context key (default: 'files/list')"},
|
||||
},
|
||||
"required": ["groupId", "itemIds"]
|
||||
},
|
||||
readOnly=False
|
||||
)
|
||||
# Group tree tools removed — file grouping now uses view-based display grouping (TableListView)
|
||||
|
||||
registry.register(
|
||||
"replaceInFile", _replaceInFile,
|
||||
|
|
|
|||
|
|
@ -523,34 +523,12 @@ class ChatService:
|
|||
return results
|
||||
|
||||
    def listGroups(self, contextKey: str = "files/list") -> list:
        """Stub — file group tree removed. Returns empty list.

        Kept for backward compatibility with existing callers; file grouping
        now uses view-based display grouping (TableListView), so there is no
        group tree to enumerate. The contextKey argument is ignored.
        """
        return []
|
||||
|
||||
    def listFilesInGroup(self, groupId: str, contextKey: str = "files/list") -> list:
        """Stub — file group tree removed. Returns empty list.

        Kept for backward compatibility with existing callers; file grouping
        now uses view-based display grouping (TableListView), so there are no
        group memberships to resolve. Both arguments are ignored.
        """
        return []
|
||||
|
||||
# ---- DataSource CRUD ----
|
||||
|
||||
|
|
|
|||
Loading…
Reference in a new issue