diff --git a/modules/datamodels/datamodelUam.py b/modules/datamodels/datamodelUam.py
index dc7824f6..9278ceb3 100644
--- a/modules/datamodels/datamodelUam.py
+++ b/modules/datamodels/datamodelUam.py
@@ -197,7 +197,6 @@ class UserConnection(PowerOnModel):
         json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"},
     )
 
-    @computed_field
     @computed_field
     @property
     def connectionReference(self) -> str:
diff --git a/modules/interfaces/interfaceDbBilling.py b/modules/interfaces/interfaceDbBilling.py
index af696ab1..d703293c 100644
--- a/modules/interfaces/interfaceDbBilling.py
+++ b/modules/interfaces/interfaceDbBilling.py
@@ -1542,16 +1542,40 @@ class BillingObjects:
             if not accountIds:
                 return PaginatedResult(items=[], totalItems=0, totalPages=0)
 
-            recordFilter: Dict[str, Any] = {"accountId": accountIds}
-            if userId:
-                recordFilter["createdByUserId"] = userId
+            # Extract free-text search term and run a custom query that covers
+            # enriched columns (mandateName, userName) and the numeric amount
+            # column. The generic SQL search only covers TEXT columns of the
+            # BillingTransaction table, which excludes these fields.
+            searchTerm: Optional[str] = None
+            if mappedPagination and mappedPagination.filters:
+                raw = mappedPagination.filters.get("search")
+                if isinstance(raw, str) and raw.strip():
+                    searchTerm = raw.strip()
 
-            result = self.db.getRecordsetPaginated(
-                BillingTransaction,
-                pagination=mappedPagination,
-                recordFilter=recordFilter,
-            )
-            pageItems = result.get("items", []) if isinstance(result, dict) else result.items
+            if searchTerm:
+                searchResult = self._searchTransactionsPaginated(
+                    allAccounts=allAccounts,
+                    accountIds=accountIds,
+                    userId=userId,
+                    searchTerm=searchTerm,
+                    pagination=mappedPagination,
+                )
+                pageItems = searchResult["items"]
+                totalItems = searchResult["totalItems"]
+                totalPages = searchResult["totalPages"]
+            else:
+                recordFilter: Dict[str, Any] = {"accountId": accountIds}
+                if userId:
+                    recordFilter["createdByUserId"] = userId
+
+                result = self.db.getRecordsetPaginated(
+                    BillingTransaction,
+                    pagination=mappedPagination,
+                    recordFilter=recordFilter,
+                )
+                pageItems = result.get("items", []) if isinstance(result, dict) else result.items
+                totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
+                totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
 
             accountMap = {a.get("id"): a for a in allAccounts}
 
@@ -1594,15 +1618,186 @@ class BillingObjects:
                 row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
                 enriched.append(row)
 
-            totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
-            totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
-
             return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages)
 
         except Exception as e:
             logger.error(f"Error in getTransactionsForMandatesPaginated: {e}")
             return PaginatedResult(items=[], totalItems=0, totalPages=0)
 
+    def _searchTransactionsPaginated(
+        self,
+        allAccounts: List[Dict[str, Any]],
+        accountIds: List[str],
+        userId: Optional[str],
+        searchTerm: str,
+        pagination: PaginationParams,
+    ) -> Dict[str, Any]:
+        """
+        Custom paginated search for BillingTransaction that also covers the
+        enriched columns `mandateName` and `userName` as well as the numeric
+        `amount` column. Resolves matching mandate/user IDs via the app DB
+        first, then builds a single SQL query with OR-combined conditions.
+        """
+        import math
+        from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
+        from modules.datamodels.datamodelUam import UserInDB
+        from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
+
+        table = BillingTransaction.__name__
+        fields = _get_model_fields(BillingTransaction)
+        pattern = f"%{searchTerm}%"
+
+        # Resolve matching user / mandate IDs via the app DB (which is separate
+        # from the billing DB and hosts UserInDB / Mandate tables).
+        matchingUserIds: List[str] = []
+        matchingMandateIds: List[str] = []
+        try:
+            appInterface = getAppInterface(self.currentUser)
+            appInterface.db._ensure_connection()
+            with appInterface.db.connection.cursor() as cur:
+                if appInterface.db._ensureTableExists(UserInDB):
+                    cur.execute(
+                        'SELECT "id" FROM "UserInDB" WHERE '
+                        'COALESCE("username", \'\') ILIKE %s OR '
+                        'COALESCE("fullName", \'\') ILIKE %s OR '
+                        'COALESCE("email", \'\') ILIKE %s',
+                        (pattern, pattern, pattern),
+                    )
+                    matchingUserIds = [r["id"] for r in cur.fetchall() if r.get("id")]
+
+                if appInterface.db._ensureTableExists(Mandate):
+                    cur.execute(
+                        'SELECT "id" FROM "Mandate" WHERE '
+                        'COALESCE("label", \'\') ILIKE %s OR '
+                        'COALESCE("name", \'\') ILIKE %s',
+                        (pattern, pattern),
+                    )
+                    matchingMandateIds = [r["id"] for r in cur.fetchall() if r.get("id")]
+        except Exception as e:
+            logger.warning(f"_searchTransactionsPaginated: user/mandate resolution failed: {e}")
+
+        matchingAccountIds = [
+            a.get("id") for a in allAccounts
+            if a.get("id") and a.get("mandateId") in set(matchingMandateIds)
+        ]
+
+        # Try to interpret the search term as a number for amount matching.
+        amountVal: Optional[float] = None
+        try:
+            amountVal = float(searchTerm.replace(",", "."))
+        except Exception:
+            amountVal = None
+
+        whereParts: List[str] = ['"accountId" = ANY(%s)']
+        whereValues: List[Any] = [accountIds]
+        if userId:
+            whereParts.append('"createdByUserId" = %s')
+            whereValues.append(userId)
+
+        # Apply non-search filters from pagination (reuse existing builder for
+        # everything except the `search` key which we handle explicitly).
+        import copy
+        paginationWithoutSearch = copy.deepcopy(pagination) if pagination else None
+        if paginationWithoutSearch and paginationWithoutSearch.filters:
+            paginationWithoutSearch.filters = {
+                k: v for k, v in paginationWithoutSearch.filters.items() if k != "search"
+            }
+
+        orParts: List[str] = []
+        orValues: List[Any] = []
+
+        textCols = [c for c, t in fields.items() if t == "TEXT"]
+        for col in textCols:
+            orParts.append(f'COALESCE("{col}"::TEXT, \'\') ILIKE %s')
+            orValues.append(pattern)
+
+        if matchingUserIds:
+            orParts.append('"createdByUserId" = ANY(%s)')
+            orValues.append(matchingUserIds)
+        if matchingAccountIds:
+            orParts.append('"accountId" = ANY(%s)')
+            orValues.append(matchingAccountIds)
+
+        orParts.append('"amount"::TEXT ILIKE %s')
+        orValues.append(pattern)
+        if amountVal is not None:
+            orParts.append('"amount" = %s')
+            orValues.append(amountVal)
+
+        whereParts.append(f"({' OR '.join(orParts)})")
+        whereValues.extend(orValues)
+
+        # Apply remaining structured filters via the generic helper by feeding
+        # it a dummy pagination that does NOT include LIMIT/OFFSET. We only
+        # need the WHERE contribution for the non-search filters here.
+        extraWhere = ""
+        extraValues: List[Any] = []
+        if paginationWithoutSearch and paginationWithoutSearch.filters:
+            try:
+                fromPagination = copy.deepcopy(paginationWithoutSearch)
+                fromPagination.sort = []
+                fromPagination.page = 1
+                fromPagination.pageSize = 1
+                ew, _, _, values, _ = self.db._buildPaginationClauses(
+                    BillingTransaction, fromPagination, recordFilter=None
+                )
+                if ew:
+                    extraWhere = ew.replace(" WHERE ", " AND ", 1)
+                    extraValues = list(values)
+            except Exception as e:
+                logger.warning(f"_searchTransactionsPaginated: extra-filter build failed: {e}")
+
+        whereClause = " WHERE " + " AND ".join(whereParts) + extraWhere
+        whereValues.extend(extraValues)
+
+        # Build ORDER BY from pagination.sort
+        validColumns = set(fields.keys())
+        orderParts: List[str] = []
+        if pagination and pagination.sort:
+            for sf in pagination.sort:
+                sfField = sf.get("field") if isinstance(sf, dict) else getattr(sf, "field", None)
+                sfDir = sf.get("direction", "asc") if isinstance(sf, dict) else getattr(sf, "direction", "asc")
+                if sfField and sfField in validColumns:
+                    direction = "DESC" if str(sfDir).lower() == "desc" else "ASC"
+                    colType = fields.get(sfField, "TEXT")
+                    if colType == "BOOLEAN":
+                        orderParts.append(f'COALESCE("{sfField}", FALSE) {direction}')
+                    else:
+                        orderParts.append(f'"{sfField}" {direction} NULLS LAST')
+        if not orderParts:
+            orderParts.append('"id"')
+        orderClause = " ORDER BY " + ", ".join(orderParts)
+
+        pageSize = pagination.pageSize if pagination else 50
+        page = pagination.page if pagination else 1
+        offset = (page - 1) * pageSize
+        limitClause = f" LIMIT {pageSize} OFFSET {offset}"
+
+        try:
+            self.db._ensure_connection()
+            with self.db.connection.cursor() as cur:
+                countSql = f'SELECT COUNT(*) FROM "{table}"{whereClause}'
+                cur.execute(countSql, whereValues)
+                totalItems = cur.fetchone()["count"]
+
+                dataSql = f'SELECT * FROM "{table}"{whereClause}{orderClause}{limitClause}'
+                cur.execute(dataSql, whereValues)
+                records = [dict(row) for row in cur.fetchall()]
+
+                for rec in records:
+                    _parseRecordFields(rec, fields, f"search table {table}")
+
+            totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
+            return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
+
+        except Exception as e:
+            logger.error(f"_searchTransactionsPaginated SQL error: {e}", exc_info=True)
+            try:
+                self.db.connection.rollback()
+            except Exception:
+                pass
+            return {"items": [], "totalItems": 0, "totalPages": 0}
+
     def _buildScopeFilter(
         self,
         mandateIds: Optional[List[str]],
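For reference, the OR block above condenses to roughly the following shape. This is a minimal, self-contained sketch with an illustrative signature (`buildSearchWhere` is not a name in this PR); the production method additionally folds in resolved user/mandate IDs and the structured pagination filters:

```python
from typing import Any, List, Optional, Tuple


def buildSearchWhere(textCols: List[str], searchTerm: str) -> Tuple[str, List[Any]]:
    """OR-combine ILIKE conditions over TEXT columns plus numeric amount matching."""
    pattern = f"%{searchTerm}%"
    parts: List[str] = []
    values: List[Any] = []
    for col in textCols:
        # COALESCE so NULL cells never make the whole OR term NULL
        parts.append(f'COALESCE("{col}"::TEXT, \'\') ILIKE %s')
        values.append(pattern)
    # amount matches either as text ("12" hits 12.50) or as an exact number;
    # the comma->dot replace accepts German-style decimals like "12,50"
    parts.append('"amount"::TEXT ILIKE %s')
    values.append(pattern)
    amountVal: Optional[float] = None
    try:
        amountVal = float(searchTerm.replace(",", "."))
    except ValueError:
        amountVal = None
    if amountVal is not None:
        parts.append('"amount" = %s')
        values.append(amountVal)
    return f"({' OR '.join(parts)})", values


# buildSearchWhere(["description"], "12,50") yields one ILIKE per column,
# a text match on amount, and an exact numeric match on 12.5.
```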
""" if not self.checkRbacPermission(FileItem, "create"): raise PermissionError("No permission to create files") - + # Compute file size and hash fileSize = len(content) fileHash = hashlib.sha256(content).hexdigest() - + # Duplicate check: same user + same hash + same fileName → return existing existingFile = self.checkForDuplicateFile(fileHash, name) if existingFile: logger.info(f"Duplicate file detected in createFile: '{name}' (hash={fileHash[:12]}...) for user {self.userId} — returning existing file {existingFile.id}") return existingFile - + # Ensure fileName is unique uniqueName = self._generateUniquefileName(name) - + mandateId = self.mandateId or "" featureInstanceId = self.featureInstanceId or "" @@ -1124,6 +1127,11 @@ class ComponentObjects: else: scope = "personal" + # Normalize folderId: treat empty string as "no folder" (= root) – NULL in DB + normalizedFolderId: Optional[str] = folderId + if isinstance(normalizedFolderId, str) and not normalizedFolderId.strip(): + normalizedFolderId = None + fileItem = FileItem( mandateId=mandateId, featureInstanceId=featureInstanceId, @@ -1132,7 +1140,7 @@ class ComponentObjects: mimeType=mimeType, fileSize=fileSize, fileHash=fileHash, - folderId="", + folderId=normalizedFolderId, ) # Store in database @@ -1846,39 +1854,44 @@ class ComponentObjects: logger.error(f"Error getting file content: {str(e)}") return None - def saveUploadedFile(self, fileContent: bytes, fileName: str) -> tuple[FileItem, str]: - """Saves an uploaded file if user has permission.""" + def saveUploadedFile(self, fileContent: bytes, fileName: str, folderId: Optional[str] = None) -> tuple[FileItem, str]: + """Saves an uploaded file if user has permission. + + Args: + folderId: Optional parent folder ID. None means root folder. + """ try: # Check file creation permission if not self.checkRbacPermission(FileItem, "create"): raise PermissionError("No permission to upload files") - - logger.debug(f"Starting upload process for file: {fileName}") - + + logger.debug(f"Starting upload process for file: {fileName} (folderId={folderId!r})") + if not isinstance(fileContent, bytes): logger.error(f"Invalid fileContent type: {type(fileContent)}") raise ValueError(f"fileContent must be bytes, got {type(fileContent)}") - + # Compute file hash to check for duplicates before any DB writes fileHash = hashlib.sha256(fileContent).hexdigest() - + # Duplicate check: same user + same fileHash + same fileName → return existing file # Same hash with different name is allowed (intentional copy by user) existingFile = self.checkForDuplicateFile(fileHash, fileName) if existingFile: logger.info(f"Duplicate detected for user {self.userId}: '{fileName}' with hash {fileHash[:12]}... 
— returning existing file {existingFile.id}") return existingFile, "exact_duplicate" - + # Determine MIME type mimeType = self.getMimeType(fileName) - + # createFile handles its own duplicate check (for calls from other code paths) # Here we already checked, so this will create a new file logger.debug(f"Saving file metadata to database for file: {fileName}") fileItem = self.createFile( name=fileName, mimeType=mimeType, - content=fileContent + content=fileContent, + folderId=folderId, ) # Save binary data diff --git a/modules/interfaces/interfaceRbac.py b/modules/interfaces/interfaceRbac.py index b8a87ba9..14953ef1 100644 --- a/modules/interfaces/interfaceRbac.py +++ b/modules/interfaces/interfaceRbac.py @@ -393,6 +393,13 @@ def getRecordsetPaginatedWithRBAC( continue if key not in validColumns: continue + if val is None: + # val=None in pagination.filters means "match empty/null" + # (same convention as connectorDbPostgre._buildPaginationClauses). + # Covers both historical empty-string values and true NULLs + # e.g. root-folder files where folderId may be "" or NULL. + whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')') + continue if isinstance(val, dict): op = val.get("operator", "equals") v = val.get("value", "") @@ -569,6 +576,13 @@ def getDistinctColumnValuesWithRBAC( continue if key not in validColumns: continue + if val is None: + # val=None in pagination.filters means "match empty/null" + # (same convention as connectorDbPostgre._buildPaginationClauses). + # Covers both historical empty-string values and true NULLs + # e.g. root-folder files where folderId may be "" or NULL. + whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')') + continue if isinstance(val, dict): op = val.get("operator", "equals") v = val.get("value", "") diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py index 73123988..290be722 100644 --- a/modules/routes/routeDataConnections.py +++ b/modules/routes/routeDataConnections.py @@ -427,14 +427,54 @@ def update_connection( detail=routeApiMsg("Connection not found") ) - # Update connection fields + # Merge incoming changes into a dict and re-validate via pydantic. + # Direct setattr() bypasses type coercion (PowerOnModel doesn't enable + # validate_assignment), which leaves enum fields as raw strings and + # later breaks .value access. Also filters out computed / unknown keys. + writableFields = set(UserConnection.model_fields.keys()) + previous = connection.model_dump() + merged = dict(previous) for field, value in connection_data.items(): - if hasattr(connection, field): - setattr(connection, field, value) - - # Update lastChecked timestamp using UTC timestamp - connection.lastChecked = getUtcTimestamp() - + if field in writableFields: + merged[field] = value + merged["lastChecked"] = getUtcTimestamp() + connection = UserConnection.model_validate(merged) + + # If this is a remote (non-local) connection and any identity-bearing + # field changed, the stored OAuth tokens no longer match the account. + # Force the user to reconnect: mark PENDING and revoke existing tokens. 
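The val=None convention added to both RBAC helpers can be exercised in isolation. A minimal sketch, assuming a simplified filter dict and psycopg2-style placeholders (`filtersToWhere` is illustrative, not a helper in this PR):

```python
from typing import Any, Dict, List, Set, Tuple


def filtersToWhere(filters: Dict[str, Any], validColumns: Set[str]) -> Tuple[str, List[Any]]:
    """val=None matches both true NULLs and legacy empty strings."""
    conditions: List[str] = []
    values: List[Any] = []
    for key, val in filters.items():
        if key not in validColumns:
            continue
        if val is None:
            # root-folder case: folderId may be NULL or a legacy ""
            conditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
            continue
        conditions.append(f'"{key}"::TEXT = %s')
        values.append(str(val))
    return (" WHERE " + " AND ".join(conditions)) if conditions else "", values


where, params = filtersToWhere({"folderId": None}, {"folderId"})
# where == ' WHERE ("folderId" IS NULL OR "folderId"::TEXT = \'\')', params == []
```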
diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py
index 73123988..290be722 100644
--- a/modules/routes/routeDataConnections.py
+++ b/modules/routes/routeDataConnections.py
@@ -427,14 +427,54 @@ def update_connection(
             detail=routeApiMsg("Connection not found")
         )
 
-    # Update connection fields
+    # Merge incoming changes into a dict and re-validate via pydantic.
+    # Direct setattr() bypasses type coercion (PowerOnModel doesn't enable
+    # validate_assignment), which leaves enum fields as raw strings and
+    # later breaks .value access. Also filters out computed / unknown keys.
+    writableFields = set(UserConnection.model_fields.keys())
+    previous = connection.model_dump()
+    merged = dict(previous)
     for field, value in connection_data.items():
-        if hasattr(connection, field):
-            setattr(connection, field, value)
-
-    # Update lastChecked timestamp using UTC timestamp
-    connection.lastChecked = getUtcTimestamp()
-
+        if field in writableFields:
+            merged[field] = value
+    merged["lastChecked"] = getUtcTimestamp()
+    connection = UserConnection.model_validate(merged)
+
+    # If this is a remote (non-local) connection and any identity-bearing
+    # field changed, the stored OAuth tokens no longer match the account.
+    # Force the user to reconnect: mark PENDING and revoke existing tokens.
+    identityFields = ("externalUsername", "externalEmail", "externalId", "authority")
+    authorityValue = (
+        connection.authority.value
+        if hasattr(connection.authority, "value")
+        else str(connection.authority)
+    )
+    isRemote = authorityValue != AuthAuthority.LOCAL.value
+    identityChanged = any(
+        previous.get(field) != merged.get(field) for field in identityFields
+    )
+    if isRemote and identityChanged:
+        connection.status = ConnectionStatus.PENDING
+        connection.expiresAt = None
+        try:
+            existingTokens = interface.db.getRecordset(
+                Token, recordFilter={"connectionId": connectionId}
+            )
+            for token in existingTokens:
+                interface.revokeTokenById(
+                    token["id"],
+                    revokedBy=currentUser.id,
+                    reason="connection identity changed",
+                )
+            logger.info(
+                f"Revoked {len(existingTokens)} token(s) for connection "
+                f"{connectionId} after identity change; reconnect required."
+            )
+        except Exception as e:
+            logger.warning(
+                f"Failed to revoke tokens for connection {connectionId}: {str(e)}"
+            )
+
     # Update connection - models now handle timestamp serialization automatically
     interface.db.recordModify(UserConnection, connectionId, connection.model_dump())
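The merge-and-revalidate rationale in the comment above is easy to verify with toy models: pydantic v2 leaves plain attribute assignment unvalidated unless validate_assignment is enabled, which is exactly the `.value` breakage described. `Status` and `Conn` below are stand-ins, not the real ConnectionStatus/UserConnection:

```python
from enum import Enum

from pydantic import BaseModel


class Status(str, Enum):          # stand-in for the real ConnectionStatus
    ACTIVE = "active"
    PENDING = "pending"


class Conn(BaseModel):            # like PowerOnModel, validate_assignment is off
    status: Status = Status.ACTIVE


c = Conn()
c.status = "pending"              # plain setattr: stays a raw str,
print(type(c.status).__name__)    # -> 'str'; c.status.value would raise AttributeError

merged = {**Conn().model_dump(), "status": "pending"}
c2 = Conn.model_validate(merged)  # re-validation coerces back to the enum
print(type(c2.status).__name__)   # -> 'Status'
```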
diff --git a/modules/routes/routeDataFiles.py b/modules/routes/routeDataFiles.py
index 544f0085..439bfce5 100644
--- a/modules/routes/routeDataFiles.py
+++ b/modules/routes/routeDataFiles.py
@@ -243,8 +243,16 @@ def get_files(
 
     recordFilter = None
     if paginationParams and paginationParams.filters and "folderId" in paginationParams.filters:
-        fVal = paginationParams.filters.pop("folderId")
-        recordFilter = {"folderId": fVal}
+        fVal = paginationParams.filters.get("folderId")
+        # For a concrete folderId we use recordFilter (exact equality).
+        # For null / empty (= "root") we keep it in pagination.filters so the
+        # connector applies `IS NULL OR = ''` – files predating the folderId
+        # fix were stored with an empty string instead of NULL.
+        if fVal is None or (isinstance(fVal, str) and fVal.strip() == ""):
+            paginationParams.filters["folderId"] = None
+        else:
+            paginationParams.filters.pop("folderId")
+            recordFilter = {"folderId": fVal}
 
     result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter)
 
@@ -282,13 +290,19 @@ async def upload_file(
     file: UploadFile = File(...),
     workflowId: Optional[str] = Form(None),
     featureInstanceId: Optional[str] = Form(None),
-    currentUser: User = Depends(getCurrentUser)
+    folderId: Optional[str] = Form(None),
+    currentUser: User = Depends(getCurrentUser),
+    context: RequestContext = Depends(getRequestContext),
 ) -> JSONResponse:
     # Add fileName property to UploadFile for consistency with backend model
     file.fileName = file.filename
     """Upload a file"""
     try:
-        managementInterface = interfaceDbManagement.getInterface(currentUser)
+        managementInterface = interfaceDbManagement.getInterface(
+            currentUser,
+            mandateId=str(context.mandateId) if context.mandateId else None,
+            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
+        )
 
         # Read file
         fileContent = await file.read()
@@ -301,12 +315,29 @@ async def upload_file(
                 detail=f"File too large. Maximum size: {interfaceDbManagement.APP_CONFIG.get('File_Management_MAX_UPLOAD_SIZE_MB')}MB"
             )
 
+        # Normalize folderId: empty string / "null" / "root" → None (root folder)
+        normalizedFolderId: Optional[str] = folderId
+        if isinstance(normalizedFolderId, str):
+            trimmed = normalizedFolderId.strip()
+            if not trimmed or trimmed.lower() in {"null", "none", "root"}:
+                normalizedFolderId = None
+            else:
+                normalizedFolderId = trimmed
+
         # Save file via LucyDOM interface in the database
-        fileItem, duplicateType = managementInterface.saveUploadedFile(fileContent, file.filename)
+        fileItem, duplicateType = managementInterface.saveUploadedFile(
+            fileContent, file.filename, folderId=normalizedFolderId
+        )
 
         if featureInstanceId and not fileItem.featureInstanceId:
             managementInterface.updateFile(fileItem.id, {"featureInstanceId": featureInstanceId})
             fileItem.featureInstanceId = featureInstanceId
+
+        # For exact duplicates we keep the existing record, but move it into the
+        # target folder so the user actually sees their upload land where they expect.
+        if duplicateType == "exact_duplicate" and normalizedFolderId != getattr(fileItem, "folderId", None):
+            managementInterface.updateFile(fileItem.id, {"folderId": normalizedFolderId})
+            fileItem.folderId = normalizedFolderId
 
         # Determine response message based on duplicate type
         if duplicateType == "exact_duplicate":
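The inline folderId normalization above, extracted into a testable helper. This is a sketch mirroring the route's logic; `normalizeFolderId` is not a name introduced by this PR:

```python
from typing import Optional


def normalizeFolderId(folderId: Optional[str]) -> Optional[str]:
    """'' / 'null' / 'none' / 'root' (any case, padded) all mean the root folder."""
    if not isinstance(folderId, str):
        return folderId
    trimmed = folderId.strip()
    if not trimmed or trimmed.lower() in {"null", "none", "root"}:
        return None
    return trimmed


assert normalizeFolderId("  ") is None
assert normalizeFolderId("NULL") is None
assert normalizeFolderId(" f-123 ") == "f-123"
```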
diff --git a/modules/serviceCenter/services/serviceAgent/coreTools/_connectionTools.py b/modules/serviceCenter/services/serviceAgent/coreTools/_connectionTools.py
index 7073429f..b0381da2 100644
--- a/modules/serviceCenter/services/serviceAgent/coreTools/_connectionTools.py
+++ b/modules/serviceCenter/services/serviceAgent/coreTools/_connectionTools.py
@@ -9,6 +9,7 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResul
 from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
 
 from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
+    _buildResolverDbFromServices,
     _getOrCreateTempFolder,
     _looksLikeBinary,
     _resolveFileScope,
@@ -22,20 +23,6 @@ def _registerConnectionTools(registry: ToolRegistry, services):
     """Auto-extracted from registerCoreTools."""
 
     # ---- Connection tools (external data sources) ----
-    def _buildResolverDb():
-        """Build a DB adapter that ConnectorResolver can use to load UserConnections.
-        interfaceDbApp has getUserConnectionById; ConnectorResolver expects getUserConnection."""
-        chatService = services.chat
-        appIf = getattr(chatService, "interfaceDbApp", None)
-        if appIf and hasattr(appIf, "getUserConnectionById"):
-            class _Adapter:
-                def __init__(self, app):
-                    self._app = app
-                def getUserConnection(self, connectionId: str):
-                    return self._app.getUserConnectionById(connectionId)
-            return _Adapter(appIf)
-        return getattr(chatService, "interfaceDbComponent", None)
-
     async def _listConnections(args: Dict[str, Any], context: Dict[str, Any]):
         try:
             chatService = services.chat
@@ -49,7 +36,12 @@ def _registerConnectionTools(registry: ToolRegistry, services):
                 authorityVal = authority.value if hasattr(authority, "value") else str(authority)
                 username = conn.get("externalUsername", "") if isinstance(conn, dict) else getattr(conn, "externalUsername", "")
                 email = conn.get("externalEmail", "") if isinstance(conn, dict) else getattr(conn, "externalEmail", "")
-                lines.append(f"- connectionId: {connId} | {authorityVal} | {username} ({email})")
+                cid = conn.get("id", "") if isinstance(conn, dict) else getattr(conn, "id", "")
+                ref = f"connection:{authorityVal}:{username}"
+                lines.append(
+                    f"- {ref} connectionId={cid} ({email}) "
+                    f"(use the full 'connection:...' reference or the connectionId as connectionReference)"
+                )
             return ToolResult(toolCallId="", toolName="listConnections", success=True, data="\n".join(lines))
         except Exception as e:
             return ToolResult(toolCallId="", toolName="listConnections", success=False, error=str(e))
@@ -65,7 +57,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
             from modules.connectors.connectorResolver import ConnectorResolver
             resolver = ConnectorResolver(
                 services.getService("security"),
-                _buildResolverDb(),
+                _buildResolverDbFromServices(services),
             )
             adapter = await resolver.resolveService(connectionId, service)
             chatService = services.chat
@@ -115,7 +107,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
             from modules.connectors.connectorResolver import ConnectorResolver
             resolver = ConnectorResolver(
                 services.getService("security"),
-                _buildResolverDb(),
+                _buildResolverDbFromServices(services),
            )
             adapter = await resolver.resolveService(connectionId, "outlook")
 
diff --git a/modules/serviceCenter/services/serviceAgent/coreTools/_dataSourceTools.py b/modules/serviceCenter/services/serviceAgent/coreTools/_dataSourceTools.py
index 2396560e..de64de5f 100644
--- a/modules/serviceCenter/services/serviceAgent/coreTools/_dataSourceTools.py
+++ b/modules/serviceCenter/services/serviceAgent/coreTools/_dataSourceTools.py
@@ -9,6 +9,7 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResul
 from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
 
 from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
+    _buildResolverDbFromServices,
     _getOrCreateTempFolder,
     _looksLikeBinary,
     _resolveFileScope,
@@ -88,7 +89,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
             from modules.connectors.connectorResolver import ConnectorResolver
             resolver = ConnectorResolver(
                 services.getService("security"),
-                _buildResolverDb(),
+                _buildResolverDbFromServices(services),
             )
             adapter = await resolver.resolveService(connectionId, service)
             entries = await adapter.browse(browsePath, filter=args.get("filter"))
@@ -124,7 +125,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
             from modules.connectors.connectorResolver import ConnectorResolver
             resolver = ConnectorResolver(
                 services.getService("security"),
-                _buildResolverDb(),
+                _buildResolverDbFromServices(services),
             )
             adapter = await resolver.resolveService(connectionId, service)
             entries = await adapter.search(query, path=basePath)
@@ -160,7 +161,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
             fullPath = filePath if filePath.startswith("/") else f"{basePath.rstrip('/')}/{filePath}"
             resolver = ConnectorResolver(
                 services.getService("security"),
-                _buildResolverDb(),
+                _buildResolverDbFromServices(services),
             )
             adapter = await resolver.resolveService(connectionId, service)
             result = await adapter.download(fullPath)
diff --git a/modules/serviceCenter/services/serviceAgent/coreTools/_featureSubAgentTools.py b/modules/serviceCenter/services/serviceAgent/coreTools/_featureSubAgentTools.py
index 51c3c3d5..ede4298b 100644
--- a/modules/serviceCenter/services/serviceAgent/coreTools/_featureSubAgentTools.py
+++ b/modules/serviceCenter/services/serviceAgent/coreTools/_featureSubAgentTools.py
@@ -213,13 +213,9 @@ def _registerFeatureSubAgentTools(registry: ToolRegistry, services):
         "queryFeatureInstance",
         _queryFeatureInstance,
         description=(
             "Query data from a feature instance (e.g. Trustee, CommCoach). "
-            "Delegates to a specialized sub-agent that knows the feature's data schema "
-            "and can browse, filter, and aggregate its tables. Use this when the user "
-            "has attached feature data sources or asks about feature-specific data.\n\n"
-            "GUIDELINES:\n"
-            "- Ask a precise, self-contained question (include all context the sub-agent needs).\n"
-            "- Combine related data needs into ONE call instead of multiple small ones.\n"
-            "- Avoid calling this tool repeatedly with slight variations of the same question."
+            "Delegates to a sub-agent that knows the feature schema. "
+            "Requires the feature instance id from attached feature data sources. "
+            "Ask one precise, self-contained question per call."
         ),
         parameters={
             "type": "object",
diff --git a/modules/serviceCenter/services/serviceAgent/coreTools/_helpers.py b/modules/serviceCenter/services/serviceAgent/coreTools/_helpers.py
index c8793775..6919ca18 100644
--- a/modules/serviceCenter/services/serviceAgent/coreTools/_helpers.py
+++ b/modules/serviceCenter/services/serviceAgent/coreTools/_helpers.py
@@ -3,7 +3,7 @@
 """Shared helpers for core agent tools (file scope, binary detection, temp folder)."""
 
 import logging
-from typing import Optional
+from typing import Any, Optional
 
 logger = logging.getLogger(__name__)
 
@@ -77,3 +77,23 @@ def _getOrCreateTempFolder(chatService) -> Optional[str]:
         logger.warning(f"Could not get/create Temp folder: {e}")
         return None
 
+
+def _buildResolverDbFromServices(services: Any):
+    """DB adapter for ConnectorResolver: load UserConnections by id.
+
+    interfaceDbApp exposes getUserConnectionById; ConnectorResolver expects getUserConnection.
+    """
+    chatService = services.chat
+    appIf = getattr(chatService, "interfaceDbApp", None)
+    if appIf and hasattr(appIf, "getUserConnectionById"):
+
+        class _Adapter:
+            def __init__(self, app):
+                self._app = app
+
+            def getUserConnection(self, connectionId: str):
+                return self._app.getUserConnectionById(connectionId)
+
+        return _Adapter(appIf)
+    return getattr(chatService, "interfaceDbComponent", None)
+
diff --git a/modules/serviceCenter/services/serviceAgent/mainServiceAgent.py b/modules/serviceCenter/services/serviceAgent/mainServiceAgent.py
index 3638ccf6..9094e952 100644
--- a/modules/serviceCenter/services/serviceAgent/mainServiceAgent.py
+++ b/modules/serviceCenter/services/serviceAgent/mainServiceAgent.py
@@ -3,7 +3,7 @@
 """Agent service: entry point for running AI agents with tool use."""
 
 import logging
-from typing import Any, Callable, Dict, List, Optional, AsyncGenerator
+from typing import Any, Callable, Dict, List, Optional, Set, AsyncGenerator
 
 from modules.datamodels.datamodelAi import (
     AiCallRequest, AiCallOptions, AiCallResponse, OperationTypeEnum
@@ -23,6 +23,40 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import (
 
 logger = logging.getLogger(__name__)
 
+
+def _toolbox_connection_authorities(services: "_ServicesAdapter") -> List[str]:
+    """Collect connection authority strings for toolbox gating (requiresConnection).
+
+    The optional ``connection`` service is not always registered; fall back to
+    ``chat.getUserConnections()`` (same source as workspace UI).
+    Toolbox entries use ``microsoft`` while UserConnection may store ``msft``.
+    """
+    seen: Set[str] = set()
+    try:
+        conn_svc = services.getService("connection")
+        if conn_svc and hasattr(conn_svc, "getConnections"):
+            for c in conn_svc.getConnections() or []:
+                auth = c.get("authority") if isinstance(c, dict) else getattr(c, "authority", None)
+                val = auth.value if hasattr(auth, "value") else str(auth or "")
+                if val:
+                    seen.add(val)
+    except Exception:
+        pass
+    try:
+        chat = services.chat
+        if chat and hasattr(chat, "getUserConnections"):
+            for c in chat.getUserConnections() or []:
+                auth = c.get("authority") if isinstance(c, dict) else getattr(c, "authority", None)
+                val = auth.value if hasattr(auth, "value") else str(auth or "")
+                if val:
+                    seen.add(val)
+    except Exception as e:
+        logger.debug("toolbox authorities from chat: %s", e)
+    if "msft" in seen:
+        seen.add("microsoft")
+    return list(seen)
+
+
 class _ServicesAdapter:
     """Adapter providing service access from (context, get_service)."""
 
@@ -61,10 +95,33 @@ class _ServicesAdapter:
     def extraction(self):
         return self._getService("extraction")
 
+    @property
+    def rbac(self):
+        """Same RbacClass as workflow hub (MethodBase permission checks during discoverMethods)."""
+        try:
+            chat_svc = self.chat
+            app = getattr(chat_svc, "interfaceDbApp", None)
+            if app is not None:
+                return getattr(app, "rbac", None)
+        except Exception:
+            return None
+        return None
+
     def getService(self, name: str):
         """Access any service by name."""
         return self._getService(name)
 
+    def __getattr__(self, name: str):
+        """Resolve e.g. services.clickup for MethodClickup / ActionExecutor (discoverMethods)."""
+        if name.startswith("_"):
+            raise AttributeError(name)
+        try:
+            return self._getService(name)
+        except KeyError:
+            raise AttributeError(
+                f"{type(self).__name__!r} object has no attribute {name!r}"
+            ) from None
+
     @property
     def featureCode(self) -> Optional[str]:
         w = self.workflow
@@ -268,7 +325,12 @@ class AgentService:
         try:
             from modules.workflows.processing.shared.methodDiscovery import discoverMethods
+
+            discoverMethods(self.services)
+        except Exception as e:
+            logger.warning("discoverMethods failed before action tools: %s", e)
+
+        try:
             from modules.workflows.processing.core.actionExecutor import ActionExecutor
             actionExecutor = ActionExecutor(self.services)
             adapter = ActionToolAdapter(actionExecutor)
@@ -293,7 +355,7 @@ class AgentService:
             from modules.serviceCenter.services.serviceAgent.toolboxRegistry import getToolboxRegistry
             tbRegistry = getToolboxRegistry()
 
-            userConnections: List[str] = []
+            userConnections: List[str] = _toolbox_connection_authorities(self.services)
             try:
                 chatService = self.services.chat if hasattr(self.services, "chat") else None
                 if chatService and hasattr(chatService, "getUserConnections"):
@@ -301,7 +363,7 @@ class AgentService:
                     for c in connections:
                         authority = c.get("authority", "") if isinstance(c, dict) else getattr(c, "authority", "")
                         authorityVal = authority.value if hasattr(authority, "value") else str(authority)
-                        if authorityVal:
+                        if authorityVal and authorityVal not in userConnections:
                             userConnections.append(authorityVal)
             except Exception as e:
                 logger.debug("Could not resolve user connections for toolbox activation: %s", e)
@@ -386,8 +448,13 @@ class AgentService:
         except Exception:
             pass
         try:
-            from modules.serviceCenter.services.serviceAgent.actionToolAdapter import ActionToolAdapter
+            from modules.workflows.processing.shared.methodDiscovery import discoverMethods
             from modules.workflows.processing.core.actionExecutor import ActionExecutor
+            from modules.serviceCenter.services.serviceAgent.actionToolAdapter import (
+                ActionToolAdapter,
+            )
+
+            discoverMethods(self.services)
             adapter = ActionToolAdapter(ActionExecutor(self.services))
             adapter.registerAll(registry)
             if registry.isValidTool(toolName):
diff --git a/modules/serviceCenter/services/serviceAgent/toolboxRegistry.py b/modules/serviceCenter/services/serviceAgent/toolboxRegistry.py
index d05cfded..344d6d10 100644
--- a/modules/serviceCenter/services/serviceAgent/toolboxRegistry.py
+++ b/modules/serviceCenter/services/serviceAgent/toolboxRegistry.py
@@ -173,7 +173,13 @@ def _registerDefaultToolboxes() -> None:
             requiresConnection="clickup",
             isDefault=False,
             tools=[
-                "clickup_searchTasks", "clickup_createTask", "clickup_updateTask",
+                "clickup_listTasks",
+                "clickup_listFields",
+                "clickup_searchTasks",
+                "clickup_getTask",
+                "clickup_createTask",
+                "clickup_updateTask",
+                "clickup_uploadAttachment",
             ],
         ),
         ToolboxDefinition(
diff --git a/modules/system/mainSystem.py b/modules/system/mainSystem.py
index 3eb0d981..3a361ab9 100644
--- a/modules/system/mainSystem.py
+++ b/modules/system/mainSystem.py
@@ -92,6 +92,7 @@ NAVIGATION_SECTIONS = [
                 "icon": "FaLink",
                 "path": "/basedata/connections",
                 "order": 10,
+                "public": True,
             },
             {
                 "id": "files",
@@ -100,6 +101,7 @@ NAVIGATION_SECTIONS = [
                 "icon": "FaRegFileAlt",
                 "path": "/basedata/files",
                 "order": 20,
+                "public": True,
             },
             {
                 "id": "prompts",
@@ -108,6 +110,7 @@ NAVIGATION_SECTIONS = [
                 "icon": "FaLightbulb",
                 "path": "/basedata/prompts",
                 "order": 30,
+                "public": True,
             },
         ],
     },
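The msft/microsoft special case in `_toolbox_connection_authorities` generalizes to an alias table if more authorities ever diverge from their toolbox names. A sketch; `ALIASES` and `expandAuthorities` are hypothetical, not part of this PR, which hard-codes the single msft case:

```python
from typing import List, Set

# assumed single alias today, per the helper's docstring
ALIASES = {"msft": "microsoft"}


def expandAuthorities(raw: Set[str]) -> List[str]:
    """A toolbox whose requiresConnection names the canonical authority still
    activates when the stored UserConnection uses the legacy value."""
    out = set(raw)
    for legacy, canonical in ALIASES.items():
        if legacy in out:
            out.add(canonical)
    return sorted(out)


print(expandAuthorities({"msft", "clickup"}))  # ['clickup', 'microsoft', 'msft']
```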
diff --git a/modules/workflows/methods/methodClickup/actions/list_fields.py b/modules/workflows/methods/methodClickup/actions/list_fields.py
new file mode 100644
index 00000000..851437d7
--- /dev/null
+++ b/modules/workflows/methods/methodClickup/actions/list_fields.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2025 Patrick Motsch
+# All rights reserved.
+
+import json
+import logging
+from typing import Any, Dict
+
+from modules.datamodels.datamodelChat import ActionDocument, ActionResult
+from ..helpers.pathparse import parse_team_and_list
+
+logger = logging.getLogger(__name__)
+
+
+async def list_fields(self, parameters: Dict[str, Any]) -> ActionResult:
+    """Return ClickUp custom / built-in field definitions for a list (GET /list/{id}/field)."""
+    connection_reference = parameters.get("connectionReference")
+    path_query = (parameters.get("pathQuery") or parameters.get("path") or "").strip()
+    list_id_param = (parameters.get("listId") or "").strip()
+
+    if not connection_reference:
+        return ActionResult.isFailure(error="connectionReference is required")
+
+    conn = self.connection.get_clickup_connection(connection_reference)
+    if not conn:
+        return ActionResult.isFailure(error="No valid ClickUp connection")
+
+    list_id = list_id_param
+    team_id = ""
+    if not list_id:
+        if not path_query:
+            return ActionResult.isFailure(
+                error="Provide listId or pathQuery like /team/{teamId}/list/{listId}"
+            )
+        team_id, list_id = parse_team_and_list(path_query)
+        if not list_id:
+            return ActionResult.isFailure(
+                error="path must be /team/{teamId}/list/{listId} (same as list picker / data source path)"
+            )
+
+    data = await self.services.clickup.getListFields(list_id)
+    if isinstance(data, dict) and data.get("error"):
+        return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))
+
+    doc = ActionDocument(
+        documentName="clickup_list_fields.json",
+        documentData=json.dumps(data, ensure_ascii=False, indent=2),
+        mimeType="application/json",
+        validationMetadata={
+            "actionType": "clickup.listFields",
+            "teamId": team_id,
+            "listId": list_id,
+            "path": path_query or f"/list/{list_id}",
+        },
+    )
+    return ActionResult.isSuccess(documents=[doc])
diff --git a/modules/workflows/methods/methodClickup/helpers/connection.py b/modules/workflows/methods/methodClickup/helpers/connection.py
index d9b6d4d7..cdcd3601 100644
--- a/modules/workflows/methods/methodClickup/helpers/connection.py
+++ b/modules/workflows/methods/methodClickup/helpers/connection.py
@@ -3,28 +3,44 @@
 """Resolve ClickUp UserConnection and configure ClickupService."""
 
 import logging
+import re
 from typing import Any, Dict, Optional
 
 logger = logging.getLogger(__name__)
 
+_UUID_RE = re.compile(
+    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
+    re.IGNORECASE,
+)
+
 
 class ClickupConnectionHelper:
     def __init__(self, method_instance: Any):
         self.method = method_instance
         self.services = method_instance.services
 
+    def _normalize_connection_reference(self, ref: str) -> str:
+        """Match listConnections / getUserConnectionFromConnectionReference formats."""
+        if ref.startswith("connection:"):
+            return ref
+        if _UUID_RE.match(ref):
+            return ref
+        # LLMs often copy "clickup:username" without the connection: prefix
+        if ":" in ref:
+            return f"connection:{ref}"
+        return ref
+
     def get_clickup_connection(self, connection_reference: str) -> Optional[Dict[str, Any]]:
         try:
             ref = (connection_reference or "").split(" [")[0].strip()
             if not ref:
                 return None
-            user_connection = None
-            if ref.startswith("connection:"):
-                user_connection = self.services.chat.getUserConnectionFromConnectionReference(ref)
-            else:
-                app = getattr(self.services, "interfaceDbApp", None)
-                if app and hasattr(app, "getUserConnectionById"):
-                    user_connection = app.getUserConnectionById(ref)
+            ref = self._normalize_connection_reference(ref)
+            chat = getattr(self.services, "chat", None)
+            if not chat or not hasattr(chat, "getUserConnectionFromConnectionReference"):
+                logger.warning("Chat service missing; cannot resolve ClickUp connection")
+                return None
+            user_connection = chat.getUserConnectionFromConnectionReference(ref)
             if not user_connection:
                 logger.warning("No user connection for reference/id %s", connection_reference)
                 return None
diff --git a/modules/workflows/methods/methodClickup/methodClickup.py b/modules/workflows/methods/methodClickup/methodClickup.py
index 00c658a5..05eba50d 100644
--- a/modules/workflows/methods/methodClickup/methodClickup.py
+++ b/modules/workflows/methods/methodClickup/methodClickup.py
@@ -10,6 +10,7 @@ from modules.workflows.methods.methodBase import MethodBase
 
 from .helpers.connection import ClickupConnectionHelper
 from .actions.list_tasks import list_tasks
+from .actions.list_fields import list_fields
 from .actions.search_tasks import search_tasks
 from .actions.get_task import get_task
 from .actions.create_task import create_task
@@ -67,6 +68,35 @@ class MethodClickup(MethodBase):
                 },
                 execute=list_tasks.__get__(self, self.__class__),
             ),
+            "listFields": WorkflowActionDefinition(
+                actionId="clickup.listFields",
+                description="List custom and built-in field definitions for a ClickUp list (names, types, ids)",
+                dynamicMode=True,
+                parameters={
+                    "connectionReference": WorkflowActionParameter(
+                        name="connectionReference",
+                        type="str",
+                        frontendType=FrontendType.USER_CONNECTION,
+                        required=True,
+                        description="ClickUp connection",
+                    ),
+                    "listId": WorkflowActionParameter(
+                        name="listId",
+                        type="str",
+                        frontendType=FrontendType.TEXT,
+                        required=False,
+                        description="ClickUp list ID (if set, pathQuery is optional)",
+                    ),
+                    "pathQuery": WorkflowActionParameter(
+                        name="pathQuery",
+                        type="str",
+                        frontendType=FrontendType.TEXT,
+                        required=False,
+                        description="Virtual path /team/{teamId}/list/{listId} (same as data source path)",
+                    ),
+                },
+                execute=list_fields.__get__(self, self.__class__),
+            ),
             "searchTasks": WorkflowActionDefinition(
                 actionId="clickup.searchTasks",
                 description="Search tasks in a ClickUp workspace (team)",
diff --git a/requirements.lock b/requirements.lock
index d55fc5f7..b4ae2798 100644
--- a/requirements.lock
+++ b/requirements.lock
@@ -376,7 +376,7 @@ protobuf==4.21.12
     # grpcio-status
     # mysql-connector-python
     # proto-plus
-psycopg2-binary==2.9.9
+psycopg2-binary==2.9.9 ; python_version < "3.13"
     # via -r requirements.txt
 pure-eval==0.2.3
     # via stack-data
diff --git a/requirements.txt b/requirements.txt
index cb1dd467..f5ffb715 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -103,7 +103,8 @@ pyviz-comms>=2.0.0
 xyzservices>=2021.09.1
 
 # PostgreSQL connector dependencies
-psycopg2-binary==2.9.9
+psycopg2-binary==2.9.9; python_version < "3.13"
+psycopg2-binary==2.9.11; python_version >= "3.13"
 asyncpg==0.30.0
 
 ## Stripe payments
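The reference normalization in ClickupConnectionHelper accepts three input shapes. A standalone restatement with the accepted forms spelled out, using the same UUID regex as the PR (the function name here is illustrative):

```python
import re

_UUID_RE = re.compile(
    r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
    re.IGNORECASE,
)


def normalizeConnectionReference(ref: str) -> str:
    """Standalone restatement of _normalize_connection_reference."""
    if ref.startswith("connection:"):
        return ref                      # canonical "connection:clickup:jane"
    if _UUID_RE.match(ref):
        return ref                      # raw connectionId, resolvable as-is
    if ":" in ref:
        return f"connection:{ref}"      # "clickup:jane" copied without prefix
    return ref                          # anything else passes through unchanged


assert normalizeConnectionReference("clickup:jane") == "connection:clickup:jane"
assert normalizeConnectionReference(
    "123e4567-e89b-12d3-a456-426614174000"
) == "123e4567-e89b-12d3-a456-426614174000"
```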