From 4852059c7d06c82f6ae7edb70c2ea9c010ff3c80 Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Tue, 14 Apr 2026 16:15:32 +0200
Subject: [PATCH 1/2] Fix workflow action discovery (RBAC-free catalog) and add AI audit neutralization mappings
---
modules/features/graphicalEditor/portTypes.py | 3 +-
modules/routes/routeAudit.py | 82 ++++++++++++++
.../services/serviceAi/mainServiceAi.py | 103 +++++++++++++-----
.../workflows/processing/modes/modeDynamic.py | 21 +---
.../processing/shared/methodDiscovery.py | 26 ++++-
5 files changed, 188 insertions(+), 47 deletions(-)
diff --git a/modules/features/graphicalEditor/portTypes.py b/modules/features/graphicalEditor/portTypes.py
index d9ae2792..59ae01bf 100644
--- a/modules/features/graphicalEditor/portTypes.py
+++ b/modules/features/graphicalEditor/portTypes.py
@@ -292,7 +292,8 @@ def _extractDocuments(upstream: Dict[str, Any]) -> Dict[str, Any]:
docs = files
elif fileIds:
docs = [{"validationMetadata": {"fileId": fid}} for fid in fileIds]
- return {"documents": docs if isinstance(docs, list) else [docs]} if docs else {}
+ normalized = docs if isinstance(docs, list) else [docs]
+ return {"documents": normalized, "documentList": normalized} if docs else {}
def _extractText(upstream: Dict[str, Any]) -> Dict[str, Any]:
diff --git a/modules/routes/routeAudit.py b/modules/routes/routeAudit.py
index 6120bf78..7effc1fb 100644
--- a/modules/routes/routeAudit.py
+++ b/modules/routes/routeAudit.py
@@ -11,6 +11,7 @@ RBAC: mandate-admin or compliance-viewer role required.
"""
import logging
+import re
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
@@ -94,6 +95,28 @@ async def getAiAuditEntryContent(
result = aiAuditLogger.getAiAuditEntryContent(entryId, mandateId)
if not result:
raise HTTPException(status_code=404, detail=routeApiMsg("Audit-Eintrag nicht gefunden"))
+
+ _phRx = re.compile(r"\[([a-z]+)\.([a-f0-9-]{36})\]")
+ combinedText = (result.get("contentInputFull") or "") + (result.get("contentOutputFull") or "")
+ phIds = set(m.group(2) for m in _phRx.finditer(combinedText))
+ mappings = []
+
+ if phIds:
+ try:
+ from modules.features.neutralization.interfaceFeatureNeutralizer import getInterface as getNeutIf
+ neutIf = getNeutIf(context.user, mandateId=mandateId, featureInstanceId=None)
+ for phId in phIds:
+ attr = neutIf.getAttributeById(phId)
+ if attr:
+ mappings.append({
+ "id": attr.get("id", ""),
+ "originalText": attr.get("originalText", ""),
+ "patternType": attr.get("patternType", ""),
+ })
+ except Exception as mapErr:
+ logger.warning(f"Could not resolve neutralization mappings for audit entry {entryId}: {mapErr}")
+
+ result["neutralizationMappings"] = mappings
return result
@@ -147,3 +170,62 @@ async def getAuditStats(
from modules.shared.aiAuditLogger import aiAuditLogger
return aiAuditLogger.getAiAuditStats(mandateId, timeRangeDays=timeRange, groupBy=groupBy)
+
+
+# ── Tab D: Neutralization Mappings ──
+
+@router.get("/neutralization-mappings")
+@limiter.limit("120/minute")
+async def getNeutralizationMappings(
+ request: Request,
+ context: RequestContext = Depends(getRequestContext),
+ limit: int = Query(200, ge=1, le=2000),
+ offset: int = Query(0, ge=0),
+):
+ _requireAuditAccess(context)
+ mandateId = str(context.mandateId) if context.mandateId else ""
+ if not mandateId:
+ raise HTTPException(status_code=400, detail=routeApiMsg("Mandanten-ID erforderlich"))
+
+ try:
+ from modules.features.neutralization.interfaceFeatureNeutralizer import getInterface as getNeutIf
+ neutIf = getNeutIf(context.user, mandateId=mandateId, featureInstanceId=None)
+ attrs = neutIf.getNeutralizationAttributes()
+ items = [a.model_dump() if hasattr(a, "model_dump") else dict(a) for a in attrs]
+ for item in items:
+ pType = item.get("patternType", "")
+ uid = item.get("id", "")
+ item["placeholder"] = f"[{pType}.{uid}]" if pType and uid else uid
+ items.sort(key=lambda r: (r.get("patternType", ""), r.get("originalText", "")))
+ totalItems = len(items)
+ page = items[offset: offset + limit]
+ return {"items": page, "totalItems": totalItems}
+ except Exception as e:
+ logger.error(f"Failed to load neutralization mappings: {e}")
+ raise HTTPException(status_code=500, detail=routeApiMsg("Fehler beim Laden der Neutralisierungs-Zuordnungen"))
+
+
+@router.delete("/neutralization-mappings/{mappingId}")
+@limiter.limit("60/minute")
+async def deleteNeutralizationMapping(
+ request: Request,
+ mappingId: str = Path(...),
+ context: RequestContext = Depends(getRequestContext),
+):
+ _requireAuditAccess(context)
+ mandateId = str(context.mandateId) if context.mandateId else ""
+ if not mandateId:
+ raise HTTPException(status_code=400, detail=routeApiMsg("Mandanten-ID erforderlich"))
+
+ try:
+ from modules.features.neutralization.interfaceFeatureNeutralizer import getInterface as getNeutIf
+ neutIf = getNeutIf(context.user, mandateId=mandateId, featureInstanceId=None)
+ deleted = neutIf.deleteAttributeById(mappingId)
+ if not deleted:
+ raise HTTPException(status_code=404, detail=routeApiMsg("Zuordnung nicht gefunden"))
+ return {"success": True, "id": mappingId}
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Failed to delete neutralization mapping {mappingId}: {e}")
+ raise HTTPException(status_code=500, detail=routeApiMsg("Fehler beim Löschen der Zuordnung"))
diff --git a/modules/serviceCenter/services/serviceAi/mainServiceAi.py b/modules/serviceCenter/services/serviceAi/mainServiceAi.py
index 8619ead1..cd7de3e9 100644
--- a/modules/serviceCenter/services/serviceAi/mainServiceAi.py
+++ b/modules/serviceCenter/services/serviceAi/mainServiceAi.py
@@ -207,6 +207,8 @@ class AiService:
elif hasattr(response.metadata, '__dict__'):
response.metadata.neutralizationExcluded = _excludedDocs
+ self._writeAuditEntry(request, response, _wasNeutralized)
+
return response
async def callAiStream(self, request: AiCallRequest):
@@ -233,9 +235,11 @@ class AiService:
logger.debug("callAiStream: neutralization phase done, starting main AI stream")
self.aiObjects.billingCallback = self._createBillingCallback()
+ _finalResponse = None
try:
async for chunk in self.aiObjects.callWithTextContextStream(request):
if not isinstance(chunk, str):
+ _finalResponse = chunk
if _excludedDocs:
if not hasattr(chunk, 'metadata') or chunk.metadata is None:
chunk.metadata = {}
@@ -246,6 +250,8 @@ class AiService:
yield chunk
finally:
self.aiObjects.billingCallback = None
+ if _finalResponse:
+ self._writeAuditEntry(request, _finalResponse, _wasNeutralized)
async def callEmbedding(self, texts: List[str]) -> AiCallResponse:
"""Generate embeddings while respecting allowedProviders."""
@@ -1092,35 +1098,78 @@ detectedIntent-Werte:
f"provider={provider}, model={modelName}, error={e}"
)
- try:
- from modules.shared.aiAuditLogger import aiAuditLogger
-
- contentOut = getattr(response, 'content', None)
- metadata = getattr(response, 'metadata', None) or {}
- tokensUsed = metadata.get('tokensUsed') if isinstance(metadata, dict) else None
-
- aiAuditLogger.logAiCall(
- userId=user.id,
- mandateId=mandateId or "",
- aiProvider=provider,
- aiModel=modelName,
- username=getattr(user, 'username', None),
- featureInstanceId=featureInstanceId,
- featureCode=featureCode,
- operationType=metadata.get('operationType') if isinstance(metadata, dict) else None,
- tokensInput=tokensUsed.get('input') if isinstance(tokensUsed, dict) else None,
- tokensOutput=tokensUsed.get('output') if isinstance(tokensUsed, dict) else None,
- processingTimeMs=int(processingTime * 1000) if processingTime else None,
- priceCHF=basePriceCHF if basePriceCHF else None,
- contentOutput=str(contentOut)[:500] if contentOut else None,
- success=not hasError,
- errorMessage=str(getattr(response, 'errorMessage', None)) if hasError else None,
- )
- except Exception as e:
- logger.warning(f"AI audit log failed (non-critical): {e}")
-
return _billingCallback
+ def _writeAuditEntry(self, request, response, wasNeutralized: bool = False):
+ """Write a rich AI audit entry with input, output, and neutralization metadata."""
+ try:
+ from modules.shared.aiAuditLogger import aiAuditLogger
+
+ user = self.services.user
+ mandateId = self.services.mandateId
+ featureInstanceId = getattr(self.services, 'featureInstanceId', None)
+ featureCode = getattr(self.services, 'featureCode', None)
+
+ provider = getattr(response, 'provider', None) or 'unknown'
+ modelName = getattr(response, 'modelName', None) or 'unknown'
+ basePriceCHF = getattr(response, 'priceCHF', 0.0)
+ hasError = getattr(response, 'errorCount', 0) > 0
+ processingTime = getattr(response, 'processingTime', None)
+ metadata = getattr(response, 'metadata', None) or {}
+ tokensUsed = metadata.get('tokensUsed') if isinstance(metadata, dict) else None
+
+ inputParts = []
+ if request.prompt:
+ inputParts.append(f"[Prompt] {request.prompt}")
+ if request.context:
+ inputParts.append(f"[Context] {request.context}")
+ if request.messages and isinstance(request.messages, list):
+ for msg in request.messages:
+ role = msg.get("role", "?") if isinstance(msg, dict) else "?"
+ content = msg.get("content", "") if isinstance(msg, dict) else ""
+ if isinstance(content, str) and content:
+ inputParts.append(f"[{role}] {content}")
+ elif isinstance(content, list):
+ textParts = [p.get("text", "") for p in content if isinstance(p, dict) and p.get("type") == "text"]
+ if textParts:
+ inputParts.append(f"[{role}] {' '.join(textParts)}")
+ contentInput = "\n---\n".join(inputParts) if inputParts else None
+
+ contentOut = getattr(response, 'content', None)
+ contentOutput = str(contentOut) if contentOut else None
+
+ neutralSvc = self._get_service("neutralization") if wasNeutralized else None
+ mappingsCount = None
+ if neutralSvc and hasattr(neutralSvc, 'getActiveMappingsCount'):
+ try:
+ mappingsCount = neutralSvc.getActiveMappingsCount()
+ except Exception:
+ pass
+
+ aiAuditLogger.logAiCall(
+ userId=user.id,
+ mandateId=mandateId or "",
+ aiProvider=provider,
+ aiModel=modelName,
+ username=getattr(user, 'username', None),
+ featureInstanceId=featureInstanceId,
+ featureCode=featureCode,
+ operationType=metadata.get('operationType') if isinstance(metadata, dict) else None,
+ tokensInput=tokensUsed.get('input') if isinstance(tokensUsed, dict) else None,
+ tokensOutput=tokensUsed.get('output') if isinstance(tokensUsed, dict) else None,
+ processingTimeMs=int(processingTime * 1000) if processingTime else None,
+ priceCHF=basePriceCHF if basePriceCHF else None,
+ neutralizationActive=wasNeutralized,
+ neutralizationMappingsCount=mappingsCount,
+ contentInput=contentInput,
+ contentOutput=contentOutput,
+ storeFullContent=True,
+ success=not hasError,
+ errorMessage=str(getattr(response, 'errorMessage', None)) if hasError else None,
+ )
+ except Exception as e:
+ logger.warning(f"AI audit log failed (non-critical): {e}")
+
def _calculateEffectiveProviders(self) -> Optional[List[str]]:
"""
Calculate effective allowed providers: RBAC ∩ Workflow.
diff --git a/modules/workflows/processing/modes/modeDynamic.py b/modules/workflows/processing/modes/modeDynamic.py
index 67a32a64..49e19705 100644
--- a/modules/workflows/processing/modes/modeDynamic.py
+++ b/modules/workflows/processing/modes/modeDynamic.py
@@ -648,40 +648,31 @@ class DynamicMode(BaseMode):
methodName, actionName = compoundActionName.split('.', 1)
from modules.workflows.processing.shared.methodDiscovery import methods as _methods
if methodName in _methods:
- methodInstance = _methods[methodName]['instance']
- if actionName in methodInstance.actions:
- action_info = methodInstance.actions[actionName]
- # Use structured WorkflowActionParameter objects from new system
+ storedActions = _methods[methodName].get('actions', {})
+ if actionName in storedActions:
+ action_info = storedActions[actionName]
parameters_def = action_info.get('parameters', {})
if 'documentList' in parameters_def:
- # Convert DocumentReferenceList to string list for database serialization
- # Action methods will convert it back to DocumentReferenceList when needed
parameters['documentList'] = docList.to_string_list()
logger.info(f"Added documentList to parameters: {len(docList.references)} references")
elif 'documentList' not in parameters and isinstance(selection, dict) and 'parameters' in selection:
- # Fallback: if documentList is already in selection['parameters'] as a list, preserve it
- # This handles guided actions where documentList is already in the right format
docListParam = selection['parameters'].get('documentList')
if docListParam and isinstance(docListParam, list):
parameters['documentList'] = docListParam
logger.info(f"Preserved documentList from selection parameters: {len(docListParam)} references")
- # Use connectionReference from selection (required)
connectionRef = selection.get('connectionReference')
- # If not found at top level, check in selection['parameters'] (guided action case)
if not connectionRef and isinstance(selection, dict) and 'parameters' in selection:
connectionRef = selection['parameters'].get('connectionReference')
if connectionRef:
- # Check if action actually has connectionReference parameter
methodName, actionName = compoundActionName.split('.', 1)
from modules.workflows.processing.shared.methodDiscovery import methods as _methods
if methodName in _methods:
- methodInstance = _methods[methodName]['instance']
- if actionName in methodInstance.actions:
- action_info = methodInstance.actions[actionName]
- # Use structured WorkflowActionParameter objects from new system
+ storedActions = _methods[methodName].get('actions', {})
+ if actionName in storedActions:
+ action_info = storedActions[actionName]
parameters_def = action_info.get('parameters', {})
if 'connectionReference' in parameters_def:
parameters['connectionReference'] = connectionRef
diff --git a/modules/workflows/processing/shared/methodDiscovery.py b/modules/workflows/processing/shared/methodDiscovery.py
index b8403b3d..213dee83 100644
--- a/modules/workflows/processing/shared/methodDiscovery.py
+++ b/modules/workflows/processing/shared/methodDiscovery.py
@@ -16,6 +16,24 @@ logger = logging.getLogger(__name__)
# Global methods catalog - moved from serviceCenter
methods = {}
+def _collectActionsUnfiltered(methodInstance) -> Dict[str, Dict[str, Any]]:
+ """Collect actions from a method instance without RBAC filtering.
+
+ During discovery, services.rbac is not yet available (no user context).
+ RBAC is enforced at execution time by the ActionExecutor instead.
+ """
+ result = {}
+ if not hasattr(methodInstance, '_actions') or not methodInstance._actions:
+ return result
+ for actionName, actionDef in methodInstance._actions.items():
+ result[actionName] = {
+ 'description': actionDef.description,
+ 'parameters': methodInstance._convertParametersToSystemFormat(actionDef.parameters),
+ 'method': methodInstance._createActionWrapper(actionDef)
+ }
+ return result
+
+
def discoverMethods(serviceCenter):
"""Dynamically discover all method classes and their actions in modules methods package.
@@ -47,7 +65,7 @@ def discoverMethods(serviceCenter):
continue
methodInstance = item(serviceCenter)
- actions = methodInstance.actions
+ actions = _collectActionsUnfiltered(methodInstance)
methodInfo = {
'instance': methodInstance,
@@ -76,11 +94,11 @@ def getActionParameterList(methodName: str, actionName: str, methods: Dict[str,
if not methods or methodName not in methods:
return ""
- methodInstance = methods[methodName]['instance']
- if actionName not in methodInstance.actions:
+ storedActions = methods[methodName].get('actions', {})
+ if actionName not in storedActions:
return ""
- action_info = methodInstance.actions[actionName]
+ action_info = storedActions[actionName]
# Use structured WorkflowActionParameter objects from new system
parameters = action_info.get('parameters', {})
From 005b794a2a8aa1d7a43faf062ded963f0a96499f Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Tue, 14 Apr 2026 16:22:55 +0200
Subject: [PATCH 2/2] Fix document wire-handover (overwrite empty params) and add handover debug logging
---
.../automation2/executors/actionNodeExecutor.py | 15 ++++++++++++++-
.../methodTrustee/actions/processDocuments.py | 6 ++++++
2 files changed, 20 insertions(+), 1 deletion(-)
diff --git a/modules/workflows/automation2/executors/actionNodeExecutor.py b/modules/workflows/automation2/executors/actionNodeExecutor.py
index 7c85a757..0d5134f1 100644
--- a/modules/workflows/automation2/executors/actionNodeExecutor.py
+++ b/modules/workflows/automation2/executors/actionNodeExecutor.py
@@ -158,19 +158,23 @@ def _applyEmailSearchQuery(params: Dict) -> None:
def _wireHandover(nodeDef: Dict, inputSources: Dict, nodeOutputs: Dict, params: Dict) -> None:
"""Apply wire-handover: extract fields from upstream using INPUT_EXTRACTORS."""
if 0 not in inputSources:
+ logger.debug("_wireHandover: no port 0 in inputSources=%s", inputSources)
return
srcId, _ = inputSources[0]
upstream = nodeOutputs.get(srcId)
if not upstream or not isinstance(upstream, dict):
+ logger.debug("_wireHandover: upstream for %s is missing or not dict: %s", srcId, type(upstream))
return
data = _unwrapTransit(upstream)
if not isinstance(data, dict):
+ logger.debug("_wireHandover: unwrapped data is not dict: %s", type(data))
return
inputPorts = nodeDef.get("inputPorts", {})
port0 = inputPorts.get(0, {})
accepts = port0.get("accepts", [])
+ logger.debug("_wireHandover: srcId=%s accepts=%s upstream_keys=%s params_keys_before=%s", srcId, accepts, list(data.keys()), list(params.keys()))
for schemaName in accepts:
if schemaName == "Transit":
@@ -178,9 +182,15 @@ def _wireHandover(nodeDef: Dict, inputSources: Dict, nodeOutputs: Dict, params:
extractor = INPUT_EXTRACTORS.get(schemaName)
if extractor:
extracted = extractor(data)
+ logger.debug("_wireHandover: extractor %s returned keys=%s", schemaName, list(extracted.keys()) if extracted else None)
if extracted:
for k, v in extracted.items():
- params.setdefault(k, v)
+ existing = params.get(k)
+ if not existing:
+ params[k] = v
+ logger.debug("_wireHandover: set %s (was empty/missing) type=%s len=%s", k, type(v).__name__, len(v) if isinstance(v, (list, str, dict)) else "n/a")
+ else:
+ logger.debug("_wireHandover: skip %s (already has value, type=%s)", k, type(existing).__name__)
return
@@ -222,11 +232,14 @@ class ActionNodeExecutor:
# 1. Resolve parameters (DataRef, SystemVar, Static)
params = dict(node.get("parameters") or {})
+ logger.debug("ActionNodeExecutor node %s raw params keys=%s", nodeId, list(params.keys()))
resolvedParams = resolveParameterReferences(params, context.get("nodeOutputs", {}))
+ logger.debug("ActionNodeExecutor node %s resolved params keys=%s documentList_present=%s documentList_type=%s", nodeId, list(resolvedParams.keys()), "documentList" in resolvedParams, type(resolvedParams.get("documentList")).__name__)
# 2. Wire-handover via extractors (fills missing params from upstream)
inputSources = context.get("inputSources", {}).get(nodeId, {})
_wireHandover(nodeDef, inputSources, context.get("nodeOutputs", {}), resolvedParams)
+ logger.debug("ActionNodeExecutor node %s after wireHandover: params keys=%s documentList_present=%s documentList_type=%s", nodeId, list(resolvedParams.keys()), "documentList" in resolvedParams, type(resolvedParams.get("documentList")).__name__)
# 3. Apply defaults from parameter definitions
for pDef in nodeDef.get("parameters", []):
diff --git a/modules/workflows/methods/methodTrustee/actions/processDocuments.py b/modules/workflows/methods/methodTrustee/actions/processDocuments.py
index c4bf9df1..0d29c9bd 100644
--- a/modules/workflows/methods/methodTrustee/actions/processDocuments.py
+++ b/modules/workflows/methods/methodTrustee/actions/processDocuments.py
@@ -213,9 +213,11 @@ def _resolveDocumentList(documentListParam, services) -> List[tuple]:
if isinstance(documentListParam, list) and documentListParam:
first = documentListParam[0]
+ logger.debug("_resolveDocumentList: %d items, first type=%s, first keys=%s", len(documentListParam), type(first).__name__, list(first.keys()) if isinstance(first, dict) else "n/a")
if isinstance(first, dict) and ("documentData" in first or "documentName" in first):
for doc in documentListParam:
rawData = doc.get("documentData")
+ logger.debug("_resolveDocumentList: doc keys=%s documentData type=%s documentData truthy=%s", list(doc.keys()), type(rawData).__name__, bool(rawData))
if not rawData:
continue
try:
@@ -228,6 +230,8 @@ def _resolveDocumentList(documentListParam, services) -> List[tuple]:
results.append((data, fileId, fileName, mimeType))
if results:
return results
+ else:
+ logger.debug("_resolveDocumentList: first item has no documentData/documentName key, falling through to chat fallback")
chatService = getattr(services, "chat", None)
if not chatService:
@@ -268,7 +272,9 @@ async def processDocuments(self, parameters: Dict[str, Any]) -> ActionResult:
return ActionResult.isFailure(error="featureInstanceId is required")
try:
+ logger.debug("processDocuments: documentListParam type=%s len=%s", type(documentListParam).__name__, len(documentListParam) if isinstance(documentListParam, (list, str)) else "n/a")
extractionDocs = _resolveDocumentList(documentListParam, self.services)
+ logger.debug("processDocuments: extractionDocs count=%d", len(extractionDocs))
if not extractionDocs:
return ActionResult.isFailure(error="No documents found for documentList")