605 lines
26 KiB
Python
605 lines
26 KiB
Python
# Copyright (c) 2025 Patrick Motsch
|
|
# Action node executor — maps ai.*, email.*, sharepoint.*, clickup.*, file.*, trustee.* to method actions.
|
|
#
|
|
# Typed port system: parameters resolve via DataRefs / static values. Declarative port inheritance
|
|
# uses ``graphInherit`` on parameter definitions in node JSON (see STATIC_NODE_TYPES): e.g.
|
|
# ``primaryTextRef`` is materialized to explicit refs in pickNotPushMigration.materializePrimaryTextHandover;
|
|
# ``documentListWire`` is applied at runtime in this executor via graphUtils.extract_wired_document_list.
|
|
|
|
|
|
import base64
|
|
import binascii
|
|
import json
|
|
import logging
|
|
import re
|
|
from typing import Any, Dict, Optional
|
|
|
|
from modules.features.graphicalEditor.portTypes import (
|
|
_normalizeError,
|
|
normalizeToSchema,
|
|
)
|
|
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException as _SubscriptionInactiveException
|
|
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError as _BillingContextError
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def _looks_like_ascii_base64_payload(s: str) -> bool:
|
|
"""Heuristic: ActionDocument binary payloads use standard ASCII base64; markdown/text uses other chars (#, *, -, …)."""
|
|
t = "".join(s.split())
|
|
if len(t) < 8:
|
|
return False
|
|
if not t.isascii():
|
|
return False
|
|
return bool(re.fullmatch(r"[A-Za-z0-9+/]+=*", t)) and len(t) % 4 == 0
|
|
|
|
|
|
def _coerce_document_data_to_bytes(raw: Any) -> Optional[bytes]:
|
|
"""Normalize documentData for DB file persistence.
|
|
|
|
ActionDocument conventions (see methodFile.create): binary bodies are carried as ASCII
|
|
base64 strings; plain markdown/text stays as Unicode. Do not UTF-8-encode a base64
|
|
literal — that persists the ASCII of the encoding (file looks like base64 gibberish).
|
|
"""
|
|
if raw is None:
|
|
return None
|
|
if isinstance(raw, bytes):
|
|
return raw if len(raw) > 0 else None
|
|
if isinstance(raw, bytearray):
|
|
b = bytes(raw)
|
|
return b if len(b) > 0 else None
|
|
if isinstance(raw, memoryview):
|
|
b = raw.tobytes()
|
|
return b if len(b) > 0 else None
|
|
if isinstance(raw, str):
|
|
stripped = raw.strip()
|
|
if not stripped:
|
|
return None
|
|
if _looks_like_ascii_base64_payload(stripped):
|
|
try:
|
|
decoded = base64.b64decode(stripped, validate=True)
|
|
except (TypeError, binascii.Error, ValueError):
|
|
try:
|
|
decoded = base64.b64decode(stripped)
|
|
except (binascii.Error, ValueError):
|
|
decoded = b""
|
|
if decoded:
|
|
return decoded
|
|
b = stripped.encode("utf-8")
|
|
return b if len(b) > 0 else None
|
|
return None
|
|
|
|
|
|
def _image_documents_from_docs_list(docs_list: list) -> list:
|
|
"""All image/* ActionDocument dicts (generic — no assumptions about index 0)."""
|
|
return [
|
|
d for d in (docs_list or [])
|
|
if isinstance(d, dict) and str(d.get("mimeType") or "").strip().lower().startswith("image/")
|
|
]
|
|
|
|
|
|
_USER_CONNECTION_ID_RE = re.compile(
|
|
r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
|
|
re.IGNORECASE,
|
|
)
|
|
|
|
|
|
def _isUserConnectionId(val: Any) -> bool:
|
|
if val is None or isinstance(val, (dict, list)):
|
|
return False
|
|
s = str(val).strip()
|
|
return bool(_USER_CONNECTION_ID_RE.match(s))
|
|
|
|
|
|
def _getNodeDefinition(nodeType: str) -> Optional[Dict[str, Any]]:
    """Return the STATIC_NODE_TYPES entry whose ``id`` equals *nodeType*, else ``None``."""
    # Local project import, consistent with this module's lazy import style.
    from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
    return next(
        (node for node in STATIC_NODE_TYPES if node.get("id") == nodeType),
        None,
    )
|
|
|
|
|
|
def _resolveConnectionIdToReference(chatService, connectionId: str, services=None) -> Optional[str]:
|
|
"""
|
|
Resolve connectionId (UserConnection.id) to connectionReference format.
|
|
connectionReference format: connection:{authority}:{externalUsername}
|
|
"""
|
|
if not connectionId:
|
|
return None
|
|
if isinstance(connectionId, str) and connectionId.startswith("connection:"):
|
|
return connectionId
|
|
if chatService:
|
|
try:
|
|
connections = chatService.getUserConnections()
|
|
for c in connections or []:
|
|
conn = c if isinstance(c, dict) else (c.model_dump() if hasattr(c, "model_dump") else {})
|
|
if str(conn.get("id")) == str(connectionId):
|
|
authority = conn.get("authority")
|
|
if hasattr(authority, "value"):
|
|
authority = authority.value
|
|
username = conn.get("externalUsername", "")
|
|
return f"connection:{authority}:{username}"
|
|
except Exception as e:
|
|
logger.debug("_resolveConnectionIdToReference chatService: %s", e)
|
|
app = getattr(services, "interfaceDbApp", None) if services else None
|
|
if app and hasattr(app, "getUserConnectionById"):
|
|
try:
|
|
conn = app.getUserConnectionById(str(connectionId))
|
|
if conn:
|
|
authority = getattr(conn, "authority", None)
|
|
if hasattr(authority, "value"):
|
|
authority = authority.value
|
|
else:
|
|
authority = str(authority) if authority else "outlook"
|
|
username = getattr(conn, "externalUsername", "") or ""
|
|
return f"connection:{authority}:{username}"
|
|
except Exception as e:
|
|
logger.debug("_resolveConnectionIdToReference getUserConnectionById: %s", e)
|
|
return None
|
|
|
|
|
|
def _buildEmailFilter(fromAddress: str = None, subjectContains: str = None, hasAttachment: bool = None) -> str:
|
|
"""Build Microsoft Graph API $filter string."""
|
|
parts = []
|
|
if fromAddress and str(fromAddress).strip():
|
|
safe = str(fromAddress).strip().replace("'", "''")
|
|
parts.append(f"from/emailAddress/address eq '{safe}'")
|
|
if subjectContains and str(subjectContains).strip():
|
|
safe = str(subjectContains).strip().replace("'", "''")
|
|
parts.append(f"contains(subject,'{safe}')")
|
|
if hasAttachment is True:
|
|
parts.append("hasAttachments eq true")
|
|
return " and ".join(parts) if parts else ""
|
|
|
|
|
|
def _buildSearchQuery(
|
|
query: str = None, fromAddress: str = None, toAddress: str = None,
|
|
subjectContains: str = None, bodyContains: str = None,
|
|
hasAttachment: bool = None, filterStr: str = None,
|
|
) -> str:
|
|
"""Build Microsoft Graph $search query from discrete params."""
|
|
if filterStr and str(filterStr).strip():
|
|
return str(filterStr).strip()
|
|
parts = []
|
|
if query and str(query).strip():
|
|
parts.append(str(query).strip())
|
|
if fromAddress and str(fromAddress).strip():
|
|
parts.append(f'from:{str(fromAddress).strip().replace(chr(34), "")}')
|
|
if toAddress and str(toAddress).strip():
|
|
parts.append(f'to:{str(toAddress).strip().replace(chr(34), "")}')
|
|
if subjectContains and str(subjectContains).strip():
|
|
parts.append(f'subject:{str(subjectContains).strip().replace(chr(34), "")}')
|
|
if bodyContains and str(bodyContains).strip():
|
|
parts.append(f'body:{str(bodyContains).strip().replace(chr(34), "")}')
|
|
if hasAttachment is True:
|
|
parts.append("hasattachments:true")
|
|
return " ".join(parts) if parts else "*"
|
|
|
|
|
|
def _buildConnectionRefDict(connRef: str, chatService, services) -> Optional[Dict[str, Any]]:
    """Build a secret-free ``{id, authority, label}`` payload for node outputs.

    *connRef* may be a UserConnection UUID or the logical
    ``connection:{authority}:{user}`` form; UUIDs are resolved to the logical
    form first. Returns ``None`` when no logical form can be established.
    """
    if not connRef or not isinstance(connRef, str):
        return None
    original_ref = connRef.strip()
    ref = original_ref
    if _isUserConnectionId(ref):
        logical = _resolveConnectionIdToReference(chatService, ref, services)
        if logical:
            ref = logical
    if not ref.startswith("connection:"):
        return None

    segments = ref.split(":", 2)
    authority = segments[1] if len(segments) > 1 else ""
    user = segments[2] if len(segments) > 2 else ""
    label = ref

    # Best effort: recover the UserConnection UUID behind this reference.
    conn_id = ""
    if chatService:
        try:
            for candidate in chatService.getUserConnections() or []:
                if isinstance(candidate, dict):
                    record = candidate
                elif hasattr(candidate, "model_dump"):
                    record = candidate.model_dump()
                else:
                    record = {}
                aid = record.get("authority", "")
                if hasattr(aid, "value"):
                    aid = aid.value
                un = record.get("externalUsername", "") or record.get("externalId", "") or ""
                if f"connection:{aid}:{un}" == ref or str(record.get("id")) == original_ref:
                    conn_id = str(record.get("id", "") or "")
                    break
        except Exception as e:
            logger.debug("_buildConnectionRefDict: getUserConnections: %s", e)
    return {"id": conn_id, "authority": authority, "label": label or f"{authority}:{user}"}
|
|
|
|
|
|
def _schemaCarriesConnectionProvenance(outputSchema: str) -> bool:
    """True iff the catalog entry for *outputSchema* declares ``carriesConnectionProvenance``."""
    # Local project import, consistent with this module's lazy import style.
    from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG
    entry = PORT_TYPE_CATALOG.get(outputSchema)
    return bool(getattr(entry, "carriesConnectionProvenance", False))
|
|
|
|
|
|
def _attachConnectionProvenance(
|
|
out: Dict[str, Any],
|
|
resolvedParams: Dict[str, Any],
|
|
outputSchema: str,
|
|
chatService,
|
|
services,
|
|
) -> None:
|
|
"""Mutates out to include connection provenance for typed list/draft outputs."""
|
|
if out.get("connection"):
|
|
return
|
|
cref = resolvedParams.get("connectionReference")
|
|
if not cref:
|
|
return
|
|
if not _schemaCarriesConnectionProvenance(outputSchema):
|
|
return
|
|
payload = _buildConnectionRefDict(str(cref), chatService, services)
|
|
if payload:
|
|
out["connection"] = payload
|
|
|
|
|
|
def _resolveConnectionParam(params: Dict, chatService, services) -> None:
|
|
"""Resolve connectionReference if it looks like a UUID (UserConnection.id)."""
|
|
connRef = params.get("connectionReference")
|
|
if connRef and _isUserConnectionId(connRef):
|
|
resolved = _resolveConnectionIdToReference(chatService, connRef, services)
|
|
if resolved:
|
|
params["connectionReference"] = resolved
|
|
|
|
|
|
def _mapper_emailCheckFilter(params: Dict, **_) -> None:
    """Fold discrete email-check criteria into a single Graph ``filter`` param.

    A filter composed from the discrete fields wins over a raw ``filter``
    value; the consumed discrete keys are removed afterwards.
    """
    composed = _buildEmailFilter(
        fromAddress=params.get("fromAddress"),
        subjectContains=params.get("subjectContains"),
        hasAttachment=params.get("hasAttachment"),
    )
    explicit = (params.get("filter") or "").strip()
    if composed:
        params["filter"] = composed
    else:
        params["filter"] = explicit or None
    for consumed in ("fromAddress", "subjectContains", "hasAttachment"):
        params.pop(consumed, None)
|
|
|
|
|
|
def _mapper_emailSearchQuery(params: Dict, **_) -> None:
    """Fold discrete email-search criteria into a single Graph ``query`` param and drop them."""
    params["query"] = _buildSearchQuery(
        query=params.get("query"),
        fromAddress=params.get("fromAddress"),
        toAddress=params.get("toAddress"),
        subjectContains=params.get("subjectContains"),
        bodyContains=params.get("bodyContains"),
        hasAttachment=params.get("hasAttachment"),
        filterStr=params.get("filter"),
    )
    for consumed in ("fromAddress", "toAddress", "subjectContains", "bodyContains", "hasAttachment", "filter"):
        params.pop(consumed, None)
|
|
|
|
|
|
def _mapper_aiPromptLegacyAlias(params: Dict, **_) -> None:
|
|
"""Backwards-compatible alias: legacy ``prompt`` parameter is exposed as ``aiPrompt``."""
|
|
if "aiPrompt" not in params and "prompt" in params:
|
|
params["aiPrompt"] = params.pop("prompt")
|
|
|
|
|
|
def _mapper_emailDraftContextFromSubjectBody(params: Dict, **_) -> None:
|
|
"""Build ``context`` from discrete subject + body fields and drop them."""
|
|
subject = params.get("subject", "")
|
|
body = params.get("body", "")
|
|
if not (subject or body):
|
|
return
|
|
parts = []
|
|
if subject:
|
|
parts.append(f"Subject: {subject}")
|
|
if body:
|
|
parts.append(f"Body:\n{body}")
|
|
params["context"] = "\n\n".join(parts)
|
|
params.pop("subject", None)
|
|
params.pop("body", None)
|
|
|
|
|
|
def _mapper_clickupTaskUpdateMerge(params: Dict, **_) -> None:
    """Delegate to the ClickUp merge helper, which consolidates task-update entries in *params* in place."""
    # Local project import, consistent with this module's lazy import style.
    from modules.workflows.automation2.clickupTaskUpdateMerge import merge_clickup_task_update_entries
    merge_clickup_task_update_entries(params)
|
|
|
|
|
|
# Registry of declarative parameter mappers. Node definitions reference these
# by name via their ``paramMappers`` list; see _applyParamMappers for dispatch.
_PARAM_MAPPERS: Dict[str, Any] = {
    "emailCheckFilter": _mapper_emailCheckFilter,
    "emailSearchQuery": _mapper_emailSearchQuery,
    "aiPromptLegacyAlias": _mapper_aiPromptLegacyAlias,
    "emailDraftContextFromSubjectBody": _mapper_emailDraftContextFromSubjectBody,
    "clickupTaskUpdateMerge": _mapper_clickupTaskUpdateMerge,
}
|
|
|
|
|
|
def _applyParamMappers(nodeDef: Dict[str, Any], resolvedParams: Dict[str, Any]) -> None:
|
|
"""Run declared ``paramMappers`` from the node definition (no node-id branching)."""
|
|
mappers = nodeDef.get("paramMappers") or []
|
|
for name in mappers:
|
|
fn = _PARAM_MAPPERS.get(name)
|
|
if not fn:
|
|
logger.warning("Unknown paramMapper %r — node %s; skipping", name, nodeDef.get("id"))
|
|
continue
|
|
try:
|
|
fn(resolvedParams)
|
|
except Exception as e:
|
|
logger.warning("paramMapper %r failed for node %s: %s", name, nodeDef.get("id"), e)
|
|
|
|
|
|
def _getOutputSchemaName(nodeDef: Dict) -> str:
|
|
"""Get the output schema name from the node definition."""
|
|
outputPorts = nodeDef.get("outputPorts", {})
|
|
port0 = outputPorts.get(0, {})
|
|
return port0.get("schema", "ActionResult")
|
|
|
|
|
|
class ActionNodeExecutor:
    """Execute action nodes by mapping to method actions via ActionExecutor."""

    def __init__(self, services: Any):
        # Service container; ``services.chat`` is read later for connection resolution.
        self.services = services

    async def execute(
        self,
        node: Dict[str, Any],
        context: Dict[str, Any],
    ) -> Any:
        """Execute one action node and return its schema-normalized output.

        node: graph node dict — reads ``type``, ``id``, ``parameters``.
        context: run context — reads ``nodeOutputs``, ``inputSources``,
            ``_runId``, ``workflowId``, ``userId``, ``mandateId``, ``instanceId``.
        Returns the normalized output dict, or ``None`` when the node type has
        no method-action mapping. Raises PauseForEmailWaitError for
        email-wait nodes; re-raises subscription/billing exceptions unchanged.
        """
        # Project imports are function-local, matching this module's lazy import style.
        from modules.features.graphicalEditor.nodeRegistry import getNodeTypeToMethodAction
        from modules.workflows.automation2.graphUtils import (
            document_list_param_is_empty,
            extract_wired_document_list,
            resolveParameterReferences,
        )
        from modules.workflows.processing.core.actionExecutor import ActionExecutor

        nodeType = node.get("type", "")
        nodeId = node.get("id", "")
        logger.info("ActionNodeExecutor node %s type=%s", nodeId, nodeType)

        # Nodes without a (method, action) mapping are skipped with a debug log.
        mapping = getNodeTypeToMethodAction()
        methodAction = mapping.get(nodeType)
        if not methodAction:
            logger.debug("ActionNodeExecutor node %s not in mapping -> None", nodeId)
            return None

        methodName, actionName = methodAction
        nodeDef = _getNodeDefinition(nodeType) or {}
        outputSchema = _getOutputSchemaName(nodeDef)

        # 1. Resolve parameters (DataRef, SystemVar, Static)
        params = dict(node.get("parameters") or {})
        logger.debug("ActionNodeExecutor node %s raw params keys=%s", nodeId, list(params.keys()))
        resolvedParams = resolveParameterReferences(params, context.get("nodeOutputs", {}))
        logger.debug("ActionNodeExecutor node %s resolved params keys=%s documentList_present=%s documentList_type=%s", nodeId, list(resolvedParams.keys()), "documentList" in resolvedParams, type(resolvedParams.get("documentList")).__name__)

        # 2. Apply defaults from parameter definitions
        for pDef in nodeDef.get("parameters", []):
            pName = pDef.get("name")
            if pName and pName not in resolvedParams and "default" in pDef:
                resolvedParams[pName] = pDef["default"]

        # 2b. documentListWire inheritance: fill a still-empty document-list
        # parameter from the upstream node wired to the declared input port.
        for pDef in nodeDef.get("parameters") or []:
            gi = pDef.get("graphInherit") or {}
            if gi.get("kind") != "documentListWire":
                continue
            pname = pDef.get("name")
            if not pname or not document_list_param_is_empty(resolvedParams.get(pname)):
                continue
            port_ix = int(gi.get("port", 0))
            _src_map = (context.get("inputSources") or {}).get(nodeId) or {}
            _entry = _src_map.get(port_ix)
            if not _entry:
                continue
            _src_node_id, _ = _entry
            _upstream = (context.get("nodeOutputs") or {}).get(_src_node_id)
            _wired = extract_wired_document_list(_upstream)
            if _wired:
                resolvedParams[pname] = _wired

        # 3. Resolve connectionReference (UUID -> logical connection:authority:user)
        chatService = getattr(self.services, "chat", None)
        _resolveConnectionParam(resolvedParams, chatService, self.services)

        # 4. Apply declarative paramMappers from the node definition
        _applyParamMappers(nodeDef, resolvedParams)

        # 5. email.checkEmail pause for email wait: instead of checking inline,
        # suspend the run; the wait subsystem resumes it with the waitConfig below.
        if nodeType == "email.checkEmail":
            runId = context.get("_runId")
            workflowId = context.get("workflowId")
            connRef = resolvedParams.get("connectionReference")
            if runId and workflowId and connRef:
                from modules.workflows.automation2.executors import PauseForEmailWaitError
                waitConfig = {
                    "connectionReference": connRef,
                    "folder": resolvedParams.get("folder", "Inbox"),
                    "limit": min(int(resolvedParams.get("limit") or 10), 50),  # hard cap of 50
                    "filter": resolvedParams.get("filter"),
                }
                raise PauseForEmailWaitError(runId=runId, nodeId=nodeId, waitConfig=waitConfig)

        # 6. Execute action
        logger.info("ActionNodeExecutor node %s calling %s.%s with %d params", nodeId, methodName, actionName, len(resolvedParams))
        try:
            executor = ActionExecutor(self.services)
            result = await executor.executeAction(methodName, actionName, resolvedParams)
        except (_SubscriptionInactiveException, _BillingContextError):
            # Subscription/billing problems must reach the run engine unchanged.
            raise
        except Exception as e:
            logger.exception("ActionNodeExecutor node %s FAILED: %s", nodeId, e)
            return _normalizeError(e, outputSchema)

        # 7. Persist generated documents as files and build JSON-safe output
        _raw_folder_id = resolvedParams.get("folderId")
        persist_folder_id: Optional[str] = None
        if _raw_folder_id is not None:
            _s = str(_raw_folder_id).strip()
            if _s:
                persist_folder_id = _s

        docsList = []
        for d in (result.documents or []):
            dumped = d.model_dump() if hasattr(d, "model_dump") else dict(d) if isinstance(d, dict) else d
            if isinstance(dumped, dict):
                _meta = dumped.get("validationMetadata") if isinstance(dumped.get("validationMetadata"), dict) else {}
                _existing = dumped.get("fileId") or _meta.get("fileId")
                # e.g. file.create already persisted inside the action — avoid a second FileItem with wrong bytes
                if _existing and str(_existing).strip():
                    dumped["documentData"] = None
                    dumped.setdefault("_hasBinaryData", True)
                    docsList.append(dumped)
                    continue

            # Payload bytes for persistence (base64 strings are decoded, text is UTF-8 encoded).
            rawData = getattr(d, "documentData", None) if hasattr(d, "documentData") else (dumped.get("documentData") if isinstance(dumped, dict) else None)
            rawBytes = _coerce_document_data_to_bytes(rawData)
            # Extracted page images are workflow intermediates — keep bytes as base64 on the
            # ActionDocument only; do not create rows in the user's file library (Meine Dateien).
            if isinstance(dumped, dict) and rawBytes:
                _meta = dumped.get("validationMetadata") if isinstance(dumped.get("validationMetadata"), dict) else {}
                if (
                    _meta.get("actionType") == "context.extractContent"
                    and _meta.get("handoverRole") == "extractedMedia"
                ):
                    dumped["documentData"] = base64.b64encode(rawBytes).decode("ascii")
                    dumped["_hasBinaryData"] = True
                    docsList.append(dumped)
                    continue
            if isinstance(dumped, dict) and rawBytes:
                try:
                    from modules.interfaces.interfaceDbManagement import getInterface as _getMgmtInterface
                    from modules.interfaces.interfaceDbApp import getInterface as _getAppInterface
                    from modules.security.rootAccess import getRootUser
                    _userId = context.get("userId")
                    _mandateId = context.get("mandateId")
                    _instanceId = context.get("instanceId")
                    # Persist on behalf of the workflow's user; fall back to root when unresolved.
                    _owner = None
                    if _userId:
                        try:
                            _umap = _getAppInterface(getRootUser()).getUsersByIds([str(_userId)])
                            _owner = _umap.get(str(_userId))
                        except Exception as _ue:
                            logger.warning("Could not resolve workflow user for file persistence: %s", _ue)
                    if _owner is None:
                        _owner = getRootUser()
                        logger.debug(
                            "Persisting workflow document as root user (no resolved owner userId=%r)",
                            _userId,
                        )
                    _mgmt = _getMgmtInterface(_owner, mandateId=_mandateId, featureInstanceId=_instanceId)
                    _docName = dumped.get("documentName") or f"workflow-result-{nodeId}.bin"
                    _mimeType = dumped.get("mimeType") or "application/octet-stream"
                    _fileItem = _mgmt.createFile(_docName, _mimeType, rawBytes, folderId=persist_folder_id)
                    _mgmt.createFileData(_fileItem.id, rawBytes)
                    dumped["fileId"] = _fileItem.id
                    dumped["id"] = _fileItem.id
                    dumped["fileName"] = _fileItem.fileName
                    logger.info("Persisted workflow document %s as file %s", _docName, _fileItem.id)
                except Exception as _fe:
                    # Best effort: failed persistence is logged but does not fail the node.
                    logger.warning("Could not persist workflow document: %s", _fe)
                # Either way, drop raw bytes from the JSON-safe output.
                dumped["documentData"] = None
                dumped["_hasBinaryData"] = True
            docsList.append(dumped)

        # Clean DocumentList shape for document nodes (documents + count, no ActionResult/AiResult noise)
        if outputSchema == "DocumentList":
            if not result.success:
                return _normalizeError(
                    RuntimeError(str(result.error or "document action failed")),
                    outputSchema,
                )
            list_out: Dict[str, Any] = {
                "documents": docsList,
                "count": len(docsList),
            }
            _attachConnectionProvenance(list_out, resolvedParams, outputSchema, chatService, self.services)
            return normalizeToSchema(list_out, outputSchema)

        # Primary text handover: prefer result.data["response"]; else decode document 0.
        extractedContext = ""
        rd_early = getattr(result, "data", None)
        if isinstance(rd_early, dict) and rd_early.get("response") is not None:
            extractedContext = str(rd_early.get("response")).strip()
        elif result.documents:
            doc = result.documents[0]
            raw = getattr(doc, "documentData", None) if hasattr(doc, "documentData") else (doc.get("documentData") if isinstance(doc, dict) else None)
            if isinstance(raw, bytes):
                try:
                    extractedContext = raw.decode("utf-8").strip()
                except (UnicodeDecodeError, ValueError):
                    extractedContext = ""
            elif raw:
                extractedContext = str(raw).strip()
        promptText = str(resolvedParams.get("aiPrompt") or resolvedParams.get("prompt") or "").strip()

        # JSON-safe ``data`` field for the output dict.
        resultData = getattr(result, "data", None)
        if resultData and isinstance(resultData, dict):
            dataField = resultData
        elif hasattr(result, "model_dump"):
            dataField = result.model_dump()
        else:
            dataField = {"success": result.success, "error": result.error}

        out = {
            "success": result.success,
            "error": result.error,
            "documents": docsList,
            "data": dataField,
        }

        if outputSchema == "AiResult":
            out["prompt"] = promptText
            out["response"] = extractedContext
            inputContext = resolvedParams.get("context")
            if inputContext is not None:
                out["context"] = inputContext if isinstance(inputContext, str) else json.dumps(inputContext, ensure_ascii=False, default=str)
            else:
                out["context"] = ""
            # Structured output: surface a JSON-object response also as responseData.
            if extractedContext:
                try:
                    parsed = json.loads(extractedContext)
                    if isinstance(parsed, dict):
                        out["responseData"] = parsed
                except (json.JSONDecodeError, TypeError):
                    pass
        if outputSchema == "AiResult" and result.success:
            out["imageDocumentsOnly"] = _image_documents_from_docs_list(docsList)

        if outputSchema == "ActionResult":
            # Unified handover: mirror AiResult primary paths for DataRefs / primaryTextRef
            inp_ctx = resolvedParams.get("context")
            ctx_str = ""
            if inp_ctx is not None:
                ctx_str = inp_ctx if isinstance(inp_ctx, str) else json.dumps(inp_ctx, ensure_ascii=False, default=str)
            out.setdefault("prompt", "")
            out.setdefault("context", ctx_str if ctx_str else "")
            rsp = str(out.get("response") or "").strip()
            if not rsp:
                out["response"] = extractedContext or ""
            if result.success:
                out["imageDocumentsOnly"] = _image_documents_from_docs_list(docsList)

        # TaskResult: document 0 is expected to carry the task JSON (a dict with "id").
        if outputSchema == "TaskResult" and result.success and docsList:
            try:
                d0 = docsList[0] if isinstance(docsList[0], dict) else {}
                raw = d0.get("documentData")
                if isinstance(raw, str) and raw.strip():
                    parsed = json.loads(raw)
                    if isinstance(parsed, dict) and parsed.get("id") is not None:
                        out["taskId"] = str(parsed["id"])
                        out["task"] = parsed
            except (json.JSONDecodeError, TypeError, ValueError):
                pass

        # ConsolidateResult: compact {result, mode, count} shape.
        if outputSchema == "ConsolidateResult":
            data_dict = result.data if isinstance(getattr(result, "data", None), dict) else {}
            cr_out = {
                "result": data_dict.get("result", ""),
                "mode": data_dict.get("mode", resolvedParams.get("mode", "summarize")),
                "count": int(data_dict.get("count", 0)),
            }
            _attachConnectionProvenance(cr_out, resolvedParams, outputSchema, chatService, self.services)
            return normalizeToSchema(cr_out, outputSchema)

        _attachConnectionProvenance(out, resolvedParams, outputSchema, chatService, self.services)
        return normalizeToSchema(out, outputSchema)
|