feat: first version of graphical automation feature
This commit is contained in:
parent
9186c60ad2
commit
c168443d4f
19 changed files with 1332 additions and 0 deletions
2
modules/features/automation2/__init__.py
Normal file
2
modules/features/automation2/__init__.py
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Automation2 feature - n8n-style flow automation (backup/parallel to legacy automation)
|
||||
265
modules/features/automation2/mainAutomation2.py
Normal file
265
modules/features/automation2/mainAutomation2.py
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Automation2 Feature - n8n-style flow automation.
|
||||
Minimal bootstrap for feature instance creation. Build from here.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Any, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Stable feature identifier; reused in object keys, API routes and registration.
FEATURE_CODE = "automation2"

# Services required for automation2 (methodDiscovery, ActionExecutor, etc.)
# Each entry is resolved via the service center in getAutomation2Services().
REQUIRED_SERVICES = [
    {"serviceKey": "chat", "meta": {"usage": "Interfaces, RBAC"}},
    {"serviceKey": "utils", "meta": {"usage": "Timestamps, utilities"}},
    {"serviceKey": "ai", "meta": {"usage": "AI nodes"}},
    {"serviceKey": "extraction", "meta": {"usage": "Workflow method actions"}},
    {"serviceKey": "sharepoint", "meta": {"usage": "SharePoint actions"}},
]
# Localized display name and Material Design icon for the feature.
FEATURE_LABEL = {"en": "Automation 2", "de": "Automatisierung 2", "fr": "Automatisation 2"}
FEATURE_ICON = "mdi-sitemap"

# UI objects registered in the RBAC catalog (see registerFeature()).
UI_OBJECTS = [
    {
        "objectKey": "ui.feature.automation2.dashboard",
        "label": {"en": "Dashboard", "de": "Dashboard", "fr": "Tableau de bord"},
        "meta": {"area": "dashboard"}
    },
]

# Resource objects: one per API endpoint that RBAC can grant or deny.
RESOURCE_OBJECTS = [
    {
        "objectKey": "resource.feature.automation2.dashboard",
        "label": {"en": "Access Dashboard", "de": "Dashboard aufrufen", "fr": "Acceder au tableau de bord"},
        "meta": {"endpoint": "/api/automation2/{instanceId}/info", "method": "GET"}
    },
    {
        "objectKey": "resource.feature.automation2.node-types",
        "label": {"en": "Get Node Types", "de": "Node-Typen abrufen", "fr": "Obtenir types de nœuds"},
        "meta": {"endpoint": "/api/automation2/{instanceId}/node-types", "method": "GET"}
    },
    {
        "objectKey": "resource.feature.automation2.execute",
        "label": {"en": "Execute Workflow", "de": "Workflow ausführen", "fr": "Exécuter le workflow"},
        "meta": {"endpoint": "/api/automation2/{instanceId}/execute", "method": "POST"}
    },
]

# Role templates synced to the DB by _syncTemplateRolesToDb().
# NOTE(review): the "m" values in the DATA rule presumably mean mandate-scoped
# access — confirm against the AccessRule datamodel.
TEMPLATE_ROLES = [
    {
        "roleLabel": "automation2-user",
        "description": {
            "en": "Automation2 User - Use automation2 flow builder",
            "de": "Automation2 Benutzer - Flow-Builder nutzen",
            "fr": "Utilisateur Automation2 - Utiliser le flow builder"
        },
        "accessRules": [
            {"context": "UI", "item": "ui.feature.automation2.dashboard", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.automation2.dashboard", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.automation2.node-types", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.automation2.execute", "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
        ]
    },
]
|
||||
|
||||
|
||||
def getRequiredServiceKeys() -> List[str]:
    """Return the service keys that must be resolvable for this feature."""
    keys: List[str] = []
    for spec in REQUIRED_SERVICES:
        keys.append(spec["serviceKey"])
    return keys
|
||||
|
||||
|
||||
def getAutomation2Services(
    user,
    mandateId: Optional[str] = None,
    featureInstanceId: Optional[str] = None,
    workflow=None,
) -> "_Automation2ServiceHub":
    """
    Get a service hub for automation2 using the service center.

    Used for methodDiscovery (I/O nodes) and execution (ActionExecutor).

    Args:
        user: The requesting user object (stored on the hub and the context).
        mandateId: Optional mandate scope for service resolution.
        featureInstanceId: Optional feature instance the hub is bound to.
        workflow: Optional workflow object; when None a minimal placeholder
            (featureCode/id/workflowMode) is used for the service context.

    Returns:
        A populated _Automation2ServiceHub. Services that could not be
        resolved are set to None (never raises for a single failed service).
    """
    # Imported lazily to avoid import cycles at module load time.
    from modules.serviceCenter import getService
    from modules.serviceCenter.context import ServiceCenterContext

    _workflow = workflow
    if _workflow is None:
        # Anonymous placeholder: satisfies the attributes the service context reads.
        _workflow = type("_Placeholder", (), {"featureCode": FEATURE_CODE, "id": None, "workflowMode": None})()

    ctx = ServiceCenterContext(
        user=user,
        mandate_id=mandateId,
        feature_instance_id=featureInstanceId,
        workflow=_workflow,
    )

    hub = _Automation2ServiceHub()
    hub.user = user
    hub.mandateId = mandateId
    hub.featureInstanceId = featureInstanceId
    hub._service_context = ctx
    # NOTE(review): intentionally stores the ORIGINAL workflow (possibly None),
    # not the placeholder — confirm downstream code expects None here.
    hub.workflow = workflow
    hub.featureCode = FEATURE_CODE

    # Resolve each required service; a failure degrades to None rather than aborting.
    for spec in REQUIRED_SERVICES:
        key = spec["serviceKey"]
        try:
            svc = getService(key, ctx)
            setattr(hub, key, svc)
        except Exception as e:
            logger.warning(f"Could not resolve service '{key}' for automation2: {e}")
            setattr(hub, key, None)

    # Mirror commonly used DB interfaces off the chat service for convenience.
    if hub.chat:
        hub.interfaceDbApp = getattr(hub.chat, "interfaceDbApp", None)
        hub.interfaceDbComponent = getattr(hub.chat, "interfaceDbComponent", None)
        hub.interfaceDbChat = getattr(hub.chat, "interfaceDbChat", None)
        hub.rbac = getattr(hub.interfaceDbApp, "rbac", None) if getattr(hub, "interfaceDbApp", None) else None

    return hub
|
||||
|
||||
|
||||
class _Automation2ServiceHub:
    """Lightweight hub for automation2 (methodDiscovery, execution).

    Instances are populated by getAutomation2Services(); the class attributes
    below serve as safe None defaults so attribute access never raises before
    the hub is fully wired up.
    """

    # Request context
    user = None
    mandateId = None
    featureInstanceId = None
    _service_context = None  # ServiceCenterContext used for service resolution
    workflow = None
    featureCode = FEATURE_CODE
    # DB interfaces mirrored from the chat service (when available)
    interfaceDbApp = None
    interfaceDbComponent = None
    interfaceDbChat = None
    rbac = None
    # Resolved services (None when resolution failed)
    chat = None
    ai = None
    utils = None
    extraction = None
    sharepoint = None
|
||||
|
||||
|
||||
def getFeatureDefinition() -> Dict[str, Any]:
    """Build the feature definition payload used for registration."""
    definition: Dict[str, Any] = {
        "code": FEATURE_CODE,
        "label": FEATURE_LABEL,
        "icon": FEATURE_ICON,
        "autoCreateInstance": True,
    }
    return definition
|
||||
|
||||
|
||||
def getUiObjects() -> List[Dict[str, Any]]:
    """Expose the UI object catalog entries for RBAC registration."""
    return UI_OBJECTS
|
||||
|
||||
|
||||
def getResourceObjects() -> List[Dict[str, Any]]:
    """Expose the resource object catalog entries for RBAC registration."""
    return RESOURCE_OBJECTS
|
||||
|
||||
|
||||
def getTemplateRoles() -> List[Dict[str, Any]]:
    """Expose the role templates defined for this feature."""
    return TEMPLATE_ROLES
|
||||
|
||||
|
||||
def registerFeature(catalogService) -> bool:
    """Register this feature's RBAC objects in the catalog.

    Registers UI objects first, then resource objects, then syncs the
    template roles to the database. Returns True on success; any failure
    is logged and reported as False (never raised).
    """
    registrations = (
        (UI_OBJECTS, catalogService.registerUiObject),
        (RESOURCE_OBJECTS, catalogService.registerResourceObject),
    )
    try:
        for objects, register in registrations:
            for entry in objects:
                register(
                    featureCode=FEATURE_CODE,
                    objectKey=entry["objectKey"],
                    label=entry["label"],
                    meta=entry.get("meta"),
                )
        _syncTemplateRolesToDb()
        logger.info(f"Feature '{FEATURE_CODE}' registered {len(UI_OBJECTS)} UI objects and {len(RESOURCE_OBJECTS)} resource objects")
        return True
    except Exception as e:
        logger.error(f"Failed to register feature '{FEATURE_CODE}': {e}")
        return False
|
||||
|
||||
|
||||
def _syncTemplateRolesToDb() -> int:
    """Sync template roles and their AccessRules to database.

    Idempotent: existing global roles (mandateId is None) for this feature
    are reused; only missing ones are created. AccessRules are ensured for
    both new and pre-existing roles.

    Returns:
        Number of roles created (0 on error — failures are logged, not raised).
    """
    try:
        # Lazy imports: avoid import cycles at module load time.
        from modules.interfaces.interfaceDbApp import getRootInterface
        from modules.datamodels.datamodelRbac import Role

        rootInterface = getRootInterface()
        existingRoles = rootInterface.getRolesByFeatureCode(FEATURE_CODE)
        # Only global (mandate-less) roles participate in template matching.
        existingLabels = {r.roleLabel: str(r.id) for r in existingRoles if r.mandateId is None}
        created = 0

        for template in TEMPLATE_ROLES:
            roleLabel = template["roleLabel"]
            if roleLabel in existingLabels:
                roleId = existingLabels[roleLabel]
            else:
                newRole = Role(
                    roleLabel=roleLabel,
                    description=template.get("description", {}),
                    featureCode=FEATURE_CODE,
                    mandateId=None,
                    featureInstanceId=None,
                    isSystemRole=False
                )
                rec = rootInterface.db.recordCreate(Role, newRole.model_dump())
                roleId = rec.get("id")
                created += 1
                logger.info(f"Created template role '{roleLabel}' for {FEATURE_CODE}")

            # Runs for existing roles too, so newly added rules get backfilled.
            _ensureAccessRulesForRole(rootInterface, roleId, template.get("accessRules", []))
        return created
    except Exception as e:
        logger.warning(f"Template role sync for {FEATURE_CODE}: {e}")
        return 0
|
||||
|
||||
|
||||
def _ensureAccessRulesForRole(rootInterface, roleId: str, ruleTemplates: List[Dict[str, Any]]) -> int:
    """Ensure AccessRules exist for a role based on templates.

    Existing rules are matched by (context, item) signature; only missing
    ones are created. Returns the number of rules created.
    """
    from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext

    # Signatures of rules already present for this role.
    currentRules = rootInterface.getAccessRulesByRole(roleId)
    knownSignatures = set()
    for rule in currentRules:
        contextValue = rule.context.value if rule.context else None
        knownSignatures.add((contextValue, rule.item))

    # Map template context strings onto enum members; unknown strings pass through.
    contextLookup = {
        "UI": AccessRuleContext.UI,
        "DATA": AccessRuleContext.DATA,
        "RESOURCE": AccessRuleContext.RESOURCE,
    }

    createdCount = 0
    for template in ruleTemplates:
        contextKey = template.get("context", "UI")
        itemKey = template.get("item")
        if (contextKey, itemKey) in knownSignatures:
            continue
        rule = AccessRule(
            roleId=roleId,
            context=contextLookup.get(contextKey, contextKey),
            item=itemKey,
            view=template.get("view", False),
            read=template.get("read"),
            create=template.get("create"),
            update=template.get("update"),
            delete=template.get("delete"),
        )
        rootInterface.db.recordCreate(AccessRule, rule.model_dump())
        createdCount += 1
    return createdCount
|
||||
8
modules/features/automation2/nodeDefinitions/__init__.py
Normal file
8
modules/features/automation2/nodeDefinitions/__init__.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Node type definitions for automation2 flow builder.
|
||||
|
||||
from .triggers import TRIGGER_NODES
|
||||
from .flow import FLOW_NODES
|
||||
from .data import DATA_NODES
|
||||
|
||||
STATIC_NODE_TYPES = TRIGGER_NODES + FLOW_NODES + DATA_NODES
|
||||
58
modules/features/automation2/nodeDefinitions/data.py
Normal file
58
modules/features/automation2/nodeDefinitions/data.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Data transformation node definitions.
|
||||
|
||||
DATA_NODES = [
|
||||
{
|
||||
"id": "data.setFields",
|
||||
"category": "data",
|
||||
"label": {"en": "Set Fields", "de": "Felder setzen", "fr": "Définir champs"},
|
||||
"description": {"en": "Set or override fields on payload", "de": "Felder setzen oder überschreiben", "fr": "Définir ou écraser des champs"},
|
||||
"parameters": [
|
||||
{"name": "fields", "type": "object", "required": True, "description": {"en": "Key-value pairs", "de": "Schlüssel-Wert-Paare", "fr": "Paires clé-valeur"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-pencil", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.filter",
|
||||
"category": "data",
|
||||
"label": {"en": "Filter", "de": "Filtern", "fr": "Filtrer"},
|
||||
"description": {"en": "Filter array by condition", "de": "Array nach Bedingung filtern", "fr": "Filtrer tableau par condition"},
|
||||
"parameters": [
|
||||
{"name": "condition", "type": "string", "required": True, "description": {"en": "Expression (e.g. item.active == true)", "de": "Bedingung", "fr": "Condition"}},
|
||||
{"name": "itemsPath", "type": "string", "required": False, "description": {"en": "Path to array", "de": "Pfad zum Array", "fr": "Chemin vers le tableau"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-filter", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.parseJson",
|
||||
"category": "data",
|
||||
"label": {"en": "Parse JSON", "de": "JSON parsen", "fr": "Parser JSON"},
|
||||
"description": {"en": "Parse JSON string to object", "de": "JSON-String in Objekt parsen", "fr": "Parser chaîne JSON en objet"},
|
||||
"parameters": [
|
||||
{"name": "jsonPath", "type": "string", "required": False, "description": {"en": "Path to JSON string (default: input)", "de": "Pfad zum JSON", "fr": "Chemin vers JSON"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-code-json", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.template",
|
||||
"category": "data",
|
||||
"label": {"en": "Template / Interpolation", "de": "Vorlage / Interpolation", "fr": "Modèle / Interpolation"},
|
||||
"description": {"en": "Text with {{placeholder}} substitution", "de": "Text mit {{platzhalter}}-Ersetzung", "fr": "Texte avec substitution {{placeholder}}"},
|
||||
"parameters": [
|
||||
{"name": "template", "type": "string", "required": True, "description": {"en": "Template (use {{path}} for values)", "de": "Vorlage", "fr": "Modèle"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-format-text", "color": "#673AB7"},
|
||||
},
|
||||
]
|
||||
82
modules/features/automation2/nodeDefinitions/flow.py
Normal file
82
modules/features/automation2/nodeDefinitions/flow.py
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Flow control node definitions.
|
||||
|
||||
FLOW_NODES = [
|
||||
{
|
||||
"id": "flow.ifElse",
|
||||
"category": "flow",
|
||||
"label": {"en": "If / Else", "de": "Wenn / Sonst", "fr": "Si / Sinon"},
|
||||
"description": {"en": "Branch based on condition", "de": "Verzweigung nach Bedingung", "fr": "Branche selon condition"},
|
||||
"parameters": [
|
||||
{"name": "condition", "type": "string", "required": True, "description": {"en": "Expression to evaluate (e.g. {{value}} > 0)", "de": "Bedingung", "fr": "Condition"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 2,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-source-branch", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.switch",
|
||||
"category": "flow",
|
||||
"label": {"en": "Switch", "de": "Switch", "fr": "Switch"},
|
||||
"description": {"en": "Multiple branches based on value", "de": "Mehrere Zweige nach Wert", "fr": "Branches multiples selon valeur"},
|
||||
"parameters": [
|
||||
{"name": "value", "type": "string", "required": True, "description": {"en": "Value to match", "de": "Zu vergleichender Wert", "fr": "Valeur à comparer"}},
|
||||
{"name": "cases", "type": "array", "required": False, "description": {"en": "List of cases", "de": "Fälle", "fr": "Cas"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.merge",
|
||||
"category": "flow",
|
||||
"label": {"en": "Merge", "de": "Zusammenführen", "fr": "Fusionner"},
|
||||
"description": {"en": "Merge multiple inputs", "de": "Mehrere Eingaben zusammenführen", "fr": "Fusionner plusieurs entrées"},
|
||||
"parameters": [
|
||||
{"name": "mode", "type": "string", "required": False, "description": {"en": "append | combine", "de": "Modus", "fr": "Mode"}},
|
||||
],
|
||||
"inputs": 2,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-merge", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.loop",
|
||||
"category": "flow",
|
||||
"label": {"en": "Loop / For Each", "de": "Schleife / Für Jedes", "fr": "Boucle / Pour Chaque"},
|
||||
"description": {"en": "Iterate over array items", "de": "Über Array-Elemente iterieren", "fr": "Itérer sur les éléments"},
|
||||
"parameters": [
|
||||
{"name": "items", "type": "string", "required": True, "description": {"en": "Path to array (e.g. {{input.items}})", "de": "Pfad zum Array", "fr": "Chemin vers le tableau"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-repeat", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.wait",
|
||||
"category": "flow",
|
||||
"label": {"en": "Wait / Delay", "de": "Warten / Verzögerung", "fr": "Attendre / Délai"},
|
||||
"description": {"en": "Pause for duration", "de": "Pause für Dauer", "fr": "Pause pour durée"},
|
||||
"parameters": [
|
||||
{"name": "seconds", "type": "number", "required": True, "description": {"en": "Seconds to wait", "de": "Sekunden", "fr": "Secondes"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-timer", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.stop",
|
||||
"category": "flow",
|
||||
"label": {"en": "Stop / Terminate", "de": "Stopp / Beenden", "fr": "Arrêter / Terminer"},
|
||||
"description": {"en": "Stop workflow execution", "de": "Workflow-Ausführung beenden", "fr": "Arrêter l'exécution"},
|
||||
"parameters": [],
|
||||
"inputs": 1,
|
||||
"outputs": 0,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-stop", "color": "#F44336"},
|
||||
},
|
||||
]
|
||||
42
modules/features/automation2/nodeDefinitions/triggers.py
Normal file
42
modules/features/automation2/nodeDefinitions/triggers.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Trigger node definitions - workflow entry points.
|
||||
|
||||
TRIGGER_NODES = [
|
||||
{
|
||||
"id": "trigger.manual",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Manual Trigger", "de": "Manueller Trigger", "fr": "Déclencheur manuel"},
|
||||
"description": {"en": "Start workflow on button press", "de": "Startet den Workflow bei Knopfdruck", "fr": "Démarre le workflow sur clic"},
|
||||
"parameters": [],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-play", "color": "#4CAF50"},
|
||||
},
|
||||
{
|
||||
"id": "trigger.schedule",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Schedule", "de": "Zeitplan", "fr": "Planification"},
|
||||
"description": {"en": "Run on a cron schedule", "de": "Läuft nach Cron-Zeitplan", "fr": "S'exécute selon un cron"},
|
||||
"parameters": [
|
||||
{"name": "cron", "type": "string", "required": True, "description": {"en": "Cron expression (e.g. 0 9 * * * for daily at 9)", "de": "Cron-Ausdruck", "fr": "Expression cron"}},
|
||||
],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-clock", "color": "#2196F3"},
|
||||
},
|
||||
{
|
||||
"id": "trigger.formSubmit",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Form Submit", "de": "Formular-Absendung", "fr": "Soumission formulaire"},
|
||||
"description": {"en": "Start when form is submitted", "de": "Startet bei Formular-Absendung", "fr": "Démarre à la soumission du formulaire"},
|
||||
"parameters": [
|
||||
{"name": "formId", "type": "string", "required": True, "description": {"en": "Form identifier", "de": "Formular-ID", "fr": "Identifiant du formulaire"}},
|
||||
],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-form-select", "color": "#9C27B0"},
|
||||
},
|
||||
]
|
||||
167
modules/features/automation2/nodeRegistry.py
Normal file
167
modules/features/automation2/nodeRegistry.py
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Node Type Registry for automation2 - merges static definitions with dynamic I/O nodes from methodDiscovery.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Any, Optional
|
||||
|
||||
from modules.features.automation2.nodeDefinitions import STATIC_NODE_TYPES
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Short method names that map to I/O node category display.
# Keys match the lowercased "Method*" class names coming from methodDiscovery
# (see getIoNodesFromMethods); values are localized display labels.
METHOD_LABELS = {
    "outlook": {"en": "Outlook", "de": "Outlook", "fr": "Outlook"},
    "sharepoint": {"en": "SharePoint", "de": "SharePoint", "fr": "SharePoint"},
    "context": {"en": "Context", "de": "Kontext", "fr": "Contexte"},
    "ai": {"en": "AI", "de": "KI", "fr": "IA"},
    "trustee": {"en": "Trustee", "de": "Trustee", "fr": "Trustee"},
    "jira": {"en": "Jira", "de": "Jira", "fr": "Jira"},
    "chatbot": {"en": "Chatbot", "de": "Chatbot", "fr": "Chatbot"},
}
|
||||
|
||||
|
||||
def _actionNameToLabel(actionName: str) -> str:
|
||||
"""Convert camelCase actionName to readable label."""
|
||||
import re
|
||||
parts = re.sub(r"([A-Z])", r" \1", actionName).strip().split()
|
||||
return " ".join(p.capitalize() for p in parts) if parts else actionName
|
||||
|
||||
|
||||
def _buildIoNodeFromAction(
    shortMethod: str,
    actionName: str,
    actionDef: Dict[str, Any],
    language: str = "en",
) -> Dict[str, Any]:
    """Build a single I/O node definition from a method action.

    The node id follows the ``io.{shortMethod}.{actionName}`` convention; the
    label combines the localized method name with a humanized action name for
    every supported language.

    Args:
        shortMethod: Lowercase method key (e.g. "outlook", "sharepoint").
        actionName: camelCase action name exposed by the method instance.
        actionDef: Action definition with optional "description" and
            "parameters" entries.
        language: Accepted for interface compatibility; labels are produced
            for all supported languages regardless.

    Returns:
        A node-type dict (id, category, label, description, parameters,
        inputs/outputs, executor, meta).
    """
    # Fix: the original computed `lang`, `methodLabel` and `actionLabel` but
    # never used them (the label comprehension recomputed everything per
    # language). Hoist the action label once and drop the dead locals.
    actionLabel = _actionNameToLabel(actionName)
    nodeId = f"io.{shortMethod}.{actionName}"
    # One "<Method> - <Action>" label per supported language; unknown methods
    # fall back to the raw shortMethod key.
    nodeLabel = {
        l: f"{METHOD_LABELS.get(shortMethod, {}).get(l, shortMethod)} - {actionLabel}"
        for l in ("en", "de", "fr")
    }

    parameters = []
    paramDefs = actionDef.get("parameters", {})
    for paramName, paramInfo in paramDefs.items():
        if isinstance(paramInfo, dict):
            p = {
                "name": paramName,
                "type": paramInfo.get("type", "str"),
                "required": paramInfo.get("required", False),
                "description": paramInfo.get("description", ""),
            }
            # Only surface an explicit default; None means "no default".
            if paramInfo.get("default") is not None:
                p["default"] = paramInfo["default"]
            parameters.append(p)
        else:
            # Non-dict parameter specs are treated as a plain description string.
            parameters.append({
                "name": paramName,
                "type": "str",
                "required": False,
                "description": str(paramInfo),
            })

    return {
        "id": nodeId,
        "category": "io",
        "label": nodeLabel,
        # Fall back to the multilingual label when the action has no description.
        "description": actionDef.get("description") or nodeLabel,
        "parameters": parameters,
        "inputs": 1,
        "outputs": 1,
        "executor": "io",
        "meta": {"icon": "mdi-connection", "color": "#00BCD4", "method": shortMethod, "action": actionName},
    }
|
||||
|
||||
|
||||
def getIoNodesFromMethods(methods: Dict[str, Any], language: str = "en") -> List[Dict[str, Any]]:
    """
    Build I/O node types from methodDiscovery.methods.

    methods maps methodName -> { instance, actions: { actionName: {...} } }.
    Only entries whose name starts with "Method" are considered; each short
    method key is processed at most once. Node build failures are logged and
    skipped, never raised.
    """
    ioNodes: List[Dict[str, Any]] = []
    seenMethods = set()

    for methodName, methodInfo in methods.items():
        if not methodName.startswith("Method"):
            continue
        shortMethod = methodName.replace("Method", "").lower()
        if shortMethod in seenMethods:
            continue
        seenMethods.add(shortMethod)

        methodInstance = methodInfo.get("instance")
        if not methodInstance:
            continue

        for actionName, actionDef in methodInstance.actions.items():
            if not isinstance(actionDef, dict):
                continue
            try:
                ioNodes.append(_buildIoNodeFromAction(shortMethod, actionName, actionDef, language))
            except Exception as e:
                logger.warning(f"Failed to build I/O node io.{shortMethod}.{actionName}: {e}")

    return ioNodes
|
||||
|
||||
|
||||
def getNodeTypes(
    services: Any,
    language: str = "en",
) -> List[Dict[str, Any]]:
    """
    Return merged node types: static (trigger, flow, data) followed by
    dynamic I/O nodes derived from methodDiscovery.

    services: hub from getAutomation2Services (drives RBAC-filtered discovery).
    """
    from modules.workflows.processing.shared.methodDiscovery import discoverMethods, methods

    # Populate the shared `methods` registry for this services hub.
    discoverMethods(services)

    return list(STATIC_NODE_TYPES) + getIoNodesFromMethods(methods, language)
|
||||
|
||||
|
||||
def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
|
||||
"""Apply language to label/description/parameters."""
|
||||
lang = language if language in ("en", "de", "fr") else "en"
|
||||
out = dict(node)
|
||||
if isinstance(node.get("label"), dict):
|
||||
out["label"] = node["label"].get(lang, node["label"].get("en", str(node["label"])))
|
||||
if isinstance(node.get("description"), dict):
|
||||
out["description"] = node["description"].get(lang, node["description"].get("en", str(node["description"])))
|
||||
params = []
|
||||
for p in node.get("parameters", []):
|
||||
pc = dict(p)
|
||||
if isinstance(p.get("description"), dict):
|
||||
pc["description"] = p["description"].get(lang, p["description"].get("en", str(p.get("description", ""))))
|
||||
params.append(pc)
|
||||
out["parameters"] = params
|
||||
return out
|
||||
|
||||
|
||||
def getNodeTypesForApi(
    services: Any,
    language: str = "en",
) -> Dict[str, Any]:
    """
    Build the API response for the node-types endpoint: localized node type
    definitions plus the fixed list of palette categories.
    """
    localizedNodes = [
        _localizeNode(nodeType, language)
        for nodeType in getNodeTypes(services, language)
    ]
    categories = [
        {"id": "trigger", "label": {"en": "Trigger", "de": "Trigger", "fr": "Déclencheur"}},
        {"id": "input", "label": {"en": "Input", "de": "Eingabe", "fr": "Entrée"}},
        {"id": "flow", "label": {"en": "Flow", "de": "Ablauf", "fr": "Flux"}},
        {"id": "data", "label": {"en": "Data", "de": "Daten", "fr": "Données"}},
        {"id": "io", "label": {"en": "I/O", "de": "E/A", "fr": "E/S"}},
        {"id": "human", "label": {"en": "Human", "de": "Mensch", "fr": "Humain"}},
    ]
    return {"nodeTypes": localizedNodes, "categories": categories}
|
||||
96
modules/features/automation2/routeFeatureAutomation2.py
Normal file
96
modules/features/automation2/routeFeatureAutomation2.py
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Automation2 routes - node-types, execute, info.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, Path, Query, Body
|
||||
from modules.auth import limiter, getRequestContext, RequestContext
|
||||
|
||||
from modules.features.automation2.mainAutomation2 import getAutomation2Services
|
||||
from modules.features.automation2.nodeRegistry import getNodeTypesForApi
|
||||
from modules.workflows.automation2.executionEngine import executeGraph
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/automation2",
|
||||
tags=["Automation2"],
|
||||
responses={404: {"description": "Not found"}, 403: {"description": "Forbidden"}},
|
||||
)
|
||||
|
||||
|
||||
def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
    """Validate user has access to the automation2 feature instance.

    Raises:
        HTTPException: 404 when the instance does not exist; 403 when the
            user has no enabled FeatureAccess record for it.

    Returns:
        The instance's mandateId as a string ("" when the instance has none).
    """
    from fastapi import HTTPException
    from modules.interfaces.interfaceDbApp import getRootInterface

    rootInterface = getRootInterface()
    instance = rootInterface.getFeatureInstance(instanceId)
    if not instance:
        raise HTTPException(status_code=404, detail=f"Feature instance {instanceId} not found")
    # Access requires an explicit, enabled per-user access record.
    featureAccess = rootInterface.getFeatureAccess(str(context.user.id), instanceId)
    if not featureAccess or not featureAccess.enabled:
        raise HTTPException(status_code=403, detail="Access denied to this feature instance")
    return str(instance.mandateId) if instance.mandateId else ""
|
||||
|
||||
|
||||
@router.get("/{instanceId}/info")
@limiter.limit("60/minute")
def get_automation2_info(
    instanceId: str = Path(..., description="Feature instance ID"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Minimal info endpoint - proves the feature works.

    Validates instance access (404/403 on failure) and returns a static
    status payload.
    """
    _validateInstanceAccess(instanceId, context)
    return {
        "featureCode": "automation2",
        "instanceId": instanceId,
        "status": "ok",
        "message": "Automation2 feature ready. Build from here.",
    }
|
||||
|
||||
|
||||
@router.get("/{instanceId}/node-types")
@limiter.limit("60/minute")
def get_node_types(
    instanceId: str = Path(..., description="Feature instance ID"),
    language: str = Query("en", description="Localization (en, de, fr)"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Return node types for the flow builder: static + I/O from methodDiscovery.

    Validates instance access (404/403 on failure), builds a per-request
    service hub, and returns {"nodeTypes": [...], "categories": [...]}.
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    # Hub resolution is per-request so discovery reflects the caller's RBAC scope.
    services = getAutomation2Services(
        context.user,
        mandateId=mandateId,
        featureInstanceId=instanceId,
    )
    return getNodeTypesForApi(services, language=language)
|
||||
|
||||
|
||||
@router.post("/{instanceId}/execute")
@limiter.limit("30/minute")
async def post_execute(
    instanceId: str = Path(..., description="Feature instance ID"),
    body: dict = Body(..., description="{ workflowId?, graph: { nodes, connections } }"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Execute automation2 graph. Body: { workflowId?, graph: { nodes, connections } }.

    Validates instance access (404/403 on failure), builds a per-request
    service hub, and delegates to the async execution engine, returning its
    result dict unchanged.
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    services = getAutomation2Services(
        context.user,
        mandateId=mandateId,
        featureInstanceId=instanceId,
    )
    # Accept either {"graph": {...}} or the graph posted directly as the body.
    graph = body.get("graph") or body
    workflowId = body.get("workflowId")
    result = await executeGraph(
        graph=graph,
        services=services,
        workflowId=workflowId,
        instanceId=instanceId,
        userId=str(context.user.id) if context.user else None,
        mandateId=mandateId,
    )
    return result
|
||||
|
|
@ -105,6 +105,9 @@ def _getFeatureUiObjects(featureCode: str) -> List[Dict[str, Any]]:
|
|||
elif featureCode == "automation":
|
||||
from modules.features.automation.mainAutomation import UI_OBJECTS
|
||||
return UI_OBJECTS
|
||||
elif featureCode == "automation2":
|
||||
from modules.features.automation2.mainAutomation2 import UI_OBJECTS
|
||||
return UI_OBJECTS
|
||||
elif featureCode == "teamsbot":
|
||||
from modules.features.teamsbot.mainTeamsbot import UI_OBJECTS
|
||||
return UI_OBJECTS
|
||||
|
|
|
|||
|
|
@ -437,6 +437,11 @@ RESOURCE_OBJECTS = [
|
|||
"label": {"en": "Store: Automation", "de": "Store: Automation", "fr": "Store: Automatisation"},
|
||||
"meta": {"category": "store", "featureCode": "automation"}
|
||||
},
|
||||
{
|
||||
"objectKey": "resource.store.automation2",
|
||||
"label": {"en": "Store: Automation 2", "de": "Store: Automation 2", "fr": "Store: Automatisation 2"},
|
||||
"meta": {"category": "store", "featureCode": "automation2"}
|
||||
},
|
||||
{
|
||||
"objectKey": "resource.store.teamsbot",
|
||||
"label": {"en": "Store: Teams Bot", "de": "Store: Teams Bot", "fr": "Store: Teams Bot"},
|
||||
|
|
|
|||
2
modules/workflows/automation2/__init__.py
Normal file
2
modules/workflows/automation2/__init__.py
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# automation2 - n8n-style graph execution engine.
|
||||
115
modules/workflows/automation2/executionEngine.py
Normal file
115
modules/workflows/automation2/executionEngine.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Main execution engine for automation2 graphs.
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any, List, Set
|
||||
|
||||
from modules.workflows.automation2.graphUtils import (
|
||||
parseGraph,
|
||||
buildConnectionMap,
|
||||
validateGraph,
|
||||
topoSort,
|
||||
getInputSources,
|
||||
)
|
||||
|
||||
from modules.workflows.automation2.executors import (
|
||||
TriggerExecutor,
|
||||
FlowExecutor,
|
||||
DataExecutor,
|
||||
IOExecutor,
|
||||
)
|
||||
from modules.features.automation2.nodeDefinitions import STATIC_NODE_TYPES
|
||||
from modules.workflows.processing.shared.methodDiscovery import discoverMethods, methods
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _getNodeTypeIds(services: Any) -> Set[str]:
    """Collect all known node type IDs: static registry plus methodDiscovery actions."""
    discoverMethods(services)
    knownIds: Set[str] = {entry["id"] for entry in STATIC_NODE_TYPES}
    for methodName, methodInfo in methods.items():
        # Only discovered classes named Method* contribute io.* node types.
        if not methodName.startswith("Method"):
            continue
        instance = methodInfo.get("instance")
        if not instance:
            continue
        shortName = methodName.replace("Method", "").lower()
        knownIds.update(f"io.{shortName}.{actionName}" for actionName in instance.actions)
    return knownIds
|
||||
|
||||
|
||||
def _getExecutor(nodeType: str, services: Any) -> Any:
    """Return the executor responsible for a node type's prefix, or None if unknown."""
    dispatch = (
        ("trigger.", lambda: TriggerExecutor()),
        ("flow.", lambda: FlowExecutor()),
        ("data.", lambda: DataExecutor()),
        ("io.", lambda: IOExecutor(services)),  # io.* executors need the service bundle
    )
    for prefix, factory in dispatch:
        if nodeType.startswith(prefix):
            return factory()
    return None
|
||||
|
||||
|
||||
async def executeGraph(
    graph: Dict[str, Any],
    services: Any,
    workflowId: str = None,
    instanceId: str = None,
    userId: str = None,
    mandateId: str = None,
) -> Dict[str, Any]:
    """
    Execute an automation2 graph end to end.

    Args:
        graph: Graph payload { nodes: [...], connections: [...] }.
        services: Service bundle handed through to node executors.
        workflowId: Optional workflow identifier, stored in the run context.
        instanceId: Optional feature instance identifier.
        userId: Optional ID of the user who triggered the run.
        mandateId: Optional mandate identifier.

    Returns:
        { success, nodeOutputs, error?, stopped?, failedNode? } — on a node
        failure the run aborts and "failedNode" names the offending node.
    """
    # Validate before executing anything: every node needs an id and a type
    # known to the static registry or methodDiscovery.
    nodeTypeIds = _getNodeTypeIds(services)
    errors = validateGraph(graph, nodeTypeIds)
    if errors:
        return {"success": False, "error": "; ".join(errors), "nodeOutputs": {}}

    nodes, connections = parseGraph(graph)[:2]
    connectionMap = buildConnectionMap(connections)
    # Per-node lookup: targetInput index -> (sourceNodeId, sourceOutput).
    inputSources = {n["id"]: getInputSources(n["id"], connectionMap) for n in nodes if n.get("id")}

    ordered = topoSort(nodes, connectionMap)
    nodeOutputs: Dict[str, Any] = {}
    # Shared run context given to every executor. nodeOutputs is filled in
    # place as nodes finish so downstream nodes can read upstream results.
    context = {
        "workflowId": workflowId,
        "instanceId": instanceId,
        "userId": userId,
        "mandateId": mandateId,
        "nodeOutputs": nodeOutputs,
        "connectionMap": connectionMap,
        "inputSources": inputSources,
        "services": services,
    }

    for node in ordered:
        # A flow.stop node sets "_stopped" in the context; halt the run here.
        if context.get("_stopped"):
            break
        nodeId = node.get("id")
        nodeType = node.get("type", "")
        executor = _getExecutor(nodeType, services)
        if not executor:
            # Unknown node category: record no output and continue.
            nodeOutputs[nodeId] = None
            continue
        try:
            result = await executor.execute(node, context)
            nodeOutputs[nodeId] = result
        except Exception as e:
            # Fail fast: record the error against this node and abort the run.
            logger.exception(f"automation2 execution failed for node {nodeId} ({nodeType})")
            nodeOutputs[nodeId] = {"error": str(e), "success": False}
            return {
                "success": False,
                "error": str(e),
                "nodeOutputs": nodeOutputs,
                "failedNode": nodeId,
            }

    return {
        "success": True,
        "nodeOutputs": nodeOutputs,
        "stopped": context.get("_stopped", False),
    }
|
||||
9
modules/workflows/automation2/executors/__init__.py
Normal file
9
modules/workflows/automation2/executors/__init__.py
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Executors for automation2 node types.
|
||||
|
||||
from .triggerExecutor import TriggerExecutor
|
||||
from .flowExecutor import FlowExecutor
|
||||
from .dataExecutor import DataExecutor
|
||||
from .ioExecutor import IOExecutor
|
||||
|
||||
__all__ = ["TriggerExecutor", "FlowExecutor", "DataExecutor", "IOExecutor"]
|
||||
104
modules/workflows/automation2/executors/dataExecutor.py
Normal file
104
modules/workflows/automation2/executors/dataExecutor.py
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Data transformation node executor (setFields, filter, parseJson, template).
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Dict, Any, List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_nested(obj: Any, path: str) -> Any:
|
||||
"""Get nested key from obj, e.g. 'data.items'."""
|
||||
for k in path.split("."):
|
||||
if not k:
|
||||
continue
|
||||
if isinstance(obj, dict) and k in obj:
|
||||
obj = obj[k]
|
||||
elif isinstance(obj, (list, tuple)) and k.isdigit():
|
||||
obj = obj[int(k)]
|
||||
else:
|
||||
return None
|
||||
return obj
|
||||
|
||||
|
||||
class DataExecutor:
    """Runs data transformation nodes (setFields, filter, parseJson, template)."""

    async def execute(
        self,
        node: Dict[str, Any],
        context: Dict[str, Any],
    ) -> Any:
        """Dispatch a data.* node to its handler; unknown types pass input through."""
        from modules.workflows.automation2.graphUtils import resolveParameterReferences

        kind = node.get("type", "")
        outputs = context.get("nodeOutputs", {})
        ownId = node.get("id", "")
        sources = context.get("inputSources", {}).get(ownId, {})
        rawParams = node.get("parameters") or {}

        # Upstream data, if input 0 is connected.
        upstream = None
        if 0 in sources:
            sourceId, _ = sources[0]
            upstream = outputs.get(sourceId)

        resolved = {key: resolveParameterReferences(val, outputs) for key, val in rawParams.items()}

        handlers = {
            "data.setFields": lambda: self._setFields(upstream, resolved),
            "data.filter": lambda: self._filter(upstream, resolved),
            "data.parseJson": lambda: self._parseJson(upstream, resolved),
            "data.template": lambda: self._template(upstream, resolved, outputs),
        }
        handler = handlers.get(kind)
        return handler() if handler else upstream

    def _setFields(self, inp: Any, params: Dict) -> Any:
        """Shallow-merge the 'fields' dict over the (dict) input; non-dict input is replaced."""
        fields = params.get("fields", {})
        if not isinstance(fields, dict):
            return inp
        merged = dict(inp) if isinstance(inp, dict) else {}
        merged.update(fields)
        return merged

    def _filter(self, inp: Any, params: Dict) -> Any:
        """Keep items where the condition expression is truthy; failing items are dropped."""
        path = (params.get("itemsPath") or "").strip()
        expr = params.get("condition", "True")
        candidates = _get_nested(inp, path) if path else inp
        if not isinstance(candidates, list):
            candidates = [] if inp is None else [inp]
        kept = []
        for idx, item in enumerate(candidates):
            try:
                # SECURITY NOTE(review): eval on a node-supplied expression.
                # Builtins are stripped, but this is not a real sandbox —
                # do not expose to untrusted graph authors.
                scope = {"item": item, "index": idx, "input": inp}
                if bool(eval(expr, {"__builtins__": {}}, scope)):
                    kept.append(item)
            except Exception:
                # Best-effort: a failing condition simply excludes the item.
                pass
        return kept

    def _parseJson(self, inp: Any, params: Dict) -> Any:
        """Parse a JSON string (optionally plucked via 'jsonPath'); dicts pass through."""
        path = (params.get("jsonPath") or "").strip()
        raw = inp
        if path:
            raw = _get_nested(inp, path) if isinstance(inp, dict) else inp
        if isinstance(raw, dict):
            return raw
        if isinstance(raw, str):
            try:
                return json.loads(raw)
            except json.JSONDecodeError:
                return {"error": "Invalid JSON", "raw": raw[:200]}
        return inp

    def _template(self, inp: Any, params: Dict, nodeOutputs: Dict) -> Any:
        """Render the 'template' string by resolving {{nodeId.path}} references."""
        from modules.workflows.automation2.graphUtils import resolveParameterReferences
        tpl = params.get("template", "")
        rendered = resolveParameterReferences(tpl, nodeOutputs)
        return {"text": rendered, "template": tpl}
|
||||
127
modules/workflows/automation2/executors/flowExecutor.py
Normal file
127
modules/workflows/automation2/executors/flowExecutor.py
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Flow control node executor (ifElse, merge, wait, stop).
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FlowExecutor:
    """Execute flow control nodes (ifElse, merge, wait, stop, switch, loop)."""

    async def execute(
        self,
        node: Dict[str, Any],
        context: Dict[str, Any],
    ) -> Any:
        """Dispatch a flow.* node to its handler; returns None for unknown types."""
        nodeType = node.get("type", "")
        nodeOutputs = context.get("nodeOutputs", {})
        nodeId = node.get("id", "")
        inputSources = context.get("inputSources", {}).get(nodeId, {})

        if nodeType == "flow.ifElse":
            return await self._ifElse(node, nodeOutputs, nodeId, inputSources)
        if nodeType == "flow.merge":
            return await self._merge(node, nodeOutputs, nodeId, inputSources)
        if nodeType == "flow.wait":
            return await self._wait(node, nodeOutputs, nodeId, inputSources)
        if nodeType == "flow.stop":
            # Signal executeGraph to halt the run after this node.
            context["_stopped"] = True
            return {"stopped": True}
        if nodeType == "flow.switch":
            return await self._switch(node, nodeOutputs, nodeId, inputSources)
        if nodeType == "flow.loop":
            return await self._loop(node, nodeOutputs, nodeId, inputSources)

        return None

    def _getInputData(self, nodeId: str, inputSources: Dict, nodeOutputs: Dict, outputIndex: int = 0) -> Any:
        """Get data from the node connected to input 0 (None if unconnected)."""
        sources = inputSources.get(nodeId, {})
        if 0 not in sources:
            return None
        srcId, _ = sources[0]
        return nodeOutputs.get(srcId)

    async def _ifElse(
        self,
        node: Dict,
        nodeOutputs: Dict,
        nodeId: str,
        inputSources: Dict,
    ) -> Any:
        """Evaluate the 'condition' parameter; branch 0 = true, branch 1 = false."""
        condExpr = (node.get("parameters") or {}).get("condition", "")
        inp = self._getInputData(nodeId, {nodeId: inputSources}, nodeOutputs)
        try:
            # Replace {{nodeId}} refs with actual values before evaluating.
            from modules.workflows.automation2.graphUtils import resolveParameterReferences
            resolved = resolveParameterReferences(condExpr, nodeOutputs)
            # SECURITY NOTE(review): unrestricted eval on a graph-supplied
            # expression — acceptable only while graphs come from trusted
            # editors; replace with a safe expression evaluator before
            # exposing to untrusted input.
            ok = bool(eval(resolved)) if resolved else False
        except Exception:
            # Any evaluation failure routes to the false branch.
            ok = False
        return {"branch": 0 if ok else 1, "conditionResult": ok, "input": inp}

    async def _merge(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
        """Merge all connected inputs: 'append' flattens into one list; 'combine' shallow-merges exactly two dicts."""
        mode = (node.get("parameters") or {}).get("mode", "append")
        items = []
        for inpIdx in sorted(inputSources.keys()):
            srcId, _ = inputSources[inpIdx]
            data = nodeOutputs.get(srcId)
            if data is not None:
                if isinstance(data, list):
                    items.extend(data)
                else:
                    items.append(data)
        if mode == "combine" and len(items) == 2:
            if isinstance(items[0], dict) and isinstance(items[1], dict):
                return {**items[0], **items[1]}
        return items

    async def _wait(
        self,
        node: Dict,
        nodeOutputs: Dict,
        nodeId: str,
        inputSources: Dict,
    ) -> Any:
        """Sleep for the 'seconds' parameter (capped at 300s), then pass input 0 through."""
        # NOTE: an earlier duplicate _wait definition with the wrong arity and a
        # reference to an undefined 'context' variable was removed — it was dead
        # code shadowed by this definition.
        secs = (node.get("parameters") or {}).get("seconds", 0)
        if secs > 0:
            await asyncio.sleep(min(float(secs), 300))
        if 0 in inputSources:
            srcId, _ = inputSources[0]
            return nodeOutputs.get(srcId)
        return None

    async def _switch(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
        """Compare the resolved 'value' against 'cases'; returns the matched index or -1."""
        valueExpr = (node.get("parameters") or {}).get("value", "")
        from modules.workflows.automation2.graphUtils import resolveParameterReferences
        value = resolveParameterReferences(valueExpr, nodeOutputs)
        cases = (node.get("parameters") or {}).get("cases", [])
        for i, c in enumerate(cases):
            if c == value:
                return {"match": i, "value": value}
        return {"match": -1, "value": value}

    async def _loop(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
        """Resolve the 'items' parameter into a list payload for downstream iteration."""
        itemsPath = (node.get("parameters") or {}).get("items", "[]")
        from modules.workflows.automation2.graphUtils import resolveParameterReferences
        items = resolveParameterReferences(itemsPath, nodeOutputs)
        if not isinstance(items, list):
            items = [items] if items is not None else []
        return {"items": items, "count": len(items)}
|
||||
56
modules/workflows/automation2/executors/ioExecutor.py
Normal file
56
modules/workflows/automation2/executors/ioExecutor.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# I/O node executor - delegates to ActionExecutor.
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IOExecutor:
    """Execute I/O nodes by calling ActionExecutor.executeAction(method, action, params)."""

    def __init__(self, services: Any):
        # Service bundle forwarded to ActionExecutor on every call.
        self.services = services

    async def execute(
        self,
        node: Dict[str, Any],
        context: Dict[str, Any],
    ) -> Any:
        """Run one io.<method>.<action> node and normalize its result to a plain dict."""
        from modules.workflows.processing.core.actionExecutor import ActionExecutor

        kind = node.get("type", "")
        if not kind.startswith("io."):
            return None

        # Node type encodes the target: io.<method>.<action>.
        pieces = kind.split(".", 2)
        if len(pieces) < 3:
            return None
        _, method, action = pieces

        outputs = context.get("nodeOutputs", {})
        rawParams = dict(node.get("parameters") or {})

        from modules.workflows.automation2.graphUtils import resolveParameterReferences
        callParams = resolveParameterReferences(rawParams, outputs)

        # Feed upstream output into the action parameters without overriding
        # anything the node configured explicitly.
        ownId = node.get("id", "")
        sources = context.get("inputSources", {}).get(ownId, {})
        if 0 in sources:
            upstreamId, _ = sources[0]
            upstream = outputs.get(upstreamId)
            if isinstance(upstream, dict):
                callParams.setdefault("documentList", upstream.get("documents", upstream.get("documentList", [])))
            elif upstream is not None:
                callParams.setdefault("input", upstream)

        runner = ActionExecutor(self.services)
        outcome = await runner.executeAction(method, action, callParams)

        return {
            "success": outcome.success,
            "error": outcome.error,
            "documents": [d.model_dump() if hasattr(d, "model_dump") else d for d in (outcome.documents or [])],
            "data": outcome.model_dump() if hasattr(outcome, "model_dump") else {"success": outcome.success, "error": outcome.error},
        }
|
||||
27
modules/workflows/automation2/executors/triggerExecutor.py
Normal file
27
modules/workflows/automation2/executors/triggerExecutor.py
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Trigger node executor.
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TriggerExecutor:
    """Execute trigger nodes (manual, schedule, formSubmit)."""

    async def execute(
        self,
        node: Dict[str, Any],
        context: Dict[str, Any],
    ) -> Any:
        """Return a small payload describing what fired this run."""
        kind = node.get("type", "")
        if kind == "trigger.formSubmit":
            # Form triggers carry the submitting form's id alongside the payload.
            parameters = node.get("parameters") or {}
            return {
                "triggered": True,
                "source": "formSubmit",
                "formId": parameters.get("formId", ""),
            }
        if kind == "trigger.manual":
            return {"triggered": True, "source": "manual"}
        if kind == "trigger.schedule":
            return {"triggered": True, "source": "schedule"}
        return {"triggered": True, "source": "unknown"}
|
||||
163
modules/workflows/automation2/graphUtils.py
Normal file
163
modules/workflows/automation2/graphUtils.py
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Graph parsing, validation, and topological sort for automation2.
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Any, Tuple, Set
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def parseGraph(graph: Dict[str, Any]) -> Tuple[List[Dict], List[Dict], Set[str]]:
    """
    Split a graph payload into node list, connection list, and the set of IDs
    carried by nodes that actually have an "id" field.

    graph: { nodes: [...], connections: [...] }
    Returns (nodes, connections, node_ids).
    """
    nodeList = graph.get("nodes") or []
    connectionList = graph.get("connections") or []
    presentIds = {entry.get("id") for entry in nodeList if entry.get("id")}
    return nodeList, connectionList, presentIds
|
||||
|
||||
|
||||
def buildConnectionMap(connections: List[Dict]) -> Dict[str, List[Tuple[str, int, int]]]:
    """
    Build map: targetNodeId -> [(sourceNodeId, sourceOutput, targetInput), ...]
    connection: { source, sourceOutput?, target, targetInput? }
    Connections missing a source or a target are skipped.
    """
    mapping: Dict[str, List[Tuple[str, int, int]]] = {}
    for conn in connections:
        # Accept both naming styles: source/target and sourceNode/targetNode.
        sourceId = conn.get("source") or conn.get("sourceNode")
        targetId = conn.get("target") or conn.get("targetNode")
        if not sourceId or not targetId:
            continue
        entry = (sourceId, conn.get("sourceOutput", 0), conn.get("targetInput", 0))
        mapping.setdefault(targetId, []).append(entry)
    return mapping
|
||||
|
||||
|
||||
def getInputSources(nodeId: str, connectionMap: Dict[str, List[Tuple[str, int, int]]]) -> Dict[int, Tuple[str, int]]:
    """
    For a node, return targetInput -> (sourceNodeId, sourceOutput).
    If several connections feed the same input, the last one wins.
    """
    return {ti: (src, so) for src, so, ti in connectionMap.get(nodeId, [])}
|
||||
|
||||
|
||||
def getTriggerNodes(nodes: List[Dict]) -> List[Dict]:
    """Return nodes whose type starts with 'trigger.' or whose category is 'trigger'."""
    def _isTrigger(entry: Dict) -> bool:
        return entry.get("type", "").startswith("trigger.") or entry.get("category") == "trigger"

    return [entry for entry in nodes if _isTrigger(entry)]
|
||||
|
||||
|
||||
def validateGraph(graph: Dict[str, Any], nodeTypeIds: Set[str]) -> List[str]:
    """
    Validate a graph: every node has an id and a known type, and every node
    referenced by a connection actually exists.

    Returns a list of human-readable error messages (empty if valid).
    """
    problems: List[str] = []
    nodes, connections, knownIds = parseGraph(graph)

    for entry in nodes:
        entryId = entry.get("id")
        entryType = entry.get("type")
        if not entryId:
            problems.append("Node missing id")
        elif not entryType:
            problems.append(f"Node {entryId} missing type")
        elif entryType not in nodeTypeIds:
            problems.append(f"Unknown node type '{entryType}' for node {entryId}")

    # Every endpoint named by a connection must be a real node.
    referenced: Set[str] = set()
    for targetId, links in buildConnectionMap(connections).items():
        referenced.add(targetId)
        referenced.update(srcId for srcId, _, _ in links)
    problems.extend(
        f"Connection references non-existent node {ref}"
        for ref in referenced
        if ref not in knownIds
    )

    return problems
|
||||
|
||||
|
||||
def topoSort(nodes: List[Dict], connectionMap: Dict[str, List[Tuple[str, int, int]]]) -> List[Dict]:
    """
    Order nodes for execution: BFS from trigger nodes along connections.

    Args:
        nodes: All graph nodes.
        connectionMap: targetNodeId -> [(sourceNodeId, sourceOutput, targetInput), ...].

    Returns:
        Nodes ordered trigger-first, then downstream in BFS order; any
        disconnected (orphan) nodes are appended at the end. If there are
        no trigger nodes at all, the input order is returned unchanged.
    """
    from collections import deque

    nodeById = {n["id"]: n for n in nodes if n.get("id")}
    triggers = getTriggerNodes(nodes)
    if not triggers:
        return list(nodes)

    # Build a forward adjacency map once (source -> downstream targets) instead
    # of rescanning every connection per dequeued node (was accidental O(V*E)).
    downstream: Dict[str, List[str]] = {}
    for tgt, pairs in connectionMap.items():
        for src, _, _ in pairs:
            downstream.setdefault(src, []).append(tgt)

    visited: Set[str] = set()
    order: List[Dict] = []
    queue = deque()

    # Seed with trigger nodes (deduplicated by id).
    for trig in triggers:
        tid = trig["id"]
        if tid not in visited:
            visited.add(tid)
            queue.append(tid)
            if tid in nodeById:
                order.append(nodeById[tid])

    while queue:
        current = queue.popleft()
        for tgt in downstream.get(current, []):
            if tgt not in visited:
                visited.add(tgt)
                queue.append(tgt)
                if tgt in nodeById:
                    order.append(nodeById[tgt])

    # Append any orphan nodes (e.g. disconnected) so every node appears once.
    for n in nodes:
        if n.get("id") and n["id"] not in visited:
            order.append(n)

    return order
|
||||
|
||||
|
||||
def resolveParameterReferences(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
    """
    Resolve {{nodeId}} / {{nodeId.path.to.value}} references in strings,
    dicts, and lists, using nodeOutputs as the lookup table.

    A bare {{nodeId}} is replaced by the whole node output (JSON-serialized
    for dicts/lists, str() otherwise). Dotted paths walk dict keys and
    numeric list/tuple indices. Any reference that cannot be resolved —
    unknown node, missing key, out-of-range index — is left untouched so
    the literal {{...}} text survives.
    """
    import json
    import re

    if isinstance(value, str):
        def repl(m):
            ref = m.group(1).strip()
            parts = ref.split(".")
            data = nodeOutputs.get(parts[0])
            if data is None:
                return m.group(0)  # unknown node: keep the placeholder
            if len(parts) < 2:
                # Whole-output reference: serialize containers, stringify scalars.
                return json.dumps(data) if isinstance(data, (dict, list)) else str(data)
            # Walk the remaining path segments directly (the original joined
            # and re-split them, and repeated a dead None check).
            for key in parts[1:]:
                if isinstance(data, dict) and key in data:
                    data = data[key]
                # Bounds check: out-of-range index keeps the placeholder
                # instead of raising IndexError.
                elif isinstance(data, (list, tuple)) and key.isdigit() and int(key) < len(data):
                    data = data[int(key)]
                else:
                    return m.group(0)
            return str(data) if data is not None else m.group(0)

        return re.sub(r"\{\{\s*([^}]+)\s*\}\}", repl, value)
    if isinstance(value, dict):
        return {k: resolveParameterReferences(v, nodeOutputs) for k, v in value.items()}
    if isinstance(value, list):
        return [resolveParameterReferences(v, nodeOutputs) for v in value]
    return value
|
||||
|
|
@ -56,6 +56,7 @@ Pillow>=10.0.0 # Für Bildverarbeitung (als PIL importiert)
|
|||
# Audio format conversion handled by pure Python implementation
|
||||
|
||||
## Utilities & Timezone Support
|
||||
setuptools>=65.0.0,<82.0.0 # Provides pkg_resources (removed in 82+); required by google-cloud-translate
|
||||
python-dateutil==2.8.2
|
||||
python-dotenv==1.0.0
|
||||
pytz>=2023.3 # For timezone handling and UTC operations
|
||||
|
|
|
|||
Loading…
Reference in a new issue