next version of visual workflow editor with ClickUp Connections
This commit is contained in:
parent
f796ae3807
commit
d0f8444bac
70 changed files with 5058 additions and 443 deletions
16
app.py
16
app.py
|
|
@ -21,6 +21,7 @@ from datetime import datetime
|
|||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.shared.eventManagement import eventManager
|
||||
from modules.workflows.automation import subAutomationSchedule
|
||||
from modules.workflows.automation2 import subAutomation2Schedule
|
||||
from modules.features.automation2.emailPoller import start as startAutomation2EmailPoller
|
||||
from modules.features.automation2.emailPoller import stop as stopAutomation2EmailPoller
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
|
|
@ -355,7 +356,15 @@ async def lifespan(app: FastAPI):
|
|||
logger.warning(f"Could not initialize feature containers: {e}")
|
||||
|
||||
# --- Init Managers ---
|
||||
import asyncio
|
||||
try:
|
||||
main_loop = asyncio.get_running_loop()
|
||||
eventManager.set_event_loop(main_loop)
|
||||
subAutomation2Schedule.set_main_loop(main_loop)
|
||||
except RuntimeError:
|
||||
pass
|
||||
subAutomationSchedule.start(eventUser) # Automation scheduler
|
||||
subAutomation2Schedule.start(eventUser) # Automation2 schedule trigger (cron)
|
||||
# Automation2 email poller: started on-demand when a run pauses for email.checkEmail
|
||||
eventManager.start()
|
||||
|
||||
|
|
@ -386,6 +395,7 @@ async def lifespan(app: FastAPI):
|
|||
|
||||
# --- Stop Managers ---
|
||||
stopAutomation2EmailPoller(eventUser) # Automation2 email poller (no-op if not running)
|
||||
subAutomation2Schedule.stop(eventUser) # Automation2 schedule
|
||||
eventManager.stop()
|
||||
subAutomationSchedule.stop(eventUser) # Automation scheduler
|
||||
|
||||
|
|
@ -560,6 +570,12 @@ app.include_router(msftRouter)
|
|||
from modules.routes.routeSecurityGoogle import router as googleRouter
|
||||
app.include_router(googleRouter)
|
||||
|
||||
from modules.routes.routeSecurityClickup import router as clickupRouter
|
||||
app.include_router(clickupRouter)
|
||||
|
||||
from modules.routes.routeClickup import router as clickupApiRouter
|
||||
app.include_router(clickupApiRouter)
|
||||
|
||||
from modules.routes.routeVoiceGoogle import router as voiceGoogleRouter
|
||||
app.include_router(voiceGoogleRouter)
|
||||
|
||||
|
|
|
|||
|
|
@ -46,6 +46,11 @@ Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.ap
|
|||
Service_GOOGLE_DATA_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETDJhbGVQMHlFQzNPVFI1ZzBMa3pNMGlQUHhaQm10eVl1bFlSeTBybzlTOWE2MURXQ0hkRlo0NlNGbHQxWEl1OVkxQnVKYlhhOXR1cUF4T3k0WDdscktkY1oyYllRTmdDTWpfbUdwWGtSd1JvNlYxeTBJdEtaaS1vYnItcW0yaFM=
|
||||
Service_GOOGLE_DATA_REDIRECT_URI = http://localhost:8000/api/google/auth/connect/callback
|
||||
|
||||
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||
Service_CLICKUP_CLIENT_SECRET = CZECD706WLSX6UV13YI4ACNW50ADZHHXDAJALHE0YE030QFSI6Y9HP4Y61JT7CF0
|
||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
||||
|
||||
# Stripe Billing (both end with _SECRET for encryption script)
|
||||
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGWDkxSldfM0NCZ3dmbHY5cS1nQlI3UWZ4ZWRrNVdUdEFKa25RckRiQWY0c1E5MjVsZzlfRkZEU0VFU2tNQ01qZnRNQ0pZVU9hVFN6OEU0RXhwdTl3algzLWJlSXRhYmZlMHltSC1XejlGWEU5TDF1LUlYNEh1aG9tRFI4YmlCYzUyei02U1dabWoyb0N2dVFSb1RhWTNnQjBCZkFjV0FfOWdYdDVpX1k5R2pYM1R6SHRiaE10V1l1dnQybjVHWDRiQUJLM0UxRDZnczhJZGFsc3JhOU82QT09
|
||||
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGcHNWTWpBWkFHRExtdU01N3RyZzNsMjhUS3NiVTNCZmMwN2NEcFZ6UkQ1a2I0aUkyNU4wR2dUdHJXYmtkaEFRUnFpcThObHBEQmJkdEFnT1FXeUxOTlU3UDFNRzl6LWdpRFpYdExvY3FTTG9MTkswdEhrVkNKQVFucnBjSnhLNm4=
|
||||
|
|
|
|||
|
|
@ -46,6 +46,11 @@ Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.ap
|
|||
Service_GOOGLE_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
|
||||
Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/connect/callback
|
||||
|
||||
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||
Service_CLICKUP_CLIENT_SECRET = CZECD706WLSX6UV13YI4ACNW50ADZHHXDAJALHE0YE030QFSI6Y9HP4Y61JT7CF0
|
||||
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/clickup/auth/connect/callback
|
||||
|
||||
# Stripe Billing (both end with _SECRET for encryption script)
|
||||
STRIPE_SECRET_KEY_SECRET = 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
|
||||
STRIPE_WEBHOOK_SECRET = INT_ENC:Z0FBQUFBQnBudkpGamJBNW91VUdEaThWRTFiTWpyb3NqSDJJcGtjNkhUVVZqVElxUWExY05KcllSYVk1SkRuS1NjYWpZUk1uU29nb2pzdXUxRzBsOEgyRWtmUEw3dUF4ejFIXzNwTVZRM1R1bVVhTUs4ZHJMT0V4Xy1pcHVfWlBaQV9wVXo5MGlQYXA=
|
||||
|
|
|
|||
|
|
@ -46,6 +46,11 @@ Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.ap
|
|||
Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||
Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/connect/callback
|
||||
|
||||
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||
Service_CLICKUP_CLIENT_SECRET = CZECD706WLSX6UV13YI4ACNW50ADZHHXDAJALHE0YE030QFSI6Y9HP4Y61JT7CF0
|
||||
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
|
||||
|
||||
# Stripe Billing (both end with _SECRET for encryption script)
|
||||
STRIPE_SECRET_KEY_SECRET = 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
|
||||
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||
|
|
|
|||
|
|
@ -35,6 +35,8 @@ class CSRFMiddleware(BaseHTTPMiddleware):
|
|||
"/api/google/auth/login/callback",
|
||||
"/api/google/auth/connect",
|
||||
"/api/google/auth/connect/callback",
|
||||
"/api/clickup/auth/connect",
|
||||
"/api/clickup/auth/connect/callback",
|
||||
"/api/billing/webhook/stripe", # Stripe webhook (auth via Stripe-Signature)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1026,6 +1026,9 @@ class DatabaseConnector:
|
|||
continue
|
||||
colType = fields.get(key, "TEXT")
|
||||
logger.debug(f"_buildPaginationClauses: filter key='{key}' val={val!r} type(val)={type(val).__name__} colType={colType}")
|
||||
if val is None:
|
||||
where_parts.append(f'"{key}" IS NULL')
|
||||
continue
|
||||
if isinstance(val, dict):
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
|
|
|
|||
|
|
@ -52,6 +52,12 @@ class ConnectorResolver:
|
|||
except ImportError:
|
||||
logger.debug("FtpConnector not available (stub)")
|
||||
|
||||
try:
|
||||
from modules.connectors.providerClickup.connectorClickup import ClickupConnector
|
||||
ConnectorResolver._providerRegistry["clickup"] = ClickupConnector
|
||||
except ImportError:
|
||||
logger.warning("ClickupConnector not available")
|
||||
|
||||
async def resolve(self, connectionId: str) -> ProviderConnector:
|
||||
"""Resolve connectionId to a ProviderConnector with a fresh access token."""
|
||||
connection = await self._loadConnection(connectionId)
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ from typing import Optional
|
|||
import logging
|
||||
import aiohttp
|
||||
from modules.datamodels.datamodelTickets import TicketBase, TicketFieldAttribute
|
||||
from modules.serviceCenter.services.serviceClickup.mainServiceClickup import clickup_authorization_header
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -30,7 +31,7 @@ class ConnectorTicketClickup(TicketBase):
|
|||
|
||||
def _headers(self) -> dict:
|
||||
return {
|
||||
"Authorization": self.apiToken,
|
||||
"Authorization": clickup_authorization_header(self.apiToken),
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
|
|
|
|||
7
modules/connectors/providerClickup/__init__.py
Normal file
7
modules/connectors/providerClickup/__init__.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp provider connector."""
|
||||
|
||||
from .connectorClickup import ClickupConnector
|
||||
|
||||
__all__ = ["ClickupConnector"]
|
||||
268
modules/connectors/providerClickup/connectorClickup.py
Normal file
268
modules/connectors/providerClickup/connectorClickup.py
Normal file
|
|
@ -0,0 +1,268 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp ProviderConnector — virtual paths for teams → lists → tasks (table rows).
|
||||
|
||||
Path convention (leading slash, no trailing slash except root):
|
||||
/ — authorized workspaces (teams)
|
||||
/team/{teamId} — spaces in the workspace
|
||||
/team/{teamId}/space/{spaceId} — folders + folderless lists
|
||||
/team/{teamId}/space/{spaceId}/folder/{folderId} — lists in folder
|
||||
/team/{teamId}/list/{listId} — tasks in list (rows)
|
||||
/team/{teamId}/list/{listId}/task/{taskId} — single task (download = JSON)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from modules.connectors.connectorProviderBase import (
|
||||
ProviderConnector,
|
||||
ServiceAdapter,
|
||||
DownloadResult,
|
||||
)
|
||||
from modules.datamodels.datamodelDataSource import ExternalEntry
|
||||
from modules.serviceCenter.services.serviceClickup.mainServiceClickup import ClickupService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# type metadata for ExternalEntry.metadata["cuType"]
|
||||
_CU_TEAM = "team"
|
||||
_CU_SPACE = "space"
|
||||
_CU_FOLDER = "folder"
|
||||
_CU_LIST = "list"
|
||||
_CU_TASK = "task"
|
||||
|
||||
|
||||
def _norm(path: str) -> str:
|
||||
p = (path or "").strip() or "/"
|
||||
if not p.startswith("/"):
|
||||
p = "/" + p
|
||||
if p != "/" and p.endswith("/"):
|
||||
p = p.rstrip("/")
|
||||
return p
|
||||
|
||||
|
||||
class ClickupListsAdapter(ServiceAdapter):
    """Maps ClickUp hierarchy + list tasks to browse/download/upload/search.

    Navigation follows the module-level path convention:
    teams (root) -> spaces -> folders / folderless lists -> tasks.
    All API calls go through a minimal ClickupService instance authorized
    with the connection's access token.
    """

    def __init__(self, access_token: str):
        # Kept for reference/debugging; API calls use self._svc's token.
        self._token = access_token
        # Minimal service instance for API calls (no ServiceCenter context)
        self._svc = ClickupService(context=None, get_service=lambda _: None)
        self._svc.setAccessToken(access_token)

    async def browse(self, path: str, filter: Optional[str] = None) -> List[ExternalEntry]:
        """List children of a virtual path as ExternalEntry objects.

        Dispatches on the normalized path shape (root, team, space, folder,
        list, task). API errors and unsupported paths yield an empty list;
        the `filter` argument is accepted for interface compatibility but
        not applied here.
        """
        p = _norm(path)
        out: List[ExternalEntry] = []

        # Root: authorized workspaces (teams).
        if p == "/":
            data = await self._svc.getAuthorizedTeams()
            if isinstance(data, dict) and data.get("error"):
                logger.warning(f"ClickUp browse root: {data.get('error')}")
                return []
            teams = data.get("teams", []) if isinstance(data, dict) else []
            for t in teams:
                tid = str(t.get("id", ""))
                name = t.get("name") or tid
                out.append(
                    ExternalEntry(
                        name=name,
                        path=f"/team/{tid}",
                        isFolder=True,
                        metadata={"cuType": _CU_TEAM, "id": tid, "raw": t},
                    )
                )
            return out

        # /team/{teamId}: spaces in the workspace.
        m = re.match(r"^/team/([^/]+)$", p)
        if m:
            team_id = m.group(1)
            data = await self._svc.getSpaces(team_id)
            if isinstance(data, dict) and data.get("error"):
                return []
            spaces = data.get("spaces", []) if isinstance(data, dict) else []
            for s in spaces:
                sid = str(s.get("id", ""))
                name = s.get("name") or sid
                out.append(
                    ExternalEntry(
                        name=name,
                        path=f"/team/{team_id}/space/{sid}",
                        isFolder=True,
                        metadata={"cuType": _CU_SPACE, "id": sid, "raw": s},
                    )
                )
            return out

        # /team/{teamId}/space/{spaceId}: folders plus folderless lists.
        m = re.match(r"^/team/([^/]+)/space/([^/]+)$", p)
        if m:
            team_id, space_id = m.group(1), m.group(2)
            folders_r = await self._svc.getFolders(space_id)
            lists_r = await self._svc.getFolderlessLists(space_id)
            # Each sub-collection is added only if its own call succeeded,
            # so a partial failure still returns whatever was retrievable.
            if isinstance(folders_r, dict) and not folders_r.get("error"):
                for f in folders_r.get("folders", []) or []:
                    fid = str(f.get("id", ""))
                    name = f.get("name") or fid
                    out.append(
                        ExternalEntry(
                            name=name,
                            path=f"/team/{team_id}/space/{space_id}/folder/{fid}",
                            isFolder=True,
                            metadata={"cuType": _CU_FOLDER, "id": fid, "raw": f},
                        )
                    )
            if isinstance(lists_r, dict) and not lists_r.get("error"):
                for lst in lists_r.get("lists", []) or []:
                    lid = str(lst.get("id", ""))
                    name = lst.get("name") or lid
                    out.append(
                        ExternalEntry(
                            name=name,
                            # Lists are addressed under the team, not the space.
                            path=f"/team/{team_id}/list/{lid}",
                            isFolder=True,
                            metadata={"cuType": _CU_LIST, "id": lid, "raw": lst},
                        )
                    )
            return out

        # /team/{teamId}/space/{spaceId}/folder/{folderId}: lists in folder.
        m = re.match(r"^/team/([^/]+)/space/([^/]+)/folder/([^/]+)$", p)
        if m:
            team_id, _space_id, folder_id = m.group(1), m.group(2), m.group(3)
            data = await self._svc.getListsInFolder(folder_id)
            if isinstance(data, dict) and data.get("error"):
                return []
            for lst in data.get("lists", []) or []:
                lid = str(lst.get("id", ""))
                name = lst.get("name") or lid
                out.append(
                    ExternalEntry(
                        name=name,
                        path=f"/team/{team_id}/list/{lid}",
                        isFolder=True,
                        metadata={"cuType": _CU_LIST, "id": lid, "raw": lst},
                    )
                )
            return out

        # /team/{teamId}/list/{listId}: tasks in the list (rows), paged.
        m = re.match(r"^/team/([^/]+)/list/([^/]+)$", p)
        if m:
            team_id, list_id = m.group(1), m.group(2)
            page = 0
            while True:
                data = await self._svc.getTasksInList(list_id, page=page)
                if isinstance(data, dict) and data.get("error"):
                    break
                tasks = data.get("tasks", []) if isinstance(data, dict) else []
                for task in tasks:
                    tid = str(task.get("id", ""))
                    name = task.get("name") or tid
                    out.append(
                        ExternalEntry(
                            name=name,
                            path=f"/team/{team_id}/list/{list_id}/task/{tid}",
                            isFolder=False,
                            metadata={
                                "cuType": _CU_TASK,
                                "id": tid,
                                "task": task,
                            },
                        )
                    )
                # A short page signals the last page (assumes ClickUp's
                # default page size of 100 — TODO confirm against API docs).
                if len(tasks) < 100:
                    break
                page += 1
            return out

        # /team/{teamId}/list/{listId}/task/{taskId}: single task entry
        # (no API call here; the entry just points at the download path).
        m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
        if m:
            team_id, list_id, task_id = m.group(1), m.group(2), m.group(3)
            out.append(
                ExternalEntry(
                    name=f"task-{task_id}.json",
                    path=p,
                    isFolder=False,
                    metadata={"cuType": _CU_TASK, "id": task_id, "listId": list_id, "teamId": team_id},
                )
            )
            return out

        logger.warning(f"ClickUp browse: unsupported path {p}")
        return []

    async def download(self, path: str) -> Any:
        """Download a single task as pretty-printed JSON.

        Returns a DownloadResult on success; empty bytes for non-task paths;
        the raw error payload (JSON-encoded bytes) if the API call failed.
        """
        p = _norm(path)
        m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
        if not m:
            return b""
        task_id = m.group(3)
        data = await self._svc.getTask(task_id)
        if isinstance(data, dict) and data.get("error"):
            return json.dumps(data).encode("utf-8")
        payload = json.dumps(data, indent=2).encode("utf-8")
        return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        """Upload attachment to a task. Path must be .../list/{listId}/task/{taskId}."""
        p = _norm(path)
        m = re.match(r"^/team/([^/]+)/list/([^/]+)/task/([^/]+)$", p)
        if not m:
            return {"error": "Path must be /team/{teamId}/list/{listId}/task/{taskId} for upload"}
        task_id = m.group(3)
        return await self._svc.uploadTaskAttachment(task_id, data, fileName)

    async def search(self, query: str, path: Optional[str] = None) -> List[ExternalEntry]:
        """Search tasks team-wide, returning task entries.

        The team is taken from `path` when it starts with /team/{id};
        otherwise the first authorized team is used. Tasks without a
        resolvable list id are skipped (their virtual path cannot be built).
        """
        base = _norm(path or "/")
        team_id: Optional[str] = None
        mt = re.match(r"^/team/([^/]+)", base)
        if mt:
            team_id = mt.group(1)
        if not team_id:
            teams = await self._svc.getAuthorizedTeams()
            if not isinstance(teams, dict) or teams.get("error"):
                return []
            tl = teams.get("teams") or []
            if not tl:
                return []
            # Fall back to the first authorized workspace.
            team_id = str(tl[0].get("id", ""))

        out: List[ExternalEntry] = []
        page = 0
        while True:
            data = await self._svc.searchTeamTasks(team_id, query=query, page=page)
            if isinstance(data, dict) and data.get("error"):
                break
            tasks = data.get("tasks", []) if isinstance(data, dict) else []
            for task in tasks:
                tid = str(task.get("id", ""))
                name = task.get("name") or tid
                list_obj = task.get("list") or {}
                lid = str(list_obj.get("id", "")) if list_obj else ""
                if not lid:
                    continue
                out.append(
                    ExternalEntry(
                        name=name,
                        path=f"/team/{team_id}/list/{lid}/task/{tid}",
                        isFolder=False,
                        metadata={"cuType": _CU_TASK, "id": tid, "task": task},
                    )
                )
            # Short page ends pagination (assumes a search page size of 25 —
            # TODO confirm against the ClickUp API).
            if len(tasks) < 25:
                break
            page += 1
        return out
|
||||
|
||||
|
||||
class ClickupConnector(ProviderConnector):
    """One ClickUp connection → clickup virtual file service."""

    def getAvailableServices(self) -> List[str]:
        """ClickUp exposes exactly one virtual service."""
        return ["clickup"]

    def getServiceAdapter(self, service: str) -> ServiceAdapter:
        """Return the adapter for `service`; only 'clickup' is supported.

        Raises ValueError for any other service name.
        """
        if service == "clickup":
            return ClickupListsAdapter(self.accessToken)
        raise ValueError(f"ClickUp only supports 'clickup' service, got '{service}'")
|
||||
|
|
@ -17,7 +17,9 @@ class DataSource(BaseModel):
|
|||
"""Configured external data source linked to a UserConnection."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
|
||||
connectionId: str = Field(description="FK to UserConnection")
|
||||
sourceType: str = Field(description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder")
|
||||
sourceType: str = Field(
|
||||
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)"
|
||||
)
|
||||
path: str = Field(description="External path (e.g. '/sites/MySite/Documents/Reports')")
|
||||
label: str = Field(description="User-visible label (often the last path segment)")
|
||||
displayPath: Optional[str] = Field(
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ class AuthAuthority(str, Enum):
|
|||
LOCAL = "local"
|
||||
GOOGLE = "google"
|
||||
MSFT = "msft"
|
||||
CLICKUP = "clickup"
|
||||
|
||||
class ConnectionStatus(str, Enum):
|
||||
ACTIVE = "active"
|
||||
|
|
@ -141,7 +142,12 @@ class UserConnection(BaseModel):
|
|||
@property
|
||||
def displayLabel(self) -> str:
|
||||
"""Human-readable label for display in dropdowns"""
|
||||
authorityLabels = {"msft": "Microsoft", "google": "Google", "local": "Local"}
|
||||
authorityLabels = {
|
||||
"msft": "Microsoft",
|
||||
"google": "Google",
|
||||
"local": "Local",
|
||||
"clickup": "ClickUp",
|
||||
}
|
||||
return f"{authorityLabels.get(self.authority.value, self.authority.value)}: {self.externalUsername}"
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -36,6 +36,11 @@ class Automation2Workflow(BaseModel):
|
|||
description="Whether workflow is active",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False},
|
||||
)
|
||||
invocations: List[Dict[str, Any]] = Field(
|
||||
default_factory=list,
|
||||
description="Entry points / starts (manual, form, schedule, webhook, …) configured outside the canvas",
|
||||
json_schema_extra={"frontend_type": "textarea", "frontend_required": False},
|
||||
)
|
||||
|
||||
|
||||
registerModelLabels(
|
||||
|
|
@ -48,6 +53,7 @@ registerModelLabels(
|
|||
"label": {"en": "Label", "de": "Bezeichnung", "fr": "Libellé"},
|
||||
"graph": {"en": "Graph", "de": "Graph", "fr": "Graphe"},
|
||||
"active": {"en": "Active", "de": "Aktiv", "fr": "Actif"},
|
||||
"invocations": {"en": "Starts / Entry points", "de": "Starts / Einstiegspunkte", "fr": "Points d'entrée"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
|
|||
96
modules/features/automation2/entryPoints.py
Normal file
96
modules/features/automation2/entryPoints.py
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
"""
|
||||
Workflow entry points (Starts) — configuration outside the flow editor.
|
||||
|
||||
Kinds align with run envelope trigger.type where applicable.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
# On-demand (gear: Manueller Trigger, Formular)
KINDS_ON_DEMAND = frozenset({"manual", "form", "api"})

# Always-on (gear: Zeitplan, Immer aktiv, plus legacy listener kinds)
KINDS_ALWAYS_ON = frozenset({"schedule", "always_on", "email", "webhook", "event"})

ALL_KINDS = KINDS_ON_DEMAND | KINDS_ALWAYS_ON


def category_for_kind(kind: str) -> str:
    """Return the category bucket for an entry-point kind.

    Kinds in KINDS_ALWAYS_ON map to "always_on"; everything else —
    including unknown kinds — is treated as "on_demand".
    """
    return "always_on" if kind in KINDS_ALWAYS_ON else "on_demand"
||||
|
||||
|
||||
def default_manual_entry_point() -> Dict[str, Any]:
    """Return the single default manual start used when a workflow has no invocations yet."""
    titles = {
        "de": "Jetzt ausführen",
        "en": "Run now",
        "fr": "Exécuter",
    }
    return dict(
        id=str(uuid.uuid4()),
        kind="manual",
        category="on_demand",
        enabled=True,
        title=titles,
        description={},
        config={},
    )
|
||||
|
||||
|
||||
def _normalize_title(title: Any) -> Dict[str, str]:
|
||||
if isinstance(title, dict):
|
||||
return {k: str(v) for k, v in title.items() if v is not None}
|
||||
if isinstance(title, str) and title.strip():
|
||||
return {"de": title, "en": title, "fr": title}
|
||||
return {"de": "Start", "en": "Start", "fr": "Départ"}
|
||||
|
||||
|
||||
def normalize_invocation_entry(raw: Dict[str, Any]) -> Dict[str, Any]:
    """Validate and normalize a single entry-point dict.

    Unknown kinds fall back to "manual"; the category is recomputed from
    the kind when missing or invalid; a fresh id is assigned when absent;
    non-dict config/description values are replaced with empty dicts.
    """
    requested_kind = (raw.get("kind") or "manual").strip()
    kind = requested_kind if requested_kind in ALL_KINDS else "manual"

    category = raw.get("category")
    if category not in ("on_demand", "always_on"):
        category = category_for_kind(kind)

    enabled_value = raw.get("enabled", True)
    if not isinstance(enabled_value, bool):
        enabled_value = bool(enabled_value)

    raw_config = raw.get("config")
    raw_description = raw.get("description")

    return {
        "id": str(raw.get("id") or uuid.uuid4()),
        "kind": kind,
        "category": category,
        "enabled": enabled_value,
        "title": _normalize_title(raw.get("title")),
        "description": raw_description if isinstance(raw_description, dict) else {},
        "config": raw_config if isinstance(raw_config, dict) else {},
    }
|
||||
|
||||
|
||||
def normalize_invocations_list(items: Optional[List[Any]]) -> List[Dict[str, Any]]:
    """Normalize a raw invocations list, guaranteeing at least one entry point.

    Non-dict items are dropped; a missing, empty, or fully-invalid list
    yields the single default manual start.
    """
    normalized = [
        normalize_invocation_entry(raw)
        for raw in (items or [])
        if isinstance(raw, dict)
    ]
    return normalized or [default_manual_entry_point()]
|
||||
|
||||
|
||||
# Schedule / cron: wire an external job runner (APScheduler, Celery, system cron) to call
|
||||
# POST .../execute with entryPointId set to a schedule entry — no separate in-process scheduler here yet.
|
||||
|
||||
|
||||
def find_invocation(workflow: Dict[str, Any], entry_point_id: str) -> Optional[Dict[str, Any]]:
    """Return the workflow's entry-point dict matching `entry_point_id`, or None."""
    candidates = workflow.get("invocations") or []
    return next(
        (inv for inv in candidates if isinstance(inv, dict) and inv.get("id") == entry_point_id),
        None,
    )
|
||||
|
|
@ -30,6 +30,7 @@ from modules.features.automation2.datamodelFeatureAutomation2 import (
|
|||
Automation2WorkflowRun,
|
||||
Automation2HumanTask,
|
||||
)
|
||||
from modules.features.automation2.entryPoints import normalize_invocations_list
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
|
||||
|
|
@ -49,6 +50,83 @@ def getAutomation2Interface(
|
|||
)
|
||||
|
||||
|
||||
def getAllWorkflowsForScheduling() -> List[Dict[str, Any]]:
    """
    Get all active Automation2 workflows that have a schedule entry point (primary invocation).
    Used by the scheduler to register cron jobs. Does not filter by mandate/instance.

    Returns a list of dicts with workflowId, mandateId, featureInstanceId,
    entryPointId, cron (stripped), and the full normalized workflow.
    """
    # Build a dedicated connector: scheduling spans all mandates, so no userId.
    dbHost = APP_CONFIG.get("DB_HOST", "localhost")
    dbDatabase = "poweron_automation2"
    dbUser = APP_CONFIG.get("DB_USER")
    # Prefer the encrypted secret key; fall back to the plain one.
    dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
    dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
    connector = DatabaseConnector(
        dbHost=dbHost,
        dbDatabase=dbDatabase,
        dbUser=dbUser,
        dbPassword=dbPassword,
        dbPort=dbPort,
        userId=None,
    )
    # NOTE(review): relies on the connector's private _ensureTableExists —
    # consider a public existence check on DatabaseConnector.
    if not connector._ensureTableExists(Automation2Workflow):
        logger.warning("Automation2 schedule: table Automation2Workflow does not exist")
        return []
    # Don't filter by active in SQL: existing workflows may have active=NULL.
    # Treat NULL as active; skip only when active is explicitly False.
    records = connector.getRecordset(
        Automation2Workflow,
        recordFilter=None,
    )
    raw_count = len(records) if records else 0
    result: List[Dict[str, Any]] = []
    for r in records or []:
        if r.get("active") is False:
            continue
        wf = dict(r)
        # Guarantees at least one (manual) entry point per workflow.
        wf["invocations"] = normalize_invocations_list(wf.get("invocations"))
        invocations = wf.get("invocations") or []
        primary = invocations[0] if invocations else {}
        if not isinstance(primary, dict):
            primary = {}

        # Cron comes from graph start node params (trigger.schedule)
        graph = wf.get("graph") or {}
        nodes = graph.get("nodes") or []
        cron = None
        for n in nodes:
            if n.get("type") == "trigger.schedule":
                params = n.get("parameters") or {}
                cron = params.get("cron")
                if cron:
                    # First node with a non-empty cron wins.
                    break

        # No usable cron expression → workflow is not schedulable; skip it.
        if not cron or not isinstance(cron, str) or not cron.strip():
            continue

        # Prefer invocations; if graph has trigger.schedule but invocations say manual, still schedule
        if primary.get("kind") == "schedule" and primary.get("enabled", True):
            entry_point_id = primary.get("id")
        elif invocations and isinstance(invocations[0], dict) and invocations[0].get("id"):
            entry_point_id = invocations[0].get("id")
        else:
            # Last resort: synthesize an id so the scheduler can still register.
            entry_point_id = str(uuid.uuid4())

        result.append({
            "workflowId": wf.get("id"),
            "mandateId": wf.get("mandateId"),
            "featureInstanceId": wf.get("featureInstanceId"),
            "entryPointId": entry_point_id,
            "cron": cron.strip(),
            "workflow": wf,
        })
    logger.info(
        "Automation2 schedule: DB has %d workflow(s), %d active with trigger.schedule+cron",
        raw_count,
        len(result),
    )
    return result
|
||||
|
||||
|
||||
class Automation2Objects:
|
||||
"""Interface for Automation2 database operations."""
|
||||
|
||||
|
|
@ -87,18 +165,26 @@ class Automation2Objects:
|
|||
# Workflow CRUD
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
def getWorkflows(self) -> List[Dict[str, Any]]:
|
||||
"""Get all workflows for this mandate and feature instance."""
|
||||
def getWorkflows(self, active: Optional[bool] = None) -> List[Dict[str, Any]]:
|
||||
"""Get all workflows for this mandate and feature instance.
|
||||
Optional active filter: True=only active, False=only inactive, None=all.
|
||||
"""
|
||||
if not self.db._ensureTableExists(Automation2Workflow):
|
||||
return []
|
||||
rf: Dict[str, Any] = {
|
||||
"mandateId": self.mandateId,
|
||||
"featureInstanceId": self.featureInstanceId,
|
||||
}
|
||||
if active is not None:
|
||||
rf["active"] = active
|
||||
records = self.db.getRecordset(
|
||||
Automation2Workflow,
|
||||
recordFilter={
|
||||
"mandateId": self.mandateId,
|
||||
"featureInstanceId": self.featureInstanceId,
|
||||
},
|
||||
recordFilter=rf,
|
||||
)
|
||||
return [dict(r) for r in records] if records else []
|
||||
rows = [dict(r) for r in records] if records else []
|
||||
for wf in rows:
|
||||
wf["invocations"] = normalize_invocations_list(wf.get("invocations"))
|
||||
return rows
|
||||
|
||||
def getWorkflow(self, workflowId: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get a single workflow by ID."""
|
||||
|
|
@ -114,7 +200,9 @@ class Automation2Objects:
|
|||
)
|
||||
if not records:
|
||||
return None
|
||||
return dict(records[0])
|
||||
wf = dict(records[0])
|
||||
wf["invocations"] = normalize_invocations_list(wf.get("invocations"))
|
||||
return wf
|
||||
|
||||
def createWorkflow(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new workflow."""
|
||||
|
|
@ -122,8 +210,18 @@ class Automation2Objects:
|
|||
data["id"] = str(uuid.uuid4())
|
||||
data["mandateId"] = self.mandateId
|
||||
data["featureInstanceId"] = self.featureInstanceId
|
||||
if "active" not in data or data.get("active") is None:
|
||||
data["active"] = True
|
||||
data["invocations"] = normalize_invocations_list(data.get("invocations"))
|
||||
created = self.db.recordCreate(Automation2Workflow, data)
|
||||
return dict(created)
|
||||
out = dict(created)
|
||||
out["invocations"] = normalize_invocations_list(out.get("invocations"))
|
||||
try:
|
||||
from modules.shared.callbackRegistry import callbackRegistry
|
||||
callbackRegistry.trigger("automation2.workflow.changed")
|
||||
except Exception:
|
||||
pass
|
||||
return out
|
||||
|
||||
def updateWorkflow(self, workflowId: str, data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update an existing workflow."""
|
||||
|
|
@ -133,8 +231,17 @@ class Automation2Objects:
|
|||
# Don't overwrite mandateId/featureInstanceId
|
||||
data.pop("mandateId", None)
|
||||
data.pop("featureInstanceId", None)
|
||||
if "invocations" in data:
|
||||
data["invocations"] = normalize_invocations_list(data.get("invocations"))
|
||||
updated = self.db.recordModify(Automation2Workflow, workflowId, data)
|
||||
return dict(updated)
|
||||
out = dict(updated)
|
||||
out["invocations"] = normalize_invocations_list(out.get("invocations"))
|
||||
try:
|
||||
from modules.shared.callbackRegistry import callbackRegistry
|
||||
callbackRegistry.trigger("automation2.workflow.changed")
|
||||
except Exception:
|
||||
pass
|
||||
return out
|
||||
|
||||
def deleteWorkflow(self, workflowId: str) -> bool:
|
||||
"""Delete a workflow."""
|
||||
|
|
@ -142,6 +249,11 @@ class Automation2Objects:
|
|||
if not existing:
|
||||
return False
|
||||
self.db.recordDelete(Automation2Workflow, workflowId)
|
||||
try:
|
||||
from modules.shared.callbackRegistry import callbackRegistry
|
||||
callbackRegistry.trigger("automation2.workflow.changed")
|
||||
except Exception:
|
||||
pass
|
||||
return True
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
|
@ -209,6 +321,28 @@ class Automation2Objects:
|
|||
)
|
||||
return [dict(r) for r in records] if records else []
|
||||
|
||||
def getRecentCompletedRuns(self, limit: int = 20) -> List[Dict[str, Any]]:
|
||||
"""Get recently completed runs for workflows in this instance (for output display)."""
|
||||
if not self.db._ensureTableExists(Automation2WorkflowRun):
|
||||
return []
|
||||
workflows = self.getWorkflows()
|
||||
wf_ids = [w["id"] for w in workflows if w.get("id")]
|
||||
if not wf_ids:
|
||||
return []
|
||||
records = self.db.getRecordset(
|
||||
Automation2WorkflowRun,
|
||||
recordFilter={"status": "completed"},
|
||||
)
|
||||
if not records:
|
||||
return []
|
||||
runs = [dict(r) for r in records if r.get("workflowId") in wf_ids]
|
||||
wf_by_id = {w["id"]: w for w in workflows}
|
||||
for r in runs:
|
||||
wf = wf_by_id.get(r.get("workflowId"), {})
|
||||
r["workflowLabel"] = wf.get("label") or r.get("workflowId", "")
|
||||
runs.sort(key=lambda x: (x.get("_modifiedAt") or x.get("_createdAt") or 0), reverse=True)
|
||||
return runs[:limit]
|
||||
|
||||
def getRunsWaitingForEmail(self) -> List[Dict[str, Any]]:
|
||||
"""Get all paused runs waiting for a new email (for background poller)."""
|
||||
if not self.db._ensureTableExists(Automation2WorkflowRun):
|
||||
|
|
@ -289,23 +423,38 @@ class Automation2Objects:
|
|||
status: str = None,
|
||||
assigneeId: str = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get tasks with optional filters. AssigneeId filters to that user; None returns all."""
|
||||
"""Get tasks with optional filters.
|
||||
When assigneeId is set: returns tasks assigned to that user OR unassigned (so schedule tasks show up).
|
||||
When assigneeId is None: returns all tasks.
|
||||
"""
|
||||
if not self.db._ensureTableExists(Automation2HumanTask):
|
||||
return []
|
||||
rf = {}
|
||||
base_rf: Dict[str, Any] = {}
|
||||
if workflowId:
|
||||
rf["workflowId"] = workflowId
|
||||
base_rf["workflowId"] = workflowId
|
||||
if runId:
|
||||
rf["runId"] = runId
|
||||
base_rf["runId"] = runId
|
||||
if status:
|
||||
rf["status"] = status
|
||||
base_rf["status"] = status
|
||||
if assigneeId:
|
||||
rf["assigneeId"] = assigneeId
|
||||
records = self.db.getRecordset(
|
||||
Automation2HumanTask,
|
||||
recordFilter=rf if rf else None,
|
||||
)
|
||||
items = [dict(r) for r in records] if records else []
|
||||
rf_assigned = {**base_rf, "assigneeId": assigneeId}
|
||||
rf_unassigned = {**base_rf, "assigneeId": None}
|
||||
records1 = self.db.getRecordset(Automation2HumanTask, recordFilter=rf_assigned)
|
||||
records2 = self.db.getRecordset(Automation2HumanTask, recordFilter=rf_unassigned)
|
||||
seen = set()
|
||||
items = []
|
||||
for r in (records1 or []) + (records2 or []):
|
||||
rec = dict(r)
|
||||
tid = rec.get("id")
|
||||
if tid and tid not in seen:
|
||||
seen.add(tid)
|
||||
items.append(rec)
|
||||
else:
|
||||
records = self.db.getRecordset(
|
||||
Automation2HumanTask,
|
||||
recordFilter=base_rf if base_rf else None,
|
||||
)
|
||||
items = [dict(r) for r in records] if records else []
|
||||
workflows = {w["id"]: w for w in self.getWorkflows()}
|
||||
filtered = [t for t in items if t.get("workflowId") in workflows]
|
||||
return filtered
|
||||
|
|
|
|||
|
|
@ -19,6 +19,8 @@ REQUIRED_SERVICES = [
|
|||
{"serviceKey": "ai", "meta": {"usage": "AI nodes"}},
|
||||
{"serviceKey": "extraction", "meta": {"usage": "Workflow method actions"}},
|
||||
{"serviceKey": "sharepoint", "meta": {"usage": "SharePoint actions"}},
|
||||
{"serviceKey": "clickup", "meta": {"usage": "ClickUp actions"}},
|
||||
{"serviceKey": "generation", "meta": {"usage": "file.create document rendering"}},
|
||||
]
|
||||
FEATURE_LABEL = {"en": "Automation 2", "de": "Automatisierung 2", "fr": "Automatisation 2"}
|
||||
FEATURE_ICON = "mdi-sitemap"
|
||||
|
|
@ -157,6 +159,8 @@ class _Automation2ServiceHub:
|
|||
utils = None
|
||||
extraction = None
|
||||
sharepoint = None
|
||||
clickup = None
|
||||
generation = None
|
||||
|
||||
|
||||
async def onStart(eventUser) -> None:
|
||||
|
|
|
|||
|
|
@ -3,18 +3,20 @@
|
|||
|
||||
from .triggers import TRIGGER_NODES
|
||||
from .flow import FLOW_NODES
|
||||
from .data import DATA_NODES
|
||||
from .input import INPUT_NODES
|
||||
from .ai import AI_NODES
|
||||
from .email import EMAIL_NODES
|
||||
from .sharepoint import SHAREPOINT_NODES
|
||||
from .clickup import CLICKUP_NODES
|
||||
from .file import FILE_NODES
|
||||
|
||||
STATIC_NODE_TYPES = (
|
||||
TRIGGER_NODES
|
||||
+ FLOW_NODES
|
||||
+ DATA_NODES
|
||||
+ INPUT_NODES
|
||||
+ AI_NODES
|
||||
+ EMAIL_NODES
|
||||
+ SHAREPOINT_NODES
|
||||
+ CLICKUP_NODES
|
||||
+ FILE_NODES
|
||||
)
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ AI_NODES = [
|
|||
"description": {"en": "Enter a prompt and AI does something", "de": "Prompt eingeben und KI führt aus", "fr": "Entrer une invite et l'IA exécute"},
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "description": {"en": "AI prompt", "de": "KI-Prompt", "fr": "Invite IA"}},
|
||||
{"name": "resultType", "type": "string", "required": False, "description": {"en": "Output format (txt, json, md, etc.)", "de": "Ausgabeformat", "fr": "Format de sortie"}, "default": "txt"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -85,7 +84,6 @@ AI_NODES = [
|
|||
"description": {"en": "Generate document from prompt", "de": "Dokument aus Prompt generieren", "fr": "Générer un document"},
|
||||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "description": {"en": "Generation prompt", "de": "Generierungs-Prompt", "fr": "Invite de génération"}},
|
||||
{"name": "format", "type": "string", "required": False, "description": {"en": "Output format", "de": "Ausgabeformat", "fr": "Format de sortie"}, "default": "docx"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
|
|||
227
modules/features/automation2/nodeDefinitions/clickup.py
Normal file
227
modules/features/automation2/nodeDefinitions/clickup.py
Normal file
|
|
@ -0,0 +1,227 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp nodes — map to MethodClickup actions."""
|
||||
|
||||
CLICKUP_NODES = [
|
||||
{
|
||||
"id": "clickup.searchTasks",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Search tasks", "de": "Aufgaben suchen", "fr": "Rechercher tâches"},
|
||||
"description": {
|
||||
"en": "Search tasks in a workspace (team)",
|
||||
"de": "Aufgaben in einem Workspace suchen",
|
||||
"fr": "Rechercher des tâches dans un espace",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "teamId", "type": "string", "required": True, "description": {"en": "Workspace (team) ID", "de": "Team-/Workspace-ID", "fr": "ID équipe"}},
|
||||
{"name": "query", "type": "string", "required": True, "description": {"en": "Search query", "de": "Suchbegriff", "fr": "Requête"}},
|
||||
{"name": "page", "type": "number", "required": False, "description": {"en": "Page", "de": "Seite", "fr": "Page"}, "default": 0},
|
||||
{
|
||||
"name": "listId",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {
|
||||
"en": "If set, search this list via list API (not team search).",
|
||||
"de": "Wenn gesetzt: Suche in dieser Liste (Listen-API, nicht Team-Suche).",
|
||||
"fr": "Si défini : recherche dans cette liste (API liste).",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "includeClosed",
|
||||
"type": "boolean",
|
||||
"required": False,
|
||||
"default": False,
|
||||
"description": {
|
||||
"en": "With listId: include closed tasks.",
|
||||
"de": "Mit Liste: erledigte Aufgaben einbeziehen.",
|
||||
"fr": "Avec liste : inclure les tâches terminées.",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "fullTaskData",
|
||||
"type": "boolean",
|
||||
"required": False,
|
||||
"default": False,
|
||||
"description": {
|
||||
"en": "Return full ClickUp API JSON per task (very large). Default: slim fields only.",
|
||||
"de": "Vollständige ClickUp-Rohdaten pro Task (sehr groß). Standard: nur schlanke Felder.",
|
||||
"fr": "Réponse brute complète (très volumineuse). Par défaut : champs réduits.",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "matchNameOnly",
|
||||
"type": "boolean",
|
||||
"required": False,
|
||||
"default": True,
|
||||
"description": {
|
||||
"en": "Keep only tasks whose title contains the search query (default: on).",
|
||||
"de": "Nur Aufgaben, deren Titel den Suchbegriff enthält (Standard: an).",
|
||||
"fr": "Ne garder que les tâches dont le titre contient la requête (défaut : oui).",
|
||||
},
|
||||
},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-magnify", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "searchTasks",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"teamId": "teamId",
|
||||
"query": "query",
|
||||
"page": "page",
|
||||
"listId": "listId",
|
||||
"fullTaskData": "fullTaskData",
|
||||
"matchNameOnly": "matchNameOnly",
|
||||
"includeClosed": "includeClosed",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.listTasks",
|
||||
"category": "clickup",
|
||||
"label": {"en": "List tasks", "de": "Aufgaben auflisten", "fr": "Lister les tâches"},
|
||||
"description": {
|
||||
"en": "List tasks in a list (pick list path from browse)",
|
||||
"de": "Aufgaben einer Liste auflisten (Pfad aus Browse)",
|
||||
"fr": "Lister les tâches d'une liste",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "path", "type": "string", "required": True, "description": {"en": "Virtual path to list /team/.../list/...", "de": "Pfad zur Liste", "fr": "Chemin vers la liste"}},
|
||||
{"name": "page", "type": "number", "required": False, "description": {"en": "Page", "de": "Seite", "fr": "Page"}, "default": 0},
|
||||
{"name": "includeClosed", "type": "boolean", "required": False, "description": {"en": "Include closed", "de": "Erledigte einbeziehen", "fr": "Inclure terminées"}, "default": False},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "listTasks",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"path": "pathQuery",
|
||||
"page": "page",
|
||||
"includeClosed": "includeClosed",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.getTask",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Get task", "de": "Aufgabe abrufen", "fr": "Obtenir la tâche"},
|
||||
"description": {"en": "Get one task by ID or path", "de": "Eine Aufgabe abrufen", "fr": "Obtenir une tâche"},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "taskId", "type": "string", "required": False, "description": {"en": "Task ID", "de": "Task-ID", "fr": "ID tâche"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Or path .../task/{id}", "de": "Oder Pfad .../task/{id}", "fr": "Ou chemin .../task/{id}"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "getTask",
|
||||
"_paramMap": {"connectionId": "connectionReference", "taskId": "taskId", "path": "pathQuery"},
|
||||
},
|
||||
{
|
||||
"id": "clickup.createTask",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Create task", "de": "Aufgabe erstellen", "fr": "Créer une tâche"},
|
||||
"description": {"en": "Create a task in a list", "de": "Aufgabe in einer Liste erstellen", "fr": "Créer une tâche dans une liste"},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "teamId", "type": "string", "required": False, "description": {"en": "Workspace (team) for list picker", "de": "Workspace für Listen-Auswahl", "fr": "Équipe"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Optional path /team/.../list/...", "de": "Optional: Pfad zur Liste", "fr": "Chemin optionnel"}},
|
||||
{"name": "listId", "type": "string", "required": False, "description": {"en": "List ID", "de": "Listen-ID", "fr": "ID liste"}},
|
||||
{"name": "name", "type": "string", "required": True, "description": {"en": "Task name", "de": "Name", "fr": "Nom"}},
|
||||
{"name": "description", "type": "string", "required": False, "description": {"en": "Description", "de": "Beschreibung", "fr": "Description"}},
|
||||
{"name": "taskStatus", "type": "string", "required": False, "description": {"en": "Status (list status name)", "de": "Status (wie in der Liste)", "fr": "Statut"}},
|
||||
{"name": "taskPriority", "type": "string", "required": False, "description": {"en": "1–4 or empty", "de": "1–4 oder leer", "fr": "1–4"}},
|
||||
{"name": "taskDueDateMs", "type": "string", "required": False, "description": {"en": "Due date (Unix ms)", "de": "Fälligkeit (ms)", "fr": "Échéance (ms)"}},
|
||||
{"name": "taskAssigneeIds", "type": "object", "required": False, "description": {"en": "Assignee user ids", "de": "Zugewiesene (User-IDs)", "fr": "Assignés"}},
|
||||
{"name": "taskTimeEstimateMs", "type": "string", "required": False, "description": {"en": "Time estimate (ms)", "de": "Zeitschätzung (ms)", "fr": "Estimation (ms)"}},
|
||||
{"name": "taskTimeEstimateHours", "type": "string", "required": False, "description": {"en": "Time estimate (hours)", "de": "Zeitschätzung (Stunden)", "fr": "Heures"}},
|
||||
{"name": "customFieldValues", "type": "object", "required": False, "description": {"en": "Custom field id → value", "de": "Benutzerdefinierte Felder", "fr": "Champs personnalisés"}},
|
||||
{"name": "taskFields", "type": "string", "required": False, "description": {"en": "Extra JSON (advanced)", "de": "Zusätzliches JSON (fortgeschritten)", "fr": "JSON avancé"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "createTask",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"teamId": "teamId",
|
||||
"path": "pathQuery",
|
||||
"listId": "listId",
|
||||
"name": "name",
|
||||
"description": "description",
|
||||
"taskStatus": "taskStatus",
|
||||
"taskPriority": "taskPriority",
|
||||
"taskDueDateMs": "taskDueDateMs",
|
||||
"taskAssigneeIds": "taskAssigneeIds",
|
||||
"taskTimeEstimateMs": "taskTimeEstimateMs",
|
||||
"taskTimeEstimateHours": "taskTimeEstimateHours",
|
||||
"customFieldValues": "customFieldValues",
|
||||
"taskFields": "taskFields",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.updateTask",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Update task", "de": "Aufgabe aktualisieren", "fr": "Mettre à jour la tâche"},
|
||||
"description": {
|
||||
"en": "Update task fields (rows or JSON)",
|
||||
"de": "Felder der Aufgabe ändern (Zeilen oder JSON)",
|
||||
"fr": "Mettre à jour les champs (lignes ou JSON)",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "taskId", "type": "string", "required": False, "description": {"en": "Task ID", "de": "Task-ID", "fr": "ID tâche"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Or path to task", "de": "Oder Pfad", "fr": "Ou chemin"}},
|
||||
{
|
||||
"name": "taskUpdateEntries",
|
||||
"type": "object",
|
||||
"required": False,
|
||||
"description": {
|
||||
"en": "List of {fieldKey, value, customFieldId?}",
|
||||
"de": "Liste der zu ändernden Felder (fieldKey, value, optional customFieldId)",
|
||||
"fr": "Liste de champs à mettre à jour",
|
||||
},
|
||||
},
|
||||
{"name": "taskUpdate", "type": "string", "required": False, "description": {"en": "JSON body for API (optional if rows set)", "de": "JSON für API (optional wenn Zeilen gesetzt)", "fr": "Corps JSON"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "updateTask",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"taskId": "taskId",
|
||||
"path": "path",
|
||||
"taskUpdate": "taskUpdate",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "clickup.uploadAttachment",
|
||||
"category": "clickup",
|
||||
"label": {"en": "Upload attachment", "de": "Anhang hochladen", "fr": "Téléverser pièce jointe"},
|
||||
"description": {"en": "Upload file to a task (upstream file)", "de": "Datei an Task anhängen", "fr": "Joindre un fichier à la tâche"},
|
||||
"parameters": [
|
||||
{"name": "connectionId", "type": "string", "required": True, "description": {"en": "ClickUp connection", "de": "ClickUp-Verbindung", "fr": "Connexion ClickUp"}},
|
||||
{"name": "taskId", "type": "string", "required": False, "description": {"en": "Task ID", "de": "Task-ID", "fr": "ID tâche"}},
|
||||
{"name": "path", "type": "string", "required": False, "description": {"en": "Or path to task", "de": "Oder Pfad", "fr": "Ou chemin"}},
|
||||
{"name": "fileName", "type": "string", "required": False, "description": {"en": "File name", "de": "Dateiname", "fr": "Nom du fichier"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-attachment", "color": "#7B68EE"},
|
||||
"_method": "clickup",
|
||||
"_action": "uploadAttachment",
|
||||
"_paramMap": {
|
||||
"connectionId": "connectionReference",
|
||||
"taskId": "taskId",
|
||||
"path": "path",
|
||||
"fileName": "fileName",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Data transformation node definitions.
|
||||
|
||||
DATA_NODES = [
|
||||
{
|
||||
"id": "data.setFields",
|
||||
"category": "data",
|
||||
"label": {"en": "Set Fields", "de": "Felder setzen", "fr": "Définir champs"},
|
||||
"description": {"en": "Set or override fields on payload", "de": "Felder setzen oder überschreiben", "fr": "Définir ou écraser des champs"},
|
||||
"parameters": [
|
||||
{"name": "fields", "type": "object", "required": True, "description": {"en": "Key-value pairs", "de": "Schlüssel-Wert-Paare", "fr": "Paires clé-valeur"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-pencil", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.filter",
|
||||
"category": "data",
|
||||
"label": {"en": "Filter", "de": "Filtern", "fr": "Filtrer"},
|
||||
"description": {"en": "Filter array by condition", "de": "Array nach Bedingung filtern", "fr": "Filtrer tableau par condition"},
|
||||
"parameters": [
|
||||
{"name": "condition", "type": "string", "required": True, "description": {"en": "Expression (e.g. item.active == true)", "de": "Bedingung", "fr": "Condition"}},
|
||||
{"name": "itemsPath", "type": "string", "required": False, "description": {"en": "Path to array", "de": "Pfad zum Array", "fr": "Chemin vers le tableau"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-filter", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.parseJson",
|
||||
"category": "data",
|
||||
"label": {"en": "Parse JSON", "de": "JSON parsen", "fr": "Parser JSON"},
|
||||
"description": {"en": "Parse JSON string to object", "de": "JSON-String in Objekt parsen", "fr": "Parser chaîne JSON en objet"},
|
||||
"parameters": [
|
||||
{"name": "jsonPath", "type": "string", "required": False, "description": {"en": "Path to JSON string (default: input)", "de": "Pfad zum JSON", "fr": "Chemin vers JSON"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-code-json", "color": "#673AB7"},
|
||||
},
|
||||
{
|
||||
"id": "data.template",
|
||||
"category": "data",
|
||||
"label": {"en": "Template / Interpolation", "de": "Vorlage / Interpolation", "fr": "Modèle / Interpolation"},
|
||||
"description": {"en": "Text with {{placeholder}} substitution", "de": "Text mit {{platzhalter}}-Ersetzung", "fr": "Texte avec substitution {{placeholder}}"},
|
||||
"parameters": [
|
||||
{"name": "template", "type": "string", "required": True, "description": {"en": "Template (use {{path}} for values)", "de": "Vorlage", "fr": "Modèle"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-format-text", "color": "#673AB7"},
|
||||
},
|
||||
]
|
||||
60
modules/features/automation2/nodeDefinitions/file.py
Normal file
60
modules/features/automation2/nodeDefinitions/file.py
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# File node definitions - create files from context (e.g. from AI nodes).
|
||||
|
||||
FILE_NODES = [
|
||||
{
|
||||
"id": "file.create",
|
||||
"category": "file",
|
||||
"label": {"en": "Create File", "de": "Datei erstellen", "fr": "Créer fichier"},
|
||||
"description": {
|
||||
"en": "Create a file from context (text/markdown from AI). Configurable format and style.",
|
||||
"de": "Erstellt eine Datei aus Kontext (Text/Markdown von KI). Format und Stil konfigurierbar.",
|
||||
"fr": "Crée un fichier à partir du contexte. Format et style configurables.",
|
||||
},
|
||||
"parameters": [
|
||||
{
|
||||
"name": "contentSources",
|
||||
"type": "json",
|
||||
"required": False,
|
||||
"description": {
|
||||
"en": "Array of context refs (e.g. AI, form). Concatenated in order. Empty = from connected node.",
|
||||
"de": "Liste von Kontext-Quellen (z.B. KI, Formular). Werden nacheinander zusammengefügt. Leer = vom verbundenen Node.",
|
||||
"fr": "Liste de sources de contexte. Concaténées dans l'ordre. Vide = du noeud connecté.",
|
||||
},
|
||||
"default": [],
|
||||
},
|
||||
{
|
||||
"name": "outputFormat",
|
||||
"type": "string",
|
||||
"required": True,
|
||||
"description": {"en": "Output format", "de": "Ausgabeformat", "fr": "Format de sortie"},
|
||||
"default": "docx",
|
||||
},
|
||||
{
|
||||
"name": "title",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Document title", "de": "Dokumenttitel", "fr": "Titre du document"},
|
||||
},
|
||||
{
|
||||
"name": "templateName",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Style preset: default, corporate, minimal", "de": "Stil-Vorlage", "fr": "Prését style"},
|
||||
},
|
||||
{
|
||||
"name": "language",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Language code (de, en, fr)", "de": "Sprachcode", "fr": "Code langue"},
|
||||
"default": "de",
|
||||
},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3"},
|
||||
"_method": "file",
|
||||
"_action": "create",
|
||||
"_paramMap": {},
|
||||
},
|
||||
]
|
||||
|
|
@ -12,6 +12,7 @@ FLOW_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 2,
|
||||
"outputLabels": {"en": ["Yes", "No"], "de": ["Ja", "Nein"], "fr": ["Oui", "Non"]},
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-source-branch", "color": "#FF9800"},
|
||||
},
|
||||
|
|
@ -29,19 +30,6 @@ FLOW_NODES = [
|
|||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.merge",
|
||||
"category": "flow",
|
||||
"label": {"en": "Merge", "de": "Zusammenführen", "fr": "Fusionner"},
|
||||
"description": {"en": "Merge multiple inputs", "de": "Mehrere Eingaben zusammenführen", "fr": "Fusionner plusieurs entrées"},
|
||||
"parameters": [
|
||||
{"name": "mode", "type": "string", "required": False, "description": {"en": "append | combine", "de": "Modus", "fr": "Mode"}},
|
||||
],
|
||||
"inputs": 2,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-merge", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.loop",
|
||||
"category": "flow",
|
||||
|
|
@ -55,28 +43,4 @@ FLOW_NODES = [
|
|||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-repeat", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.wait",
|
||||
"category": "flow",
|
||||
"label": {"en": "Wait / Delay", "de": "Warten / Verzögerung", "fr": "Attendre / Délai"},
|
||||
"description": {"en": "Pause for duration", "de": "Pause für Dauer", "fr": "Pause pour durée"},
|
||||
"parameters": [
|
||||
{"name": "seconds", "type": "number", "required": True, "description": {"en": "Seconds to wait", "de": "Sekunden", "fr": "Secondes"}},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-timer", "color": "#FF9800"},
|
||||
},
|
||||
{
|
||||
"id": "flow.stop",
|
||||
"category": "flow",
|
||||
"label": {"en": "Stop / Terminate", "de": "Stopp / Beenden", "fr": "Arrêter / Terminer"},
|
||||
"description": {"en": "Stop workflow execution", "de": "Workflow-Ausführung beenden", "fr": "Arrêter l'exécution"},
|
||||
"parameters": [],
|
||||
"inputs": 1,
|
||||
"outputs": 0,
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-stop", "color": "#F44336"},
|
||||
},
|
||||
]
|
||||
|
|
|
|||
|
|
@ -12,7 +12,11 @@ INPUT_NODES = [
|
|||
"name": "fields",
|
||||
"type": "json",
|
||||
"required": True,
|
||||
"description": {"en": "Form fields: [{name, type, label, required, options?}]", "de": "Formularfelder", "fr": "Champs du formulaire"},
|
||||
"description": {
|
||||
"en": "Form fields: [{name, type, label, required, options?}]. type may include clickup_tasks with clickupConnectionId + clickupListId for a ClickUp task dropdown (value {add, rem}).",
|
||||
"de": "Formularfelder. type: u. a. clickup_tasks mit clickupConnectionId und clickupListId für ClickUp-Aufgaben-Dropdown (Wert wie Relationship-Feld).",
|
||||
"fr": "Champs du formulaire",
|
||||
},
|
||||
"default": [],
|
||||
},
|
||||
],
|
||||
|
|
@ -42,7 +46,8 @@ INPUT_NODES = [
|
|||
"label": {"en": "Upload", "de": "Upload", "fr": "Téléversement"},
|
||||
"description": {"en": "User uploads file(s)", "de": "Benutzer lädt Datei(en) hoch", "fr": "L'utilisateur téléverse des fichiers"},
|
||||
"parameters": [
|
||||
{"name": "accept", "type": "string", "required": False, "description": {"en": "MIME types (e.g. .pdf,image/*)", "de": "MIME-Typen", "fr": "Types MIME"}, "default": ""},
|
||||
{"name": "accept", "type": "string", "required": False, "description": {"en": "Accept string for file input (e.g. .pdf,image/*)", "de": "Accept-String für Dateiauswahl", "fr": "Chaîne accept"}, "default": ""},
|
||||
{"name": "allowedTypes", "type": "json", "required": False, "description": {"en": "Selected file types (from UI multi-select)", "de": "Ausgewählte Dateitypen", "fr": "Types sélectionnés"}, "default": []},
|
||||
{"name": "maxSize", "type": "number", "required": False, "description": {"en": "Max file size in MB", "de": "Max. Dateigröße in MB", "fr": "Taille max en Mo"}, "default": 10},
|
||||
{"name": "multiple", "type": "boolean", "required": False, "description": {"en": "Allow multiple files", "de": "Mehrere Dateien erlauben", "fr": "Autoriser plusieurs fichiers"}, "default": False},
|
||||
],
|
||||
|
|
|
|||
|
|
@ -1,12 +1,16 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Trigger node definitions - workflow entry points.
|
||||
# Canvas start nodes — variant reflects workflow configuration (gear in editor).
|
||||
|
||||
TRIGGER_NODES = [
|
||||
{
|
||||
"id": "trigger.manual",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Manual Trigger", "de": "Manueller Trigger", "fr": "Déclencheur manuel"},
|
||||
"description": {"en": "Start workflow on button press", "de": "Startet den Workflow bei Knopfdruck", "fr": "Démarre le workflow sur clic"},
|
||||
"label": {"en": "Start", "de": "Start", "fr": "Départ"},
|
||||
"description": {
|
||||
"en": "Manual, API, or background triggers (webhook, email, …).",
|
||||
"de": "Manuell, API oder Hintergrund-Starts (Webhook, E-Mail, …).",
|
||||
"fr": "Manuel, API ou déclencheurs en arrière-plan.",
|
||||
},
|
||||
"parameters": [],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
|
|
@ -14,29 +18,47 @@ TRIGGER_NODES = [
|
|||
"meta": {"icon": "mdi-play", "color": "#4CAF50"},
|
||||
},
|
||||
{
|
||||
"id": "trigger.schedule",
|
||||
"id": "trigger.form",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Schedule", "de": "Zeitplan", "fr": "Planification"},
|
||||
"description": {"en": "Run on a cron schedule", "de": "Läuft nach Cron-Zeitplan", "fr": "S'exécute selon un cron"},
|
||||
"label": {"en": "Start (form)", "de": "Start (Formular)", "fr": "Départ (formulaire)"},
|
||||
"description": {
|
||||
"en": "Form fields are filled at run time; configure fields on this node.",
|
||||
"de": "Felder werden beim Start befüllt; konfigurieren Sie die Felder auf dieser Node.",
|
||||
"fr": "Les champs sont remplis au démarrage.",
|
||||
},
|
||||
"parameters": [
|
||||
{"name": "cron", "type": "string", "required": True, "description": {"en": "Cron expression (e.g. 0 9 * * * for daily at 9)", "de": "Cron-Ausdruck", "fr": "Expression cron"}},
|
||||
],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-clock", "color": "#2196F3"},
|
||||
},
|
||||
{
|
||||
"id": "trigger.formSubmit",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Form Submit", "de": "Formular-Absendung", "fr": "Soumission formulaire"},
|
||||
"description": {"en": "Start when form is submitted", "de": "Startet bei Formular-Absendung", "fr": "Démarre à la soumission du formulaire"},
|
||||
"parameters": [
|
||||
{"name": "formId", "type": "string", "required": True, "description": {"en": "Form identifier", "de": "Formular-ID", "fr": "Identifiant du formulaire"}},
|
||||
{
|
||||
"name": "formFields",
|
||||
"type": "json",
|
||||
"required": False,
|
||||
"description": {"en": "Field definitions", "de": "Felddefinitionen", "fr": "Définitions"},
|
||||
},
|
||||
],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-form-select", "color": "#9C27B0"},
|
||||
},
|
||||
{
|
||||
"id": "trigger.schedule",
|
||||
"category": "trigger",
|
||||
"label": {"en": "Start (schedule)", "de": "Start (Zeitplan)", "fr": "Départ (planification)"},
|
||||
"description": {
|
||||
"en": "Cron expression for scheduled runs (configure on this node).",
|
||||
"de": "Cron-Ausdruck für geplante Läufe.",
|
||||
"fr": "Expression cron pour les exécutions planifiées.",
|
||||
},
|
||||
"parameters": [
|
||||
{
|
||||
"name": "cron",
|
||||
"type": "string",
|
||||
"required": False,
|
||||
"description": {"en": "Cron expression", "de": "Cron-Ausdruck", "fr": "Expression cron"},
|
||||
},
|
||||
],
|
||||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-clock", "color": "#2196F3"},
|
||||
},
|
||||
]
|
||||
|
|
|
|||
|
|
@ -36,6 +36,11 @@ def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
|
|||
out["label"] = node["label"].get(lang, node["label"].get("en", str(node["label"])))
|
||||
if isinstance(node.get("description"), dict):
|
||||
out["description"] = node["description"].get(lang, node["description"].get("en", str(node["description"])))
|
||||
ol = node.get("outputLabels")
|
||||
if isinstance(ol, dict) and ol:
|
||||
first = next(iter(ol.values()), None)
|
||||
if isinstance(first, (list, tuple)):
|
||||
out["outputLabels"] = ol.get(lang, ol.get("en", list(first)))
|
||||
params = []
|
||||
for p in node.get("parameters", []):
|
||||
pc = dict(p)
|
||||
|
|
@ -61,8 +66,10 @@ def getNodeTypesForApi(
|
|||
{"id": "flow", "label": {"en": "Flow", "de": "Ablauf", "fr": "Flux"}},
|
||||
{"id": "data", "label": {"en": "Data", "de": "Daten", "fr": "Données"}},
|
||||
{"id": "ai", "label": {"en": "AI", "de": "KI", "fr": "IA"}},
|
||||
{"id": "file", "label": {"en": "File", "de": "Datei", "fr": "Fichier"}},
|
||||
{"id": "email", "label": {"en": "Email", "de": "E-Mail", "fr": "Email"}},
|
||||
{"id": "sharepoint", "label": {"en": "SharePoint", "de": "SharePoint", "fr": "SharePoint"}},
|
||||
{"id": "clickup", "label": {"en": "ClickUp", "de": "ClickUp", "fr": "ClickUp"}},
|
||||
]
|
||||
return {"nodeTypes": localized, "categories": categories}
|
||||
|
||||
|
|
|
|||
|
|
@ -5,6 +5,8 @@ Automation2 routes - node-types, execute, workflows, runs, tasks, connections, b
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
from modules.auth import limiter, getRequestContext, RequestContext
|
||||
|
|
@ -13,9 +15,75 @@ from modules.features.automation2.mainAutomation2 import getAutomation2Services
|
|||
from modules.features.automation2.nodeRegistry import getNodeTypesForApi
|
||||
from modules.features.automation2.interfaceFeatureAutomation2 import getAutomation2Interface
|
||||
from modules.workflows.automation2.executionEngine import executeGraph
|
||||
from modules.workflows.automation2.runEnvelope import (
|
||||
default_run_envelope,
|
||||
merge_run_envelope,
|
||||
normalize_run_envelope,
|
||||
)
|
||||
from modules.features.automation2.entryPoints import find_invocation
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _build_execute_run_envelope(
    body: Dict[str, Any],
    workflow: Optional[Dict[str, Any]],
    user_id: Optional[str],
) -> Dict[str, Any]:
    """Build a normalized run envelope from a POST /execute request body.

    Resolution order:
      1. Explicit ``runEnvelope`` dict in the body (normalized as-is).
      2. ``entryPointId`` referencing an invocation on the saved workflow.
      3. Fallback: a default envelope for a manual trigger.

    In every case an optional top-level ``payload`` dict from the body is
    merged into the resulting envelope.

    Raises:
        HTTPException: 400 when ``entryPointId`` is given but there is no
            saved workflow, the id is unknown, or the entry point is disabled.
    """

    def _with_payload(env: Dict[str, Any]) -> Dict[str, Any]:
        # The optional "payload" dict is merged identically into every
        # envelope variant; centralized here so the three branches cannot drift.
        pl = body.get("payload")
        if isinstance(pl, dict):
            return merge_run_envelope(env, {"payload": pl})
        return env

    if isinstance(body.get("runEnvelope"), dict):
        return _with_payload(normalize_run_envelope(body["runEnvelope"], user_id=user_id))

    entry_point_id = body.get("entryPointId")
    if entry_point_id:
        if not workflow:
            raise HTTPException(
                status_code=400,
                detail="entryPointId requires a saved workflow (workflowId must refer to a stored workflow)",
            )
        inv = find_invocation(workflow, entry_point_id)
        if not inv:
            raise HTTPException(status_code=400, detail="entryPointId not found on workflow")
        if not inv.get("enabled", True):
            raise HTTPException(status_code=400, detail="entry point is disabled")

        # Map invocation kind to the envelope trigger type; unknown kinds
        # fall back to "manual".
        trig_map = {
            "manual": "manual",
            "form": "form",
            "schedule": "schedule",
            "always_on": "event",
            "email": "email",
            "webhook": "webhook",
            "api": "api",
            "event": "event",
        }
        trig = trig_map.get(inv.get("kind", "manual"), "manual")

        # Localized title (en preferred, then de) becomes the envelope label.
        title = inv.get("title") or {}
        label = ""
        if isinstance(title, dict):
            label = title.get("en") or title.get("de") or ""
        elif isinstance(title, str):
            label = title

        base = default_run_envelope(
            trig,
            entry_point_id=inv.get("id"),
            entry_point_label=label or None,
        )
        return normalize_run_envelope(_with_payload(base), user_id=user_id)

    return _with_payload(normalize_run_envelope(None, user_id=user_id))
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/automation2",
|
||||
tags=["Automation2"],
|
||||
|
|
@ -55,6 +123,26 @@ def get_automation2_info(
|
|||
}
|
||||
|
||||
|
||||
@router.post("/{instanceId}/schedule-sync")
@limiter.limit("10/minute")
def post_schedule_sync(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Manually trigger schedule sync (re-register cron jobs for all schedule workflows)."""
    _validateInstanceAccess(instanceId, context)
    # Local imports: sync runs under the system "event" user, not the caller.
    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.workflows.automation2.subAutomation2Schedule import sync_automation2_schedule_events

    event_user = getRootInterface().getUserByUsername("event")
    if not event_user:
        return {"success": False, "error": "Event user not available", "synced": 0}
    sync_result = sync_automation2_schedule_events(event_user)
    return {"success": True, **sync_result}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/node-types")
|
||||
@limiter.limit("60/minute")
|
||||
def get_node_types(
|
||||
|
|
@ -109,6 +197,10 @@ async def post_execute(
|
|||
graph = body.get("graph") or body
|
||||
workflowId = body.get("workflowId")
|
||||
req_nodes = graph.get("nodes") or []
|
||||
workflow_for_envelope: Optional[Dict[str, Any]] = None
|
||||
if workflowId and not str(workflowId).startswith("transient-"):
|
||||
a2_pre = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
workflow_for_envelope = a2_pre.getWorkflow(workflowId)
|
||||
# When workflowId is set: prefer graph from request (current editor state) if it has nodes.
|
||||
# Only fall back to stored workflow graph when request graph is empty (e.g. resume from email).
|
||||
if workflowId and len(req_nodes) == 0:
|
||||
|
|
@ -117,6 +209,7 @@ async def post_execute(
|
|||
if wf and wf.get("graph"):
|
||||
graph = wf["graph"]
|
||||
logger.info("automation2 execute: loaded graph from workflow %s", workflowId)
|
||||
workflow_for_envelope = wf
|
||||
# Use transient workflowId when none provided (e.g. execute from editor without save)
|
||||
# Required for email.checkEmail pause/resume - run must be created
|
||||
if not workflowId:
|
||||
|
|
@ -132,6 +225,8 @@ async def post_execute(
|
|||
workflowId,
|
||||
mandateId,
|
||||
)
|
||||
run_env = _build_execute_run_envelope(body, workflow_for_envelope, userId)
|
||||
|
||||
a2_interface = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
result = await executeGraph(
|
||||
graph=graph,
|
||||
|
|
@ -141,6 +236,7 @@ async def post_execute(
|
|||
userId=userId,
|
||||
mandateId=mandateId,
|
||||
automation2_interface=a2_interface,
|
||||
run_envelope=run_env,
|
||||
)
|
||||
logger.info(
|
||||
"automation2 execute result: success=%s error=%s nodeOutputs_keys=%s failedNode=%s paused=%s",
|
||||
|
|
@ -239,6 +335,7 @@ async def list_connection_services(
|
|||
services = provider.getAvailableServices()
|
||||
_serviceLabels = {
|
||||
"sharepoint": "SharePoint",
|
||||
"clickup": "ClickUp",
|
||||
"outlook": "Outlook",
|
||||
"teams": "Teams",
|
||||
"onedrive": "OneDrive",
|
||||
|
|
@ -248,6 +345,7 @@ async def list_connection_services(
|
|||
}
|
||||
_serviceIcons = {
|
||||
"sharepoint": "sharepoint",
|
||||
"clickup": "folder",
|
||||
"outlook": "mail",
|
||||
"teams": "chat",
|
||||
"onedrive": "cloud",
|
||||
|
|
@ -342,15 +440,17 @@ def _get_node_label_from_graph(graph: dict, nodeId: str) -> str:
|
|||
def get_workflows(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
active: Optional[bool] = Query(None, description="Filter by active: true|false"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""List all workflows for this feature instance.
|
||||
Enriches each workflow with runCount, isRunning, stuckAtNodeId, stuckAtNodeLabel,
|
||||
createdAt, lastStartedAt.
|
||||
Query param active: filter by active status (true|false).
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
|
||||
items = a2.getWorkflows()
|
||||
items = a2.getWorkflows(active=active)
|
||||
enriched = []
|
||||
for wf in items:
|
||||
wf_id = wf.get("id")
|
||||
|
|
@ -447,11 +547,163 @@ def delete_workflow(
|
|||
return {"success": True}
|
||||
|
||||
|
||||
@router.post("/{instanceId}/workflows/{workflowId}/webhooks/{entryPointId}")
@limiter.limit("60/minute")
async def post_workflow_webhook(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    workflowId: str = Path(..., description="Workflow ID"),
    entryPointId: str = Path(..., description="Entry point ID (kind must be webhook)"),
    body: dict = Body(default_factory=dict),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """
    Invoke a workflow via a webhook entry point.

    Optional shared secret in X-Automation2-Webhook-Secret or X-Webhook-Secret
    when config.webhookSecret is set on the entry point; the comparison is
    constant-time so response timing cannot leak the secret.

    Raises:
        HTTPException: 404 when the workflow or entry point is missing,
            400 when the entry point is not an enabled webhook,
            403 when the shared secret does not match.
    """
    import hmac  # local import: only needed for the secret comparison

    mandateId = _validateInstanceAccess(instanceId, context)
    userId = str(context.user.id) if context.user else None
    a2 = getAutomation2Interface(context.user, mandateId, instanceId)
    wf = a2.getWorkflow(workflowId)
    if not wf or not wf.get("graph"):
        raise HTTPException(status_code=404, detail="Workflow not found")
    inv = find_invocation(wf, entryPointId)
    if not inv:
        raise HTTPException(status_code=404, detail="Entry point not found")
    if inv.get("kind") != "webhook":
        raise HTTPException(status_code=400, detail="Entry point is not a webhook")
    if not inv.get("enabled", True):
        raise HTTPException(status_code=400, detail="Entry point is disabled")

    cfg = inv.get("config") or {}
    secret = cfg.get("webhookSecret")
    if secret:
        hdr = request.headers.get("X-Automation2-Webhook-Secret") or request.headers.get(
            "X-Webhook-Secret"
        )
        # Constant-time comparison: a plain != short-circuits on the first
        # differing byte and can leak the secret via timing on untrusted input.
        if not hdr or not hmac.compare_digest(
            hdr.encode("utf-8"), str(secret).encode("utf-8")
        ):
            raise HTTPException(status_code=403, detail="Invalid webhook secret")

    services = getAutomation2Services(
        context.user,
        mandateId=mandateId,
        featureInstanceId=instanceId,
    )
    from modules.workflows.processing.shared.methodDiscovery import discoverMethods

    discoverMethods(services)

    # Localized entry-point title (en, then de) becomes the envelope label.
    title = inv.get("title") or {}
    label = ""
    if isinstance(title, dict):
        label = title.get("en") or title.get("de") or ""
    elif isinstance(title, str):
        label = title
    pl = body if isinstance(body, dict) else {}
    base = default_run_envelope(
        "webhook",
        entry_point_id=inv.get("id"),
        entry_point_label=label or None,
        payload=pl,
        raw={"httpBody": body},
    )
    run_env = normalize_run_envelope(base, user_id=userId)

    result = await executeGraph(
        graph=wf["graph"],
        services=services,
        workflowId=workflowId,
        instanceId=instanceId,
        userId=userId,
        mandateId=mandateId,
        automation2_interface=a2,
        run_envelope=run_env,
    )
    return result
|
||||
|
||||
|
||||
@router.post("/{instanceId}/workflows/{workflowId}/forms/{entryPointId}/submit")
@limiter.limit("60/minute")
async def post_workflow_form_submit(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    workflowId: str = Path(..., description="Workflow ID"),
    entryPointId: str = Path(..., description="Entry point ID (kind must be form)"),
    body: dict = Body(default_factory=dict),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Form-style submit: same as execute with trigger.type form and payload from body."""
    mandateId = _validateInstanceAccess(instanceId, context)
    userId = str(context.user.id) if context.user else None
    a2 = getAutomation2Interface(context.user, mandateId, instanceId)

    workflow = a2.getWorkflow(workflowId)
    if not workflow or not workflow.get("graph"):
        raise HTTPException(status_code=404, detail="Workflow not found")

    entry = find_invocation(workflow, entryPointId)
    if not entry:
        raise HTTPException(status_code=404, detail="Entry point not found")
    if entry.get("kind") != "form":
        raise HTTPException(status_code=400, detail="Entry point is not a form")
    if not entry.get("enabled", True):
        raise HTTPException(status_code=400, detail="Entry point is disabled")

    services = getAutomation2Services(
        context.user,
        mandateId=mandateId,
        featureInstanceId=instanceId,
    )
    from modules.workflows.processing.shared.methodDiscovery import discoverMethods

    discoverMethods(services)

    # Localized entry-point title (en preferred, then de) becomes the label.
    title = entry.get("title") or {}
    if isinstance(title, dict):
        label = title.get("en") or title.get("de") or ""
    elif isinstance(title, str):
        label = title
    else:
        label = ""

    payload = body if isinstance(body, dict) else {}
    run_env = normalize_run_envelope(
        default_run_envelope(
            "form",
            entry_point_id=entry.get("id"),
            entry_point_label=label or None,
            payload=payload,
            raw={"formBody": body},
        ),
        user_id=userId,
    )

    return await executeGraph(
        graph=workflow["graph"],
        services=services,
        workflowId=workflowId,
        instanceId=instanceId,
        userId=userId,
        mandateId=mandateId,
        automation2_interface=a2,
        run_envelope=run_env,
    )
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Runs and Resume
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
|
||||
@router.get("/{instanceId}/runs/completed")
@limiter.limit("60/minute")
def get_completed_runs(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    limit: int = Query(20, ge=1, le=50),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Get recently completed runs with output (for Tasks page output section)."""
    mandateId = _validateInstanceAccess(instanceId, context)
    automation = getAutomation2Interface(context.user, mandateId, instanceId)
    return {"runs": automation.getRecentCompletedRuns(limit=limit)}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/workflows/{workflowId}/runs")
|
||||
@limiter.limit("60/minute")
|
||||
def get_workflow_runs(
|
||||
|
|
|
|||
|
|
@ -162,6 +162,7 @@ _SOURCE_TYPE_TO_SERVICE = {
|
|||
"googleDriveFolder": "drive",
|
||||
"gmailFolder": "gmail",
|
||||
"ftpFolder": "files",
|
||||
"clickupList": "clickup",
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
288
modules/routes/routeClickup.py
Normal file
288
modules/routes/routeClickup.py
Normal file
|
|
@ -0,0 +1,288 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp API routes — teams, hierarchy, lists, tasks (connection-scoped)."""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request, status
|
||||
from pydantic import BaseModel
|
||||
|
||||
from modules.auth import getCurrentUser, limiter
|
||||
from modules.datamodels.datamodelUam import AuthAuthority, User, UserConnection
|
||||
from modules.interfaces.interfaceDbApp import getInterface
|
||||
from modules.serviceHub import getInterface as getServices
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/clickup",
|
||||
tags=["ClickUp"],
|
||||
responses={
|
||||
404: {"description": "Not found"},
|
||||
400: {"description": "Bad request"},
|
||||
401: {"description": "Unauthorized"},
|
||||
500: {"description": "Internal server error"},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _getUserConnection(interface, connection_id: str, user_id: str) -> Optional[UserConnection]:
    """Return the user's connection with the given id, or None if absent or on error."""
    try:
        return next(
            (c for c in interface.getUserConnections(user_id) if c.id == connection_id),
            None,
        )
    except Exception as e:
        # Best-effort lookup: callers treat a failed fetch like "not found".
        logger.error(f"Error getting user connection: {e}")
        return None
|
||||
|
||||
|
||||
def _clickup_connection_or_404(interface, connection_id: str, user_id: str) -> UserConnection:
    """Resolve a connection and assert it belongs to the ClickUp authority.

    Raises:
        HTTPException: 404 when the connection does not exist, 400 when it
            exists but is not a ClickUp connection.
    """
    conn = _getUserConnection(interface, connection_id, user_id)
    if not conn:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Connection not found")
    # Authority may be an enum or a plain string depending on how it was loaded.
    raw = conn.authority
    authority = raw.value if hasattr(raw, "value") else str(raw)
    if authority.lower() != AuthAuthority.CLICKUP.value:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Connection is not a ClickUp connection",
        )
    return conn
|
||||
|
||||
|
||||
def _svc_for_connection(current_user: User, connection: UserConnection):
    """Return the ClickUp service bound to the connection's OAuth token (401 on failure)."""
    clickup = getServices(current_user, None).clickup
    if not clickup.setAccessTokenFromConnection(connection):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Failed to set ClickUp access token",
        )
    return clickup
|
||||
|
||||
|
||||
# --- Routes (prefix is /api/clickup; OAuth lives under /api/clickup/auth/* in routeSecurityClickup) ---
|
||||
|
||||
|
||||
@router.get("/{connectionId}/teams", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def get_teams(
    request: Request,
    connectionId: str = Path(..., description="ClickUp UserConnection id"),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the ClickUp workspaces (teams) authorized for this connection."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getAuthorizedTeams()
|
||||
|
||||
|
||||
@router.get("/{connectionId}/teams/{teamId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_team(
    request: Request,
    connectionId: str = Path(...),
    teamId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Workspace/team details including members (for assignee pickers)."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getTeam(teamId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/teams/{teamId}/spaces", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_spaces(
    request: Request,
    connectionId: str = Path(...),
    teamId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the spaces inside a ClickUp workspace/team."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getSpaces(teamId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/spaces/{spaceId}/folders", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_folders(
    request: Request,
    connectionId: str = Path(...),
    spaceId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the folders inside a ClickUp space."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getFolders(spaceId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/spaces/{spaceId}/lists", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_folderless_lists(
    request: Request,
    connectionId: str = Path(...),
    spaceId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the folderless lists that live directly under a ClickUp space."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getFolderlessLists(spaceId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/folders/{folderId}/lists", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_lists_in_folder(
    request: Request,
    connectionId: str = Path(...),
    folderId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the lists contained in a ClickUp folder."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getListsInFolder(folderId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/lists/{listId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_list(
    request: Request,
    connectionId: str = Path(...),
    listId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Fetch a single ClickUp list by id."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getList(listId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/lists/{listId}/fields", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_list_fields(
    request: Request,
    connectionId: str = Path(...),
    listId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Fetch the custom field definitions of a ClickUp list."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getListFields(listId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/lists/{listId}/tasks", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_list_tasks(
    request: Request,
    connectionId: str = Path(...),
    listId: str = Path(...),
    page: int = Query(0),
    include_closed: bool = Query(False),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Fetch one page of tasks from a ClickUp list (optionally including closed tasks)."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getTasksInList(listId, page=page, include_closed=include_closed)
|
||||
|
||||
|
||||
class TaskCreateBody(BaseModel):
    """POST body wrapper: ``body`` carries the raw ClickUp task-creation payload."""

    # Forwarded verbatim to the ClickUp create-task endpoint.
    body: Dict[str, Any]
|
||||
|
||||
|
||||
@router.post("/{connectionId}/lists/{listId}/tasks", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def create_list_task(
    request: Request,
    payload: TaskCreateBody,
    connectionId: str = Path(...),
    listId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Create a task in a ClickUp list from the wrapped request payload."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.createTask(listId, payload.body)
|
||||
|
||||
|
||||
class TaskUpdateBody(BaseModel):
    """PUT body wrapper: ``body`` carries the raw ClickUp task-update payload."""

    # Forwarded verbatim to the ClickUp update-task endpoint.
    body: Dict[str, Any]
|
||||
|
||||
|
||||
@router.get("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_task(
    request: Request,
    connectionId: str = Path(...),
    taskId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Fetch a single ClickUp task by id."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getTask(taskId)
|
||||
|
||||
|
||||
@router.put("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def update_task(
    request: Request,
    payload: TaskUpdateBody,
    connectionId: str = Path(...),
    taskId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Update a ClickUp task from the wrapped request payload."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.updateTask(taskId, payload.body)
|
||||
|
||||
|
||||
@router.delete("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def delete_task(
    request: Request,
    connectionId: str = Path(...),
    taskId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Delete a ClickUp task by id."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.deleteTask(taskId)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/teams/{teamId}/tasks/search", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def search_team_tasks(
    request: Request,
    connectionId: str = Path(...),
    teamId: str = Path(...),
    query: str = Query(..., description="Search query"),
    page: int = Query(0),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Search tasks across a ClickUp workspace/team, paginated."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.searchTeamTasks(teamId, query=query, page=page)
|
||||
|
||||
|
||||
@router.get("/{connectionId}/user", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def get_authorized_user(
    request: Request,
    connectionId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Return the ClickUp user profile the connection's token is authorized as."""
    db = getInterface(currentUser)
    connection = _clickup_connection_or_404(db, connectionId, currentUser.id)
    svc = _svc_for_connection(currentUser, connection)
    return await svc.getAuthorizedUser()
|
||||
|
|
@ -112,7 +112,8 @@ def get_auth_authority_options(
|
|||
authorityLabels = {
|
||||
"local": "Local",
|
||||
"google": "Google",
|
||||
"msft": "Microsoft"
|
||||
"msft": "Microsoft",
|
||||
"clickup": "ClickUp",
|
||||
}
|
||||
return [
|
||||
{"value": auth.value, "label": authorityLabels.get(auth.value, auth.value)}
|
||||
|
|
@ -347,7 +348,8 @@ def create_connection(
|
|||
# Map type to authority
|
||||
authority_map = {
|
||||
'msft': AuthAuthority.MSFT,
|
||||
'google': AuthAuthority.GOOGLE
|
||||
'google': AuthAuthority.GOOGLE,
|
||||
'clickup': AuthAuthority.CLICKUP,
|
||||
}
|
||||
|
||||
authority = authority_map.get(connection_data.get('type'))
|
||||
|
|
@ -493,6 +495,8 @@ def connect_service(
|
|||
auth_url = f"/api/msft/auth/connect?connectionId={quote(connectionId, safe='')}"
|
||||
elif connection.authority == AuthAuthority.GOOGLE:
|
||||
auth_url = f"/api/google/auth/connect?connectionId={quote(connectionId, safe='')}"
|
||||
elif connection.authority == AuthAuthority.CLICKUP:
|
||||
auth_url = f"/api/clickup/auth/connect?connectionId={quote(connectionId, safe='')}"
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
|
|
|
|||
280
modules/routes/routeSecurityClickup.py
Normal file
280
modules/routes/routeSecurityClickup.py
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp OAuth for data connections (UserConnection + Token)."""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Request, status, Depends, Query
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
import logging
|
||||
import json
|
||||
import time
|
||||
from typing import Dict, Any
|
||||
from urllib.parse import urlencode
|
||||
import httpx
|
||||
from jose import jwt as jose_jwt
|
||||
from jose import JWTError
|
||||
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.interfaces.interfaceDbApp import getInterface, getRootInterface
|
||||
from modules.datamodels.datamodelUam import AuthAuthority, User, ConnectionStatus, UserConnection
|
||||
from modules.datamodels.datamodelSecurity import Token, TokenPurpose
|
||||
from modules.auth import getCurrentUser, limiter, SECRET_KEY, ALGORITHM
|
||||
from modules.shared.timeUtils import createExpirationTimestamp, getUtcTimestamp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_FLOW_CONNECT = "clickup_connect"
|
||||
|
||||
CLICKUP_AUTH_BASE = "https://app.clickup.com/api"
|
||||
CLICKUP_API_BASE = "https://api.clickup.com/api/v2"
|
||||
|
||||
CLIENT_ID = APP_CONFIG.get("Service_CLICKUP_CLIENT_ID")
|
||||
CLIENT_SECRET = APP_CONFIG.get("Service_CLICKUP_CLIENT_SECRET")
|
||||
REDIRECT_URI = APP_CONFIG.get("Service_CLICKUP_OAUTH_REDIRECT_URI")
|
||||
|
||||
# ClickUp states OAuth access tokens do not expire today; store a long horizon for DB status.
|
||||
_CLICKUP_TOKEN_EXPIRES_IN_SEC = 10 * 365 * 24 * 3600
|
||||
|
||||
|
||||
def _issue_oauth_state(claims: Dict[str, Any]) -> str:
    """Sign *claims* into a short-lived (10 minute) JWT used as the OAuth state."""
    payload = dict(claims)
    payload["exp"] = int(time.time()) + 600
    return jose_jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)
|
||||
|
||||
|
||||
def _parse_oauth_state(state: str) -> Dict[str, Any]:
    """Decode and verify the signed OAuth ``state`` JWT; raise 400 on any failure."""
    try:
        claims = jose_jwt.decode(state, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail=f"Invalid OAuth state: {e}"
        ) from e
    return claims
||||
|
||||
|
||||
def _require_clickup_config() -> None:
    """Raise 500 unless all three ClickUp OAuth settings are configured."""
    if CLIENT_ID and CLIENT_SECRET and REDIRECT_URI:
        return
    raise HTTPException(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        detail="ClickUp OAuth is not configured (Service_CLICKUP_CLIENT_ID, Service_CLICKUP_CLIENT_SECRET, Service_CLICKUP_OAUTH_REDIRECT_URI)",
    )
|
||||
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/clickup",
|
||||
tags=["Security ClickUp"],
|
||||
responses={
|
||||
404: {"description": "Not found"},
|
||||
400: {"description": "Bad request"},
|
||||
401: {"description": "Unauthorized"},
|
||||
500: {"description": "Internal server error"},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/auth/connect")
@limiter.limit("5/minute")
def auth_connect(
    request: Request,
    connectionId: str = Query(..., description="UserConnection id"),
    currentUser: User = Depends(getCurrentUser),
) -> RedirectResponse:
    """Start ClickUp OAuth for an existing connection (requires gateway session)."""
    try:
        _require_clickup_config()
        interface = getInterface(currentUser)
        target = next(
            (
                c
                for c in interface.getUserConnections(currentUser.id)
                if c.id == connectionId and c.authority == AuthAuthority.CLICKUP
            ),
            None,
        )
        if target is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="ClickUp connection not found"
            )

        # Short-lived signed state ties the callback back to this user/connection.
        state_jwt = _issue_oauth_state(
            {
                "flow": _FLOW_CONNECT,
                "connectionId": connectionId,
                "userId": str(currentUser.id),
            }
        )
        params = urlencode(
            {
                "client_id": CLIENT_ID,
                "redirect_uri": REDIRECT_URI,
                "state": state_jwt,
            }
        )
        return RedirectResponse(f"{CLICKUP_AUTH_BASE}?{params}")
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error initiating ClickUp connect: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to initiate ClickUp connect: {str(e)}",
        )
|
||||
|
||||
|
||||
@router.get("/auth/connect/callback")
|
||||
async def auth_connect_callback(
|
||||
code: str = Query(...),
|
||||
state: str = Query(...),
|
||||
) -> HTMLResponse:
|
||||
"""OAuth callback for ClickUp data connection."""
|
||||
state_data = _parse_oauth_state(state)
|
||||
if state_data.get("flow") != _FLOW_CONNECT:
|
||||
raise HTTPException(status_code=400, detail="Invalid OAuth flow for this callback")
|
||||
connection_id = state_data.get("connectionId")
|
||||
user_id = state_data.get("userId")
|
||||
if not connection_id or not user_id:
|
||||
raise HTTPException(status_code=400, detail="Missing connection or user in OAuth state")
|
||||
|
||||
_require_clickup_config()
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
token_resp = await client.post(
|
||||
f"{CLICKUP_API_BASE}/oauth/token",
|
||||
json={
|
||||
"client_id": CLIENT_ID,
|
||||
"client_secret": CLIENT_SECRET,
|
||||
"code": code,
|
||||
},
|
||||
headers={"Content-Type": "application/json"},
|
||||
timeout=30.0,
|
||||
)
|
||||
if token_resp.status_code != 200:
|
||||
logger.error(f"ClickUp token exchange failed: {token_resp.status_code} {token_resp.text}")
|
||||
return HTMLResponse(
|
||||
content=f"<html><body><h1>Connection Failed</h1><p>{token_resp.text}</p></body></html>",
|
||||
status_code=400,
|
||||
)
|
||||
token_json = token_resp.json()
|
||||
access_token = token_json.get("access_token")
|
||||
if not access_token:
|
||||
return HTMLResponse(
|
||||
content="<html><body><h1>Connection Failed</h1><p>No access token.</p></body></html>",
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
user_resp = await client.get(
|
||||
f"{CLICKUP_API_BASE}/user",
|
||||
headers={
|
||||
"Authorization": f"Bearer {access_token}",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
timeout=30.0,
|
||||
)
|
||||
if user_resp.status_code != 200:
|
||||
logger.error(f"ClickUp user failed: {user_resp.status_code} {user_resp.text}")
|
||||
return HTMLResponse(
|
||||
content="<html><body><h1>Connection Failed</h1><p>Could not load ClickUp user.</p></body></html>",
|
||||
status_code=400,
|
||||
)
|
||||
user_payload = user_resp.json()
|
||||
cu_user = user_payload.get("user") or {}
|
||||
|
||||
rootInterface = getRootInterface()
|
||||
user = rootInterface.getUser(user_id)
|
||||
if not user:
|
||||
return HTMLResponse(
|
||||
content="""
|
||||
<html><body><script>
|
||||
if (window.opener) {
|
||||
window.opener.postMessage({ type: 'clickup_connection_error', error: 'User not found' }, '*');
|
||||
setTimeout(() => window.close(), 1000);
|
||||
} else window.close();
|
||||
</script></body></html>
|
||||
""",
|
||||
status_code=404,
|
||||
)
|
||||
|
||||
interface = getInterface(user)
|
||||
connections = interface.getUserConnections(user_id)
|
||||
connection = None
|
||||
for conn in connections:
|
||||
if conn.id == connection_id:
|
||||
connection = conn
|
||||
break
|
||||
if not connection:
|
||||
return HTMLResponse(
|
||||
content="""
|
||||
<html><body><script>
|
||||
if (window.opener) {
|
||||
window.opener.postMessage({ type: 'clickup_connection_error', error: 'Connection not found' }, '*');
|
||||
setTimeout(() => window.close(), 1000);
|
||||
} else window.close();
|
||||
</script></body></html>
|
||||
""",
|
||||
status_code=404,
|
||||
)
|
||||
|
||||
ext_id = str(cu_user.get("id", "")) if cu_user.get("id") is not None else ""
|
||||
username = cu_user.get("username") or cu_user.get("email") or ext_id
|
||||
email = cu_user.get("email")
|
||||
|
||||
expires_at = createExpirationTimestamp(_CLICKUP_TOKEN_EXPIRES_IN_SEC)
|
||||
|
||||
try:
|
||||
connection.status = ConnectionStatus.ACTIVE
|
||||
connection.lastChecked = getUtcTimestamp()
|
||||
connection.expiresAt = expires_at
|
||||
connection.externalId = ext_id
|
||||
connection.externalUsername = username
|
||||
if email:
|
||||
connection.externalEmail = email
|
||||
connection.grantedScopes = None
|
||||
rootInterface.db.recordModify(UserConnection, connection_id, connection.model_dump())
|
||||
|
||||
token = Token(
|
||||
userId=user.id,
|
||||
authority=AuthAuthority.CLICKUP,
|
||||
connectionId=connection_id,
|
||||
tokenPurpose=TokenPurpose.DATA_CONNECTION,
|
||||
tokenAccess=access_token,
|
||||
tokenRefresh=None,
|
||||
tokenType="bearer",
|
||||
expiresAt=expires_at,
|
||||
createdAt=getUtcTimestamp(),
|
||||
)
|
||||
interface.saveConnectionToken(token)
|
||||
|
||||
return HTMLResponse(
|
||||
content=f"""
|
||||
<html>
|
||||
<head><title>Connection Successful</title></head>
|
||||
<body>
|
||||
<script>
|
||||
if (window.opener) {{
|
||||
window.opener.postMessage({{
|
||||
type: 'clickup_connection_success',
|
||||
connection: {{
|
||||
id: '{connection.id}',
|
||||
status: 'connected',
|
||||
type: 'clickup',
|
||||
lastChecked: {getUtcTimestamp()},
|
||||
expiresAt: {expires_at}
|
||||
}}
|
||||
}}, '*');
|
||||
setTimeout(() => window.close(), 1000);
|
||||
}} else {{
|
||||
window.close();
|
||||
}}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating ClickUp connection: {str(e)}", exc_info=True)
|
||||
return HTMLResponse(
|
||||
content=f"""
|
||||
<html><body><script>
|
||||
if (window.opener) {{
|
||||
window.opener.postMessage({{ type: 'clickup_connection_error', error: {json.dumps(str(e))} }}, '*');
|
||||
setTimeout(() => window.close(), 1000);
|
||||
}} else window.close();
|
||||
</script></body></html>
|
||||
""",
|
||||
status_code=500,
|
||||
)
|
||||
|
|
@ -63,6 +63,13 @@ IMPORTABLE_SERVICES: Dict[str, Dict[str, Any]] = {
|
|||
"objectKey": "service.sharepoint",
|
||||
"label": {"en": "SharePoint", "de": "SharePoint", "fr": "SharePoint"},
|
||||
},
|
||||
"clickup": {
|
||||
"module": "modules.serviceCenter.services.serviceClickup.mainServiceClickup",
|
||||
"class": "ClickupService",
|
||||
"dependencies": ["security"],
|
||||
"objectKey": "service.clickup",
|
||||
"label": {"en": "ClickUp", "de": "ClickUp", "fr": "ClickUp"},
|
||||
},
|
||||
"chat": {
|
||||
"module": "modules.serviceCenter.services.serviceChat.mainServiceChat",
|
||||
"class": "ChatService",
|
||||
|
|
|
|||
|
|
@ -1552,6 +1552,7 @@ def _registerCoreTools(registry: ToolRegistry, services):
|
|||
"googleDriveFolder": "drive",
|
||||
"gmailFolder": "gmail",
|
||||
"ftpFolder": "files",
|
||||
"clickupList": "clickup",
|
||||
}
|
||||
|
||||
async def _resolveDataSource(dsId: str):
|
||||
|
|
|
|||
|
|
@ -0,0 +1,7 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp service."""
|
||||
|
||||
from .mainServiceClickup import ClickupService, clickup_authorization_header
|
||||
|
||||
__all__ = ["ClickupService", "clickup_authorization_header"]
|
||||
|
|
@ -0,0 +1,223 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp API service (OAuth or personal token via UserConnection)."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import asyncio
|
||||
from typing import Any, Callable, Dict, List, Optional, Union
|
||||
|
||||
import aiohttp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_CLICKUP_API_BASE = "https://api.clickup.com/api/v2"
|
||||
|
||||
|
||||
def clickup_authorization_header(token: str) -> str:
    """ClickUp: personal tokens are `pk_...` without Bearer; OAuth uses Bearer."""
    cleaned = (token or "").strip()
    return cleaned if cleaned.startswith("pk_") else f"Bearer {cleaned}"
|
||||
|
||||
|
||||
class ClickupService:
    """ClickUp REST API v2 — teams, hierarchy, lists as tables (tasks + custom fields).

    Thin async wrapper around the ClickUp v2 REST API. The token is loaded
    either from a UserConnection (via the security service) or set directly.
    All helpers return parsed JSON; failures are reported in-band as
    {"error": ...} dicts rather than raised exceptions.
    """

    def __init__(self, context, get_service: Callable[[str], Any]):
        # context: service-container context (held but not used directly here).
        # get_service: resolver for sibling services (e.g. "security").
        self._context = context
        self._get_service = get_service
        # OAuth access token or personal `pk_...` token; None until set.
        self.accessToken: Optional[str] = None

    def setAccessTokenFromConnection(self, userConnection) -> bool:
        """Load OAuth/personal token from SecurityService for this UserConnection.

        Returns True when a token was found and stored; False (with an error
        log) on any failure. Never raises.
        """
        try:
            if not userConnection:
                logger.error("UserConnection is required to set access token")
                return False
            # Accept either a dict payload or a model object exposing `.id`.
            if isinstance(userConnection, dict):
                connection_id = userConnection.get("id")
            else:
                connection_id = getattr(userConnection, "id", None)
            if not connection_id:
                logger.error("UserConnection must have an 'id' field")
                return False
            security = self._get_service("security")
            if not security:
                logger.error("Security service not available for token access")
                return False
            # NOTE(review): assumes getFreshToken handles refresh/expiry — confirm in SecurityService.
            token = security.getFreshToken(connection_id)
            if not token:
                logger.error(f"No token found for connection {connection_id}")
                return False
            self.accessToken = token.tokenAccess
            return True
        except Exception as e:
            logger.error(f"Error setting ClickUp access token: {e}")
            return False

    def setAccessToken(self, token: str) -> None:
        """Set token directly (e.g. connector adapter)."""
        self.accessToken = token

    async def _request(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        json_body: Optional[Dict[str, Any]] = None,
        data: Optional[aiohttp.FormData] = None,
    ) -> Union[Dict[str, Any], List[Any], bytes, None]:
        """Perform one API call against /api/v2 and return parsed JSON.

        Returns {} for empty/204 responses, {"error": ...} on HTTP/transport
        errors, and {"raw": text} when the body is not valid JSON.
        """
        if not self.accessToken:
            return {"error": "Access token is not set. Call setAccessTokenFromConnection first."}
        url = f"{_CLICKUP_API_BASE}/{path.lstrip('/')}"
        headers: Dict[str, str] = {
            # Personal `pk_` tokens go bare; OAuth tokens get a Bearer prefix.
            "Authorization": clickup_authorization_header(self.accessToken),
        }
        if json_body is not None:
            headers["Content-Type"] = "application/json"

        timeout = aiohttp.ClientTimeout(total=60)
        try:
            async with aiohttp.ClientSession(timeout=timeout) as session:
                kwargs: Dict[str, Any] = {"headers": headers, "params": params}
                if json_body is not None:
                    kwargs["json"] = json_body
                if data is not None:
                    kwargs["data"] = data

                async with session.request(method.upper(), url, **kwargs) as resp:
                    if resp.status == 204:
                        return {}
                    text = await resp.text()
                    if resp.status >= 400:
                        # 404 on GET is common (wrong id / preview) — avoid ERROR noise in logs
                        log = logger.warning if resp.status == 404 else logger.error
                        log(f"ClickUp API {method} {url} -> {resp.status}: {text[:500]}")
                        return {"error": f"HTTP {resp.status}", "body": text}
                    if not text:
                        return {}
                    try:
                        return json.loads(text)
                    except Exception:
                        # Non-JSON success body: hand the caller the raw text.
                        return {"raw": text}
        except asyncio.TimeoutError:
            return {"error": f"ClickUp API timeout: {path}"}
        except Exception as e:
            logger.error(f"ClickUp API error: {e}")
            return {"error": str(e)}

    async def requestRaw(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, Any]] = None,
        json_body: Optional[Dict[str, Any]] = None,
    ) -> Union[Dict[str, Any], List[Any], None]:
        """Escape hatch: call any v2 path under /api/v2 (path without leading /api/v2)."""
        return await self._request(method, path, params=params, json_body=json_body)

    # --- Teams / user ---

    async def getAuthorizedUser(self) -> Dict[str, Any]:
        """Return the user the current token is authorized as."""
        return await self._request("GET", "/user")

    async def getAuthorizedTeams(self) -> Dict[str, Any]:
        """Return the workspaces (teams) visible to the current token."""
        return await self._request("GET", "/team")

    async def getTeam(self, team_id: str) -> Dict[str, Any]:
        """Return a single workspace (team) by id."""
        return await self._request("GET", f"/team/{team_id}")

    # --- Hierarchy ---

    async def getSpaces(self, team_id: str) -> Dict[str, Any]:
        """List spaces in a workspace."""
        return await self._request("GET", f"/team/{team_id}/space")

    async def getSpace(self, space_id: str) -> Dict[str, Any]:
        """Return a single space by id."""
        return await self._request("GET", f"/space/{space_id}")

    async def getFolders(self, space_id: str) -> Dict[str, Any]:
        """List folders in a space."""
        return await self._request("GET", f"/space/{space_id}/folder")

    async def getFolder(self, folder_id: str) -> Dict[str, Any]:
        """Return a single folder by id."""
        return await self._request("GET", f"/folder/{folder_id}")

    async def getListsInFolder(self, folder_id: str) -> Dict[str, Any]:
        """List the lists inside a folder."""
        return await self._request("GET", f"/folder/{folder_id}/list")

    async def getFolderlessLists(self, space_id: str) -> Dict[str, Any]:
        """List the lists that live directly in a space (no folder)."""
        return await self._request("GET", f"/space/{space_id}/list")

    async def getList(self, list_id: str) -> Dict[str, Any]:
        """Return a single list by id."""
        return await self._request("GET", f"/list/{list_id}")

    async def getListFields(self, list_id: str) -> Dict[str, Any]:
        """Return the custom fields defined on a list."""
        return await self._request("GET", f"/list/{list_id}/field")

    # --- Tasks (rows) ---

    async def getTasksInList(
        self,
        list_id: str,
        *,
        page: int = 0,
        include_closed: bool = False,
        subtasks: bool = True,
    ) -> Dict[str, Any]:
        """Fetch one page of tasks from a list.

        Booleans are serialized as lowercase strings because the ClickUp query
        API expects "true"/"false", not Python bool reprs.
        """
        params: Dict[str, Any] = {
            "page": page,
            "subtasks": str(subtasks).lower(),
            "include_closed": str(include_closed).lower(),
        }
        return await self._request("GET", f"/list/{list_id}/task", params=params)

    async def getTask(self, task_id: str, *, include_subtasks: bool = True) -> Dict[str, Any]:
        """Return a single task, optionally with its subtasks."""
        params = {"include_subtasks": str(include_subtasks).lower()}
        return await self._request("GET", f"/task/{task_id}", params=params)

    async def createTask(self, list_id: str, body: Dict[str, Any]) -> Dict[str, Any]:
        """Create a task in a list; `body` follows the ClickUp task schema."""
        return await self._request("POST", f"/list/{list_id}/task", json_body=body)

    async def updateTask(self, task_id: str, body: Dict[str, Any]) -> Dict[str, Any]:
        """Update fields on an existing task."""
        return await self._request("PUT", f"/task/{task_id}", json_body=body)

    async def deleteTask(self, task_id: str) -> Dict[str, Any]:
        """Delete a task by id."""
        return await self._request("DELETE", f"/task/{task_id}")

    async def searchTeamTasks(
        self,
        team_id: str,
        *,
        query: str,
        page: int = 0,
    ) -> Dict[str, Any]:
        """Search tasks in a workspace (team)."""
        params = {"query": query, "page": page}
        return await self._request("GET", f"/team/{team_id}/task", params=params)

    async def uploadTaskAttachment(self, task_id: str, file_bytes: bytes, file_name: str) -> Dict[str, Any]:
        """Upload a file attachment to a task (multipart).

        Handled outside _request because it posts multipart form data and uses
        a longer (120s) timeout for large files.
        """
        if not self.accessToken:
            return {"error": "Access token is not set."}
        url = f"{_CLICKUP_API_BASE}/task/{task_id}/attachment"
        headers = {"Authorization": clickup_authorization_header(self.accessToken)}
        data = aiohttp.FormData()
        data.add_field(
            "attachment",
            file_bytes,
            filename=file_name,
            content_type="application/octet-stream",
        )
        timeout = aiohttp.ClientTimeout(total=120)
        try:
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.post(url, headers=headers, data=data) as resp:
                    text = await resp.text()
                    if resp.status >= 400:
                        return {"error": f"HTTP {resp.status}", "body": text}
                    return json.loads(text) if text else {}
        except Exception as e:
            return {"error": str(e)}
|
||||
|
|
@ -125,9 +125,12 @@ class RendererDocx(BaseRenderer):
|
|||
self.logger.debug(f"_generateDocxFromJson: Document created in {time.time() - start_time:.2f}s")
|
||||
|
||||
# Get style set: use styles from metadata if available, otherwise enhance with AI
|
||||
template_from_metadata = None
|
||||
if json_content and isinstance(json_content.get("metadata"), dict):
|
||||
template_from_metadata = json_content["metadata"].get("templateName")
|
||||
style_start = time.time()
|
||||
self.logger.debug("_generateDocxFromJson: About to get style set")
|
||||
styleSet = await self._getStyleSet(json_content, userPrompt, aiService)
|
||||
styleSet = await self._getStyleSet(json_content, userPrompt, aiService, templateName=template_from_metadata)
|
||||
self.logger.debug(f"_generateDocxFromJson: Style set retrieved in {time.time() - style_start:.2f}s")
|
||||
|
||||
# Setup basic document styles and create all styles from style set
|
||||
|
|
|
|||
|
|
@ -4,6 +4,8 @@
|
|||
Text renderer for report generation.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from .documentRendererBaseTemplate import BaseRenderer
|
||||
from modules.datamodels.datamodelDocument import RenderedDocument
|
||||
from typing import Dict, Any, List, Optional
|
||||
|
|
@ -93,9 +95,13 @@ class RendererText(BaseRenderer):
|
|||
metadata = extractedContent.get("metadata", {}) if extractedContent else {}
|
||||
documentType = metadata.get("documentType") if isinstance(metadata, dict) else None
|
||||
|
||||
# UTF-8 BOM helps editors/browsers recognize encoding (fixes grössten → grössten)
|
||||
text_bytes = textContent.encode('utf-8')
|
||||
if not text_bytes.startswith(b'\xef\xbb\xbf'):
|
||||
text_bytes = b'\xef\xbb\xbf' + text_bytes
|
||||
return [
|
||||
RenderedDocument(
|
||||
documentData=textContent.encode('utf-8'),
|
||||
documentData=text_bytes,
|
||||
mimeType="text/plain",
|
||||
filename=filename,
|
||||
documentType=documentType,
|
||||
|
|
@ -276,7 +282,7 @@ class RendererText(BaseRenderer):
|
|||
return ""
|
||||
|
||||
def _renderJsonBulletList(self, listData: Dict[str, Any]) -> str:
|
||||
"""Render a JSON bullet list to text."""
|
||||
"""Render a JSON bullet list to text. Strips markdown from item text."""
|
||||
try:
|
||||
# Extract from nested content structure: element.content.{items}
|
||||
content = listData.get("content", {})
|
||||
|
|
@ -290,9 +296,9 @@ class RendererText(BaseRenderer):
|
|||
textParts = []
|
||||
for item in items:
|
||||
if isinstance(item, str):
|
||||
textParts.append(f"- {item}")
|
||||
textParts.append(f"- {self._stripMarkdownForPlainText(item)}")
|
||||
elif isinstance(item, dict) and "text" in item:
|
||||
textParts.append(f"- {item['text']}")
|
||||
textParts.append(f"- {self._stripMarkdownForPlainText(item['text'])}")
|
||||
|
||||
return '\n'.join(textParts)
|
||||
|
||||
|
|
@ -301,13 +307,13 @@ class RendererText(BaseRenderer):
|
|||
return ""
|
||||
|
||||
def _renderJsonHeading(self, headingData: Dict[str, Any]) -> str:
|
||||
"""Render a JSON heading to text."""
|
||||
"""Render a JSON heading to text. Strips markdown from heading text."""
|
||||
try:
|
||||
# Extract from nested content structure: element.content.{text, level}
|
||||
content = headingData.get("content", {})
|
||||
if not isinstance(content, dict):
|
||||
return ""
|
||||
text = content.get("text", "")
|
||||
text = self._stripMarkdownForPlainText(content.get("text", ""))
|
||||
level = content.get("level", 1)
|
||||
|
||||
if text:
|
||||
|
|
@ -325,8 +331,22 @@ class RendererText(BaseRenderer):
|
|||
self.logger.warning(f"Error rendering heading: {str(e)}")
|
||||
return ""
|
||||
|
||||
def _stripMarkdownForPlainText(self, text: str) -> str:
|
||||
"""Strip markdown formatting for plain text output (**bold** -> bold, *italic* -> italic)."""
|
||||
if not text:
|
||||
return ""
|
||||
# **bold** and __bold__ -> plain
|
||||
text = re.sub(r'\*\*(.+?)\*\*', r'\1', text)
|
||||
text = re.sub(r'__(.+?)__', r'\1', text)
|
||||
# *italic* and _italic_ -> plain
|
||||
text = re.sub(r'(?<!\*)\*(?!\*)(.+?)(?<!\*)\*(?!\*)', r'\1', text)
|
||||
text = re.sub(r'(?<!_)_(?!_)([^_]+)(?<!_)_(?!_)', r'\1', text)
|
||||
# `code` -> plain
|
||||
text = re.sub(r'`([^`]+)`', r'\1', text)
|
||||
return text.strip()
|
||||
|
||||
def _renderJsonParagraph(self, paragraphData: Dict[str, Any]) -> str:
|
||||
"""Render a JSON paragraph to text."""
|
||||
"""Render a JSON paragraph to text. Strips markdown for plain text output."""
|
||||
try:
|
||||
# Extract from nested content structure
|
||||
content = paragraphData.get("content", {})
|
||||
|
|
@ -336,7 +356,7 @@ class RendererText(BaseRenderer):
|
|||
text = content
|
||||
else:
|
||||
text = ""
|
||||
return text if text else ""
|
||||
return self._stripMarkdownForPlainText(text) if text else ""
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Error rendering paragraph: {str(e)}")
|
||||
|
|
|
|||
|
|
@ -3,10 +3,155 @@
|
|||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from typing import Any, Dict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def markdownToDocumentJson(markdown: str, title: str, language: str = "de") -> Dict[str, Any]:
    """
    Convert markdown content to the standard document JSON format expected by renderReport.
    Supports headings, code blocks, tables, lists, images (file: refs), paragraphs.
    For plain text: wraps entire content in a single paragraph section.
    """
    if not isinstance(markdown, str):
        markdown = str(markdown) if markdown else ""

    sections = []
    order = 0  # incremented by _nextId; doubles as the section "order" value
    lines = markdown.split("\n")
    i = 0  # cursor into `lines`; each branch below advances it past what it consumed

    def _nextId():
        # Generates "s_1", "s_2", ... and bumps the shared `order` counter.
        nonlocal order
        order += 1
        return f"s_{order}"

    while i < len(lines):
        line = lines[i]

        # Headings: "# ..." through "###### ..."
        headingMatch = re.match(r"^(#{1,6})\s+(.+)", line)
        if headingMatch:
            level = len(headingMatch.group(1))
            text = headingMatch.group(2).strip()
            sections.append({
                "id": _nextId(), "content_type": "heading", "order": order,
                "elements": [{"content": {"text": text, "level": level}}],
            })
            i += 1
            continue

        # Fenced code blocks: ```lang ... ``` (closing fence consumed too)
        codeMatch = re.match(r"^```(\w*)", line)
        if codeMatch:
            lang = codeMatch.group(1) or "text"
            codeLines = []
            i += 1
            while i < len(lines) and not lines[i].startswith("```"):
                codeLines.append(lines[i])
                i += 1
            i += 1  # skip the closing fence (or run off the end if unterminated)
            sections.append({
                "id": _nextId(), "content_type": "code_block", "order": order,
                "elements": [{"content": {"code": "\n".join(codeLines), "language": lang}}],
            })
            continue

        # Tables: a |...| row followed by a |---|:--| separator line
        tableMatch = re.match(r"^\|(.+)\|$", line)
        if tableMatch and (i + 1) < len(lines) and re.match(r"^\|[\s\-:|]+\|$", lines[i + 1]):
            headerCells = [c.strip() for c in tableMatch.group(1).split("|")]
            i += 2  # skip header + separator
            rows = []
            while i < len(lines) and re.match(r"^\|(.+)\|$", lines[i]):
                # [1:-1] drops the leading/trailing pipe before splitting cells
                rowCells = [c.strip() for c in lines[i][1:-1].split("|")]
                rows.append(rowCells)
                i += 1
            sections.append({
                "id": _nextId(), "content_type": "table", "order": order,
                "elements": [{"content": {"headers": headerCells, "rows": rows}}],
            })
            continue

        # Bullet / numbered lists: "-", "*", "+" or "1." / "1)" markers.
        # List type is decided by the FIRST item; consecutive items are merged.
        listMatch = re.match(r"^(\s*)([-*+]|\d+[.)]) (.+)", line)
        if listMatch:
            isNumbered = bool(re.match(r"\d+[.)]", listMatch.group(2)))
            items = []
            while i < len(lines) and re.match(r"^(\s*)([-*+]|\d+[.)]) (.+)", lines[i]):
                m = re.match(r"^(\s*)([-*+]|\d+[.)]) (.+)", lines[i])
                items.append({"text": m.group(3).strip()})
                i += 1
            sections.append({
                "id": _nextId(), "content_type": "bullet_list", "order": order,
                "elements": [{"content": {"items": items, "list_type": "numbered" if isNumbered else "bullet"}}],
            })
            continue

        # Empty lines separate sections; they produce nothing themselves.
        if not line.strip():
            i += 1
            continue

        # Images (simplified: store as paragraph with ref for now - full resolution needs Knowledge Store)
        imgMatch = re.match(r"^!\[([^\]]*)\]\(([^)]+)\)", line)
        if imgMatch:
            altText = imgMatch.group(1).strip() or "Image"
            src = imgMatch.group(2).strip()
            # "file:<id>" refs are split out; other URLs kept in _srcUrl.
            fileId = src[5:] if src.startswith("file:") else ""
            sections.append({
                "id": _nextId(), "content_type": "image", "order": order,
                "elements": [{
                    "content": {
                        "altText": altText,
                        "base64Data": "",
                        "_fileRef": fileId,
                        "_srcUrl": src if not fileId else "",
                    }
                }],
            })
            i += 1
            continue

        # Paragraph: consume consecutive non-empty lines until the next block
        # marker (heading/fence/table/image/list) and join them with spaces.
        paraLines = []
        while i < len(lines) and lines[i].strip() and not re.match(
            r"^(#{1,6}\s|```|\|.+\||!\[|(\s*)([-*+]|\d+[.)]) )", lines[i]
        ):
            paraLines.append(lines[i])
            i += 1
        if paraLines:
            sections.append({
                "id": _nextId(), "content_type": "paragraph", "order": order,
                "elements": [{"content": {"text": " ".join(paraLines)}}],
            })
            continue

        # Safety net: the line matched a block marker but no branch consumed it.
        i += 1

    # Guarantee at least one section so renderers never see an empty document.
    if not sections:
        sections.append({
            "id": _nextId(), "content_type": "paragraph", "order": order,
            "elements": [{"content": {"text": markdown.strip() or "(empty)"}}],
        })

    return {
        "metadata": {
            "split_strategy": "single_document",
            "source_documents": [],
            "extraction_method": "file_create_rendering",
            "title": title,
            "language": language,
        },
        "documents": [{
            "id": "doc_1",
            "title": title,
            "sections": sections,
        }],
    }
|
||||
|
||||
def getFileExtension(fileName: str) -> str:
|
||||
"""Extract file extension from fileName (without dot, lowercased)."""
|
||||
if '.' in fileName:
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Callable, Optional, Dict, Any
|
||||
|
||||
|
|
@ -25,14 +26,28 @@ class EventManagement:
|
|||
def __init__(self, timezone: str = "Europe/Zurich"):
|
||||
self._timezone = ZoneInfo(timezone)
|
||||
self._scheduler: Optional[AsyncIOScheduler] = None
|
||||
self._event_loop: Optional[asyncio.AbstractEventLoop] = None
|
||||
|
||||
def set_event_loop(self, loop: asyncio.AbstractEventLoop) -> None:
|
||||
"""Set the event loop for the scheduler (call from lifespan before start)."""
|
||||
self._event_loop = loop
|
||||
|
||||
@property
|
||||
def scheduler(self) -> AsyncIOScheduler:
|
||||
if self._scheduler is None:
|
||||
self._scheduler = AsyncIOScheduler(timezone=self._timezone)
|
||||
kwargs = {"timezone": self._timezone}
|
||||
if self._event_loop is not None:
|
||||
kwargs["event_loop"] = self._event_loop
|
||||
self._scheduler = AsyncIOScheduler(**kwargs)
|
||||
return self._scheduler
|
||||
|
||||
def start(self) -> None:
|
||||
if self._event_loop is None:
|
||||
try:
|
||||
self._event_loop = asyncio.get_running_loop()
|
||||
logger.debug("EventManagement: using get_running_loop()")
|
||||
except RuntimeError:
|
||||
pass
|
||||
if not self.scheduler.running:
|
||||
self.scheduler.start()
|
||||
logger.info("EventManagement scheduler started")
|
||||
|
|
@ -90,10 +105,18 @@ class EventManagement:
|
|||
) -> None:
|
||||
"""
|
||||
Register a job using IntervalTrigger.
|
||||
Only passes non-None interval components (IntervalTrigger fails on None).
|
||||
"""
|
||||
trigger = IntervalTrigger(
|
||||
seconds=seconds, minutes=minutes, hours=hours, timezone=self._timezone
|
||||
)
|
||||
trigger_kwargs: Dict[str, Any] = {"timezone": self._timezone}
|
||||
if seconds is not None:
|
||||
trigger_kwargs["seconds"] = seconds
|
||||
if minutes is not None:
|
||||
trigger_kwargs["minutes"] = minutes
|
||||
if hours is not None:
|
||||
trigger_kwargs["hours"] = hours
|
||||
if len(trigger_kwargs) <= 1:
|
||||
raise ValueError("At least one of seconds, minutes, hours must be provided")
|
||||
trigger = IntervalTrigger(**trigger_kwargs)
|
||||
self.scheduler.add_job(
|
||||
func,
|
||||
trigger,
|
||||
|
|
|
|||
174
modules/workflows/automation2/clickupTaskUpdateMerge.py
Normal file
174
modules/workflows/automation2/clickupTaskUpdateMerge.py
Normal file
|
|
@ -0,0 +1,174 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Merge clickup.updateTask node parameter taskUpdateEntries into taskUpdate JSON.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _unwrap_value(v: Any) -> Any:
|
||||
if isinstance(v, dict) and v.get("type") == "value" and "value" in v:
|
||||
return v.get("value")
|
||||
return v
|
||||
|
||||
|
||||
def _unwrap_dynamic(v: Any) -> Any:
    """Unwrap a dynamic-parameter value; currently identical to _unwrap_value."""
    return _unwrap_value(v)
|
||||
|
||||
|
||||
def _parse_int_list(val: Any) -> List[int]:
    """Coerce a JSON-array string or a list into a list of ints.

    None/blank entries are skipped; any parse failure yields [] (string input)
    or drops the offending entry (list input).
    """
    if val is None:
        return []
    val = _unwrap_value(val)

    if isinstance(val, str) and val.strip():
        # String input must decode to a JSON list; any failure -> [].
        try:
            decoded = json.loads(val)
            if isinstance(decoded, list):
                return [
                    int(entry)
                    for entry in decoded
                    if entry is not None and str(entry).strip() != ""
                ]
        except (json.JSONDecodeError, ValueError, TypeError):
            return []

    if isinstance(val, list):
        result: List[int] = []
        for entry in val:
            if entry is None or (isinstance(entry, str) and not entry.strip()):
                continue
            try:
                result.append(int(entry))
            except (ValueError, TypeError):
                continue
        return result

    return []
|
||||
|
||||
|
||||
def _parse_due_date_ms(v: Any) -> Optional[int]:
    """Parse a due date into epoch milliseconds (UTC).

    Accepts either an ISO-style date string ("YYYY-MM-DD", a trailing time
    suffix is ignored) or a numeric epoch-milliseconds value (number or
    numeric string). Returns None for empty, invalid, or non-positive input.
    """
    v = _unwrap_value(v)
    if v is None or v == "":
        return None
    # Cheap shape check for "YYYY-MM-DD..." before attempting a date parse.
    if isinstance(v, str) and len(v) >= 10 and v[4] == "-" and v[7] == "-":
        try:
            dt = datetime.strptime(v[:10], "%Y-%m-%d").replace(tzinfo=timezone.utc)
            return int(dt.timestamp() * 1000)
        except ValueError:
            pass
    try:
        i = int(float(v))
        return i if i > 0 else None
    except (ValueError, TypeError, OverflowError):
        # OverflowError: int(float("inf")) / int(float("-inf")) — previously
        # uncaught, crashing on inputs like "inf"/"Infinity".
        return None
|
||||
|
||||
|
||||
def _parse_time_estimate_hours_to_ms(v: Any) -> Optional[int]:
    """Convert an hours value (number or numeric string) to milliseconds.

    Returns None for empty, negative, or non-numeric input.
    """
    v = _unwrap_value(v)
    if v is None or v == "":
        return None
    try:
        h = float(v)
        if h < 0:
            return None
        return int(round(h * 3600 * 1000))
    except (ValueError, TypeError, OverflowError):
        # OverflowError: int(round(float("inf") * ...)) — previously uncaught,
        # crashing on inputs like "inf"/"Infinity".
        return None
|
||||
|
||||
|
||||
def merge_clickup_task_update_entries(resolved_params: Dict[str, Any]) -> None:
    """
    Pop taskUpdateEntries from resolved_params and merge into taskUpdate (dict or JSON string).
    Existing taskUpdate (advanced JSON) is the base; entry rows override by key.

    Mutates resolved_params in place: removes "taskUpdateEntries" and rewrites
    "taskUpdate" as a JSON string (or "" when nothing remains to send).
    Known entry fieldKeys: name, description, status, priority (1-4),
    due_date, time_estimate_h, time_estimate_ms, assignees, custom_field.
    """
    entries = resolved_params.pop("taskUpdateEntries", None)
    json_raw = resolved_params.get("taskUpdate")
    # base = the advanced-JSON taskUpdate supplied by the user, if parseable.
    base: Dict[str, Any] = {}
    if isinstance(json_raw, str) and json_raw.strip():
        try:
            parsed = json.loads(json_raw)
            if isinstance(parsed, dict):
                base = dict(parsed)
        except json.JSONDecodeError:
            logger.warning("clickup.updateTask: taskUpdate is not valid JSON, ignoring base")
    elif isinstance(json_raw, dict):
        base = dict(json_raw)

    # No entry rows: keep the original value (even if unparseable) or
    # normalize a dict base back to a JSON string, then stop.
    if not isinstance(entries, list) or not entries:
        if not base and json_raw not in (None, "", {}):
            resolved_params["taskUpdate"] = json_raw
        elif base:
            resolved_params["taskUpdate"] = json.dumps(base, ensure_ascii=False)
        return

    # overlay = values from entry rows; wins over base on key collisions.
    overlay: Dict[str, Any] = {}
    # custom_rows = ClickUp custom-field updates, merged separately by field id.
    custom_rows: List[Dict[str, Any]] = []

    for row in entries:
        if not isinstance(row, dict):
            continue
        fk = row.get("fieldKey") or row.get("field")
        if fk is None:
            continue
        fk = str(fk).strip()
        val = _unwrap_dynamic(row.get("value"))

        # Custom fields are collected separately; both id and value required.
        if fk == "custom_field":
            cfid = _unwrap_dynamic(row.get("customFieldId"))
            if not cfid or not str(cfid).strip():
                continue
            if val is None or val == "":
                continue
            custom_rows.append({"id": str(cfid).strip(), "value": val})
            continue

        if fk == "name" and val is not None and str(val).strip():
            overlay["name"] = str(val).strip()
        elif fk == "description":
            # Empty value deliberately clears the description.
            overlay["description"] = "" if val is None else str(val)
        elif fk == "status" and val is not None and str(val).strip():
            overlay["status"] = str(val).strip()
        elif fk == "priority":
            if val is None or val == "":
                continue
            try:
                # ClickUp priorities are 1 (urgent) .. 4 (low); drop anything else.
                pi = int(float(val))
                if 1 <= pi <= 4:
                    overlay["priority"] = pi
            except (ValueError, TypeError):
                pass
        elif fk == "due_date":
            dms = _parse_due_date_ms(val)
            if dms is not None:
                overlay["due_date"] = dms
        elif fk == "time_estimate_h":
            # Hours input; converted to the API's millisecond field.
            tms = _parse_time_estimate_hours_to_ms(val)
            if tms is not None:
                overlay["time_estimate"] = tms
        elif fk == "time_estimate_ms":
            if val is None or val == "":
                continue
            try:
                tms = int(float(val))
                if tms > 0:
                    overlay["time_estimate"] = tms
            except (ValueError, TypeError):
                pass
        elif fk == "assignees":
            ids = _parse_int_list(val)
            if ids:
                overlay["assignees"] = ids
        else:
            logger.debug("clickup.updateTask: unknown fieldKey %s", fk)

    # Entry rows override advanced-JSON base on the same top-level key.
    merged = {**base, **overlay}

    if custom_rows:
        # Merge custom fields by id: entry rows replace base entries with the
        # same field id; order follows first appearance.
        by_id: Dict[str, Dict[str, Any]] = {}
        existing = merged.get("custom_fields")
        if isinstance(existing, list):
            for x in existing:
                if isinstance(x, dict) and x.get("id") is not None:
                    by_id[str(x["id"])] = x
        for x in custom_rows:
            by_id[str(x["id"])] = x
        merged["custom_fields"] = list(by_id.values())

    # Always normalize to a JSON string for the downstream ClickUp action.
    resolved_params["taskUpdate"] = json.dumps(merged, ensure_ascii=False) if merged else ""
|
||||
|
|
@ -11,18 +11,19 @@ from modules.workflows.automation2.graphUtils import (
|
|||
validateGraph,
|
||||
topoSort,
|
||||
getInputSources,
|
||||
getLoopBodyNodeIds,
|
||||
)
|
||||
|
||||
from modules.workflows.automation2.executors import (
|
||||
TriggerExecutor,
|
||||
FlowExecutor,
|
||||
DataExecutor,
|
||||
ActionNodeExecutor,
|
||||
InputExecutor,
|
||||
PauseForHumanTaskError,
|
||||
PauseForEmailWaitError,
|
||||
)
|
||||
from modules.features.automation2.nodeDefinitions import STATIC_NODE_TYPES
|
||||
from modules.workflows.automation2.runEnvelope import normalize_run_envelope
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -32,6 +33,38 @@ def _getNodeTypeIds(services: Any = None) -> Set[str]:
|
|||
return {n["id"] for n in STATIC_NODE_TYPES}
|
||||
|
||||
|
||||
def _is_node_on_active_path(
|
||||
nodeId: str,
|
||||
connectionMap: Dict[str, List],
|
||||
nodeOutputs: Dict[str, Any],
|
||||
) -> bool:
|
||||
"""
|
||||
Return True if this node receives input only from active branches.
|
||||
- flow.ifElse: only one output (0=yes, 1=no) is active; uses "branch".
|
||||
- flow.switch: only one output (0, 1, 2, ...) is active; uses "match".
|
||||
Nodes connected to inactive outputs must be skipped.
|
||||
Also skip when a predecessor was skipped (not in nodeOutputs).
|
||||
"""
|
||||
for src, source_output, _ in connectionMap.get(nodeId, []):
|
||||
out = nodeOutputs.get(src)
|
||||
if out is None:
|
||||
return False
|
||||
if not isinstance(out, dict):
|
||||
continue
|
||||
branch = out.get("branch")
|
||||
match = out.get("match")
|
||||
active_output = None
|
||||
if branch is not None:
|
||||
active_output = branch
|
||||
elif match is not None:
|
||||
if match < 0:
|
||||
return False # switch: no case matched, skip all downstream
|
||||
active_output = match
|
||||
if active_output is not None and source_output != active_output:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _getExecutor(
|
||||
nodeType: str,
|
||||
services: Any,
|
||||
|
|
@ -42,9 +75,7 @@ def _getExecutor(
|
|||
return TriggerExecutor()
|
||||
if nodeType.startswith("flow."):
|
||||
return FlowExecutor()
|
||||
if nodeType.startswith("data."):
|
||||
return DataExecutor()
|
||||
if nodeType.startswith("ai.") or nodeType.startswith("email.") or nodeType.startswith("sharepoint."):
|
||||
if nodeType.startswith("ai.") or nodeType.startswith("email.") or nodeType.startswith("sharepoint.") or nodeType.startswith("clickup.") or nodeType.startswith("file."):
|
||||
return ActionNodeExecutor(services)
|
||||
if nodeType.startswith("input.") and automation2_interface:
|
||||
return InputExecutor(automation2_interface)
|
||||
|
|
@ -62,12 +93,14 @@ async def executeGraph(
|
|||
initialNodeOutputs: Optional[Dict[str, Any]] = None,
|
||||
startAfterNodeId: Optional[str] = None,
|
||||
runId: Optional[str] = None,
|
||||
run_envelope: Optional[Dict[str, Any]] = None,
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Execute automation2 graph. Returns { success, nodeOutputs, error?, stopped? }.
|
||||
When an input node is reached and automation2_interface is provided, creates a task,
|
||||
pauses the run, and returns { success: False, paused: True, taskId, runId }.
|
||||
For resume: pass initialNodeOutputs (with result for the human node) and startAfterNodeId.
|
||||
For fresh runs: pass run_envelope (unified start payload for the start node); normalized with userId into context.runEnvelope.
|
||||
"""
|
||||
logger.info(
|
||||
"executeGraph start: instanceId=%s workflowId=%s userId=%s mandateId=%s resume=%s",
|
||||
|
|
@ -122,6 +155,8 @@ async def executeGraph(
|
|||
runId = run.get("id") if run else None
|
||||
logger.info("executeGraph created run %s", runId)
|
||||
|
||||
env_for_run = normalize_run_envelope(run_envelope, user_id=userId)
|
||||
|
||||
context = {
|
||||
"workflowId": workflowId,
|
||||
"instanceId": instanceId,
|
||||
|
|
@ -133,19 +168,76 @@ async def executeGraph(
|
|||
"services": services,
|
||||
"_runId": runId,
|
||||
"_orderedNodes": ordered,
|
||||
"runEnvelope": env_for_run,
|
||||
}
|
||||
|
||||
skip_until_passed = bool(startAfterNodeId)
|
||||
processed_in_loop: Set[str] = set()
|
||||
|
||||
# Check for loop resume: run was paused inside a loop, we're resuming for next iteration
|
||||
run = automation2_interface.getRun(runId) if (runId and automation2_interface) else None
|
||||
loop_resume_state = (run.get("context") or {}).get("_loopState") if run else None
|
||||
if loop_resume_state and startAfterNodeId:
|
||||
loop_node_id = loop_resume_state.get("loopNodeId")
|
||||
next_index = loop_resume_state.get("currentIndex", -1) + 1
|
||||
items = loop_resume_state.get("items") or []
|
||||
body_ids = getLoopBodyNodeIds(loop_node_id, connectionMap) if loop_node_id else set()
|
||||
body_ordered = [n for n in ordered if n.get("id") in body_ids]
|
||||
processed_in_loop = set(body_ids) | {loop_node_id} if loop_node_id else set()
|
||||
while next_index < len(items) and loop_node_id:
|
||||
nodeOutputs[loop_node_id] = {
|
||||
"items": items,
|
||||
"count": len(items),
|
||||
"currentItem": items[next_index],
|
||||
"currentIndex": next_index,
|
||||
}
|
||||
context["_loopState"] = {"loopNodeId": loop_node_id, "currentIndex": next_index, "items": items}
|
||||
for body_node in body_ordered:
|
||||
bnid = body_node.get("id")
|
||||
if not bnid or context.get("_stopped"):
|
||||
break
|
||||
if not _is_node_on_active_path(bnid, connectionMap, nodeOutputs):
|
||||
continue
|
||||
executor = _getExecutor(body_node.get("type", ""), services, automation2_interface)
|
||||
if not executor:
|
||||
nodeOutputs[bnid] = None
|
||||
continue
|
||||
try:
|
||||
result = await executor.execute(body_node, context)
|
||||
nodeOutputs[bnid] = result
|
||||
logger.info("executeGraph loop resume body node %s done (iter %d)", bnid, next_index)
|
||||
except PauseForHumanTaskError as e:
|
||||
if automation2_interface:
|
||||
run_ctx = dict(run.get("context") or {})
|
||||
run_ctx["_loopState"] = {"loopNodeId": loop_node_id, "currentIndex": next_index, "items": items}
|
||||
automation2_interface.updateRun(e.runId, status="paused", nodeOutputs=dict(nodeOutputs), currentNodeId=e.nodeId, context=run_ctx)
|
||||
return {"success": False, "paused": True, "taskId": e.taskId, "runId": e.runId, "nodeId": e.nodeId, "nodeOutputs": dict(nodeOutputs)}
|
||||
except Exception as ex:
|
||||
logger.exception("executeGraph loop body node %s FAILED: %s", bnid, ex)
|
||||
nodeOutputs[bnid] = {"error": str(ex), "success": False}
|
||||
if runId and automation2_interface:
|
||||
automation2_interface.updateRun(runId, status="failed", nodeOutputs=nodeOutputs)
|
||||
return {"success": False, "error": str(ex), "nodeOutputs": nodeOutputs, "failedNode": bnid}
|
||||
next_index += 1
|
||||
if loop_node_id:
|
||||
nodeOutputs[loop_node_id] = {"items": items, "count": len(items)}
|
||||
processed_in_loop = set(body_ids) | {loop_node_id}
|
||||
|
||||
for i, node in enumerate(ordered):
|
||||
if skip_until_passed:
|
||||
if node.get("id") == startAfterNodeId:
|
||||
skip_until_passed = False
|
||||
continue
|
||||
if node.get("id") in processed_in_loop:
|
||||
continue
|
||||
if context.get("_stopped"):
|
||||
logger.info("executeGraph stopped early (flow.stop) at step %d", i)
|
||||
logger.info("executeGraph stopped early at step %d", i)
|
||||
break
|
||||
nodeId = node.get("id")
|
||||
nodeType = node.get("type", "")
|
||||
if not _is_node_on_active_path(nodeId, connectionMap, nodeOutputs):
|
||||
logger.info("executeGraph step %d/%d: nodeId=%s SKIP (inactive branch)", i + 1, len(ordered), nodeId)
|
||||
continue
|
||||
executor = _getExecutor(nodeType, services, automation2_interface)
|
||||
logger.info(
|
||||
"executeGraph step %d/%d: nodeId=%s nodeType=%s executor=%s",
|
||||
|
|
@ -160,14 +252,54 @@ async def executeGraph(
|
|||
logger.debug("executeGraph node %s: no executor, output=None", nodeId)
|
||||
continue
|
||||
try:
|
||||
result = await executor.execute(node, context)
|
||||
nodeOutputs[nodeId] = result
|
||||
logger.info(
|
||||
"executeGraph node %s done: result_type=%s result_keys=%s",
|
||||
nodeId,
|
||||
type(result).__name__,
|
||||
list(result.keys()) if isinstance(result, dict) else "n/a",
|
||||
)
|
||||
if nodeType == "flow.loop":
|
||||
result = await executor.execute(node, context)
|
||||
items = result.get("items") or []
|
||||
body_ids = getLoopBodyNodeIds(nodeId, connectionMap)
|
||||
body_ordered = [n for n in ordered if n.get("id") in body_ids]
|
||||
processed_in_loop.update(body_ids)
|
||||
processed_in_loop.add(nodeId)
|
||||
for idx, item in enumerate(items):
|
||||
nodeOutputs[nodeId] = {"items": items, "count": len(items), "currentItem": item, "currentIndex": idx}
|
||||
context["_loopState"] = {"loopNodeId": nodeId, "currentIndex": idx, "items": items}
|
||||
for body_node in body_ordered:
|
||||
bnid = body_node.get("id")
|
||||
if not bnid or context.get("_stopped"):
|
||||
break
|
||||
if not _is_node_on_active_path(bnid, connectionMap, nodeOutputs):
|
||||
continue
|
||||
bexec = _getExecutor(body_node.get("type", ""), services, automation2_interface)
|
||||
if not bexec:
|
||||
nodeOutputs[bnid] = None
|
||||
continue
|
||||
try:
|
||||
bres = await bexec.execute(body_node, context)
|
||||
nodeOutputs[bnid] = bres
|
||||
logger.info("executeGraph loop body node %s done (iter %d)", bnid, idx)
|
||||
except PauseForHumanTaskError as e:
|
||||
if runId and automation2_interface:
|
||||
run = automation2_interface.getRun(runId) or {}
|
||||
run_ctx = dict(run.get("context") or {})
|
||||
run_ctx["_loopState"] = {"loopNodeId": nodeId, "currentIndex": idx, "items": items}
|
||||
automation2_interface.updateRun(e.runId, status="paused", nodeOutputs=dict(nodeOutputs), currentNodeId=e.nodeId, context=run_ctx)
|
||||
return {"success": False, "paused": True, "taskId": e.taskId, "runId": e.runId, "nodeId": e.nodeId, "nodeOutputs": dict(nodeOutputs)}
|
||||
except Exception as ex:
|
||||
logger.exception("executeGraph loop body node %s FAILED: %s", bnid, ex)
|
||||
nodeOutputs[bnid] = {"error": str(ex), "success": False}
|
||||
if runId and automation2_interface:
|
||||
automation2_interface.updateRun(runId, status="failed", nodeOutputs=nodeOutputs)
|
||||
return {"success": False, "error": str(ex), "nodeOutputs": nodeOutputs, "failedNode": bnid}
|
||||
nodeOutputs[nodeId] = {"items": items, "count": len(items)}
|
||||
logger.info("executeGraph flow.loop done: %d iterations", len(items))
|
||||
else:
|
||||
result = await executor.execute(node, context)
|
||||
nodeOutputs[nodeId] = result
|
||||
logger.info(
|
||||
"executeGraph node %s done: result_type=%s result_keys=%s",
|
||||
nodeId,
|
||||
type(result).__name__,
|
||||
list(result.keys()) if isinstance(result, dict) else "n/a",
|
||||
)
|
||||
except PauseForHumanTaskError as e:
|
||||
logger.info("executeGraph paused for human task %s", e.taskId)
|
||||
return {
|
||||
|
|
|
|||
|
|
@ -3,14 +3,12 @@
|
|||
|
||||
from .triggerExecutor import TriggerExecutor
|
||||
from .flowExecutor import FlowExecutor
|
||||
from .dataExecutor import DataExecutor
|
||||
from .actionNodeExecutor import ActionNodeExecutor
|
||||
from .inputExecutor import InputExecutor, PauseForHumanTaskError, PauseForEmailWaitError
|
||||
|
||||
__all__ = [
|
||||
"TriggerExecutor",
|
||||
"FlowExecutor",
|
||||
"DataExecutor",
|
||||
"ActionNodeExecutor",
|
||||
"InputExecutor",
|
||||
"PauseForHumanTaskError",
|
||||
|
|
|
|||
|
|
@ -1,11 +1,31 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Action node executor - maps ai.*, email.*, sharepoint.* to method actions via ActionExecutor.
|
||||
# Action node executor - maps ai.*, email.*, sharepoint.*, clickup.* to method actions via ActionExecutor.
|
||||
#
|
||||
# Unified handover format for all nodes:
|
||||
# - Node output: { success, error?, documents, documentList, data } – documents and documentList are identical
|
||||
# - Input merge: downstream receives documents via _getDocumentsFromUpstream(inp) – reads documents or documentList
|
||||
# - Incoming email handover: (context, documentList, reply_to, subject) via _formatEmailOutputAsContext / _unpackIncomingEmail
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Dict, Any, List, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# UserConnection.id (UUID) when connectionId could not be mapped to connection:authority:username
|
||||
_USER_CONNECTION_ID_RE = re.compile(
|
||||
r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$",
|
||||
re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
def _is_user_connection_id(val: Any) -> bool:
|
||||
if val is None or isinstance(val, (dict, list)):
|
||||
return False
|
||||
s = str(val).strip()
|
||||
return bool(_USER_CONNECTION_ID_RE.match(s))
|
||||
|
||||
|
||||
def _getNodeDefinition(nodeType: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get node definition by type id for _method, _action, _paramMap."""
|
||||
|
|
@ -63,11 +83,12 @@ def _extractEmailContentFromUpstream(inp: Any) -> Optional[Dict[str, Any]]:
|
|||
"""
|
||||
Extract {subject, body, to} from upstream node output (e.g. AI node returning JSON).
|
||||
Expects JSON like {"subject": "...", "body": "...", "to": "..."} in documentData.
|
||||
Uses unified handover: documents/documentList.
|
||||
"""
|
||||
if not inp:
|
||||
return None
|
||||
import json
|
||||
docs = inp.get("documents", inp.get("documentList", [])) if isinstance(inp, dict) else []
|
||||
docs = _getDocumentsFromUpstream(inp)
|
||||
if not docs:
|
||||
return None
|
||||
doc = docs[0] if isinstance(docs, list) else docs
|
||||
|
|
@ -92,15 +113,12 @@ def _extractContextFromUpstream(inp: Any) -> Optional[str]:
|
|||
Extract plain text context from upstream node output (e.g. AI node returning txt).
|
||||
Use when _extractEmailContentFromUpstream returns None – the generated document content
|
||||
(email body, summary, etc.) should be passed as context to email.draftEmail.
|
||||
Uses unified handover: documents/documentList.
|
||||
"""
|
||||
if not inp:
|
||||
return None
|
||||
docs = None
|
||||
if isinstance(inp, dict):
|
||||
docs = inp.get("documents") or inp.get("documentList")
|
||||
if not docs and isinstance(inp.get("data"), dict):
|
||||
docs = inp.get("data", {}).get("documents")
|
||||
if not docs or not isinstance(docs, (list, tuple)):
|
||||
docs = _getDocumentsFromUpstream(inp)
|
||||
if not docs:
|
||||
return None
|
||||
doc = docs[0] if docs else None
|
||||
if not doc:
|
||||
|
|
@ -114,6 +132,63 @@ def _extractContextFromUpstream(inp: Any) -> Optional[str]:
|
|||
return s if s else None
|
||||
|
||||
|
||||
def _payloadToContext(payload: Any) -> Optional[str]:
|
||||
"""Convert payload (e.g. from form) to readable text for document context."""
|
||||
if payload is None:
|
||||
return None
|
||||
if isinstance(payload, str) and payload.strip():
|
||||
return payload.strip()
|
||||
if isinstance(payload, dict):
|
||||
try:
|
||||
import json
|
||||
return json.dumps(payload, ensure_ascii=False, indent=2)
|
||||
except (TypeError, ValueError):
|
||||
lines = [f"{k}: {v}" for k, v in payload.items()]
|
||||
return "\n".join(lines) if lines else None
|
||||
return str(payload).strip() if str(payload).strip() else None
|
||||
|
||||
|
||||
def _getContextFromUpstream(out: Any) -> Optional[str]:
|
||||
"""
|
||||
Get context from upstream node output. Prefers explicit 'context' field;
|
||||
falls back to documents/documentList (first doc's documentData), then payload.
|
||||
Handles: AI (context), form (payload or top-level field dict), upload (document refs).
|
||||
"""
|
||||
if not out or not isinstance(out, dict):
|
||||
return None
|
||||
ctx = out.get("context")
|
||||
if isinstance(ctx, str) and ctx.strip():
|
||||
return ctx.strip()
|
||||
doc_ctx = _extractContextFromUpstream(out)
|
||||
if doc_ctx:
|
||||
return doc_ctx
|
||||
payload = out.get("payload")
|
||||
if payload is not None:
|
||||
return _payloadToContext(payload)
|
||||
if "documents" not in out and "documentList" not in out and "success" not in out:
|
||||
return _payloadToContext(out)
|
||||
return None
|
||||
|
||||
|
||||
def _extractContextFromResult(result: Any) -> Optional[str]:
|
||||
"""
|
||||
Extract plain text context from ActionResult (ActionExecutor result).
|
||||
Used to populate 'context' in unified output for AI nodes.
|
||||
"""
|
||||
if not result or not hasattr(result, "documents"):
|
||||
return None
|
||||
docs = result.documents or []
|
||||
if not docs:
|
||||
return None
|
||||
doc = docs[0]
|
||||
raw = getattr(doc, "documentData", None) if hasattr(doc, "documentData") else (doc.get("documentData") if isinstance(doc, dict) else None)
|
||||
if not raw:
|
||||
return None
|
||||
if isinstance(raw, bytes):
|
||||
return raw.decode("utf-8", errors="replace").strip()
|
||||
return str(raw).strip() if str(raw).strip() else None
|
||||
|
||||
|
||||
def _gatherAttachmentDocumentsFromUpstream(
|
||||
nodeId: str,
|
||||
inputSources: Dict[str, Dict[int, tuple]],
|
||||
|
|
@ -140,7 +215,7 @@ def _gatherAttachmentDocumentsFromUpstream(
|
|||
|
||||
if srcType in ("sharepoint.downloadFile", "sharepoint.readFile"):
|
||||
if isinstance(out, dict):
|
||||
for d in out.get("documents") or out.get("documentList") or []:
|
||||
for d in _getDocumentsFromUpstream(out):
|
||||
if isinstance(d, dict) and (d.get("documentData") or (d.get("validationMetadata") or {}).get("fileId")):
|
||||
docs.append(d)
|
||||
elif hasattr(d, "documentData") or (getattr(d, "validationMetadata", None) or {}).get("fileId"):
|
||||
|
|
@ -152,6 +227,62 @@ def _gatherAttachmentDocumentsFromUpstream(
|
|||
return docs
|
||||
|
||||
|
||||
def _getDocumentsFromUpstream(out: Any) -> list:
|
||||
"""Unified: extract documents list from any node output.
|
||||
Supports: documents, documentList, data.documents.
|
||||
Also: input.upload result format { file, files, fileIds } - converts to doc refs with validationMetadata.fileId.
|
||||
"""
|
||||
if not out or not isinstance(out, dict):
|
||||
return []
|
||||
docs = out.get("documents") or out.get("documentList")
|
||||
if not docs and isinstance(out.get("data"), dict):
|
||||
docs = out.get("data", {}).get("documents") or out.get("data", {}).get("documentList")
|
||||
if not docs:
|
||||
# input.upload task result: { file: {id, fileName}, files: [...], fileIds: [...] }
|
||||
def _file_to_doc(f: Any) -> Optional[Dict[str, Any]]:
|
||||
if isinstance(f, dict):
|
||||
fid = f.get("id")
|
||||
fname = f.get("fileName") or f.get("filename") or "file"
|
||||
if fid:
|
||||
return {
|
||||
"documentName": fname,
|
||||
"fileName": fname,
|
||||
"validationMetadata": {"fileId": str(fid)},
|
||||
}
|
||||
elif isinstance(f, str):
|
||||
return {"documentName": "file", "fileName": "file", "validationMetadata": {"fileId": f}}
|
||||
return None
|
||||
|
||||
file_obj = out.get("file")
|
||||
files_arr = out.get("files") or []
|
||||
file_ids = out.get("fileIds") or []
|
||||
if file_obj:
|
||||
d = _file_to_doc(file_obj)
|
||||
if d:
|
||||
docs = [d]
|
||||
if not docs and files_arr:
|
||||
docs = [d for f in files_arr for d in [_file_to_doc(f)] if d]
|
||||
if not docs and file_ids:
|
||||
docs = [_file_to_doc(fid) for fid in file_ids if _file_to_doc(fid)]
|
||||
if not docs:
|
||||
return []
|
||||
return docs if isinstance(docs, (list, tuple)) else [docs]
|
||||
|
||||
|
||||
def _unpackIncomingEmail(incoming: Optional[tuple]) -> Optional[tuple]:
|
||||
"""
|
||||
Unified handover: (context, documentList, reply_to, subject).
|
||||
Returns (ctx, doc_list, reply_to, subject) or None.
|
||||
"""
|
||||
if not incoming or not isinstance(incoming, (list, tuple)):
|
||||
return None
|
||||
ctx = incoming[0] if len(incoming) > 0 else None
|
||||
doc_list = incoming[1] if len(incoming) > 1 else []
|
||||
reply_to = incoming[2] if len(incoming) > 2 else None
|
||||
subject = incoming[3] if len(incoming) > 3 else ""
|
||||
return (ctx, doc_list or [], reply_to, subject)
|
||||
|
||||
|
||||
def _getIncomingEmailFromUpstream(
|
||||
nodeId: str,
|
||||
inputSources: Dict[str, Dict[int, tuple]],
|
||||
|
|
@ -189,12 +320,14 @@ def _getIncomingEmailFromUpstream(
|
|||
|
||||
|
||||
def _formatEmailOutputAsContext(out: Any) -> Optional[tuple]:
|
||||
"""Format email node output as (context, documentList, reply_to) for composeAndDraftEmail.
|
||||
"""Format email node output as (context, documentList, reply_to, subject) for composeAndDraftEmail.
|
||||
reply_to = sender address of first email (recipient for the reply).
|
||||
subject = original subject (for Re: prefix).
|
||||
Returns unified handover: (text, files/docs, reply_to, subject).
|
||||
"""
|
||||
if not out:
|
||||
return None
|
||||
docs = out.get("documents", out.get("documentList", [])) if isinstance(out, dict) else []
|
||||
docs = _getDocumentsFromUpstream(out)
|
||||
if not docs:
|
||||
return None
|
||||
doc = docs[0] if isinstance(docs, list) else docs
|
||||
|
|
@ -217,6 +350,7 @@ def _formatEmailOutputAsContext(out: Any) -> Optional[tuple]:
|
|||
if not emails_list:
|
||||
return None
|
||||
reply_to = None
|
||||
first_subject = ""
|
||||
parts = ["Reply to the following email(s):", ""]
|
||||
for i, em in enumerate(emails_list[:5]): # max 5
|
||||
if not isinstance(em, dict):
|
||||
|
|
@ -227,6 +361,8 @@ def _formatEmailOutputAsContext(out: Any) -> Optional[tuple]:
|
|||
if from_str and not reply_to:
|
||||
reply_to = addr.get("address", "") or from_str
|
||||
subj = em.get("subject", "")
|
||||
if subj and not first_subject:
|
||||
first_subject = subj
|
||||
body = em.get("bodyPreview", "") or (em.get("body") or {}).get("content", "") if isinstance(em.get("body"), dict) else ""
|
||||
if body and len(str(body)) > 1500:
|
||||
body = str(body)[:1500] + "..."
|
||||
|
|
@ -238,7 +374,7 @@ def _formatEmailOutputAsContext(out: Any) -> Optional[tuple]:
|
|||
parts.insert(2, f"Recipient (reply to this address): {reply_to}")
|
||||
parts.insert(3, "")
|
||||
context = "\n".join(parts).strip()
|
||||
return (context, docs, reply_to)
|
||||
return (context, docs, reply_to, first_subject)
|
||||
|
||||
|
||||
def _buildSearchQuery(
|
||||
|
|
@ -349,6 +485,10 @@ def _buildActionParams(
|
|||
ref = _resolveConnectionIdToReference(chatService, connId, services)
|
||||
if ref:
|
||||
params["connectionReference"] = ref
|
||||
elif _is_user_connection_id(connId):
|
||||
# Automation2 worker often has no chat user connection list; pass UUID through —
|
||||
# method helpers (e.g. ClickupConnectionHelper) resolve via interfaceDbApp.getUserConnectionById.
|
||||
params["connectionReference"] = str(connId).strip()
|
||||
else:
|
||||
logger.warning(f"Could not resolve connectionId {connId} to connectionReference")
|
||||
params.pop("connectionId", None)
|
||||
|
|
@ -384,7 +524,7 @@ def _buildActionParams(
|
|||
|
||||
|
||||
class ActionNodeExecutor:
|
||||
"""Execute ai.*, email.*, sharepoint.* nodes by mapping to method actions."""
|
||||
"""Execute ai.*, email.*, sharepoint.*, clickup.* nodes by mapping to method actions."""
|
||||
|
||||
def __init__(self, services: Any):
|
||||
self.services = services
|
||||
|
|
@ -414,16 +554,57 @@ class ActionNodeExecutor:
|
|||
nodeDef = _getNodeDefinition(nodeType)
|
||||
params = dict(node.get("parameters") or {})
|
||||
resolvedParams = resolveParameterReferences(params, context.get("nodeOutputs", {}))
|
||||
if nodeType == "clickup.updateTask":
|
||||
from modules.workflows.automation2.clickupTaskUpdateMerge import merge_clickup_task_update_entries
|
||||
|
||||
# Merge input from connected nodes (documentList, etc.)
|
||||
merge_clickup_task_update_entries(resolvedParams)
|
||||
|
||||
# Merge input from connected nodes (unified handover: documents/documentList, context)
|
||||
inputSources = context.get("inputSources", {}).get(nodeId, {})
|
||||
if 0 in inputSources:
|
||||
srcId, _ = inputSources[0]
|
||||
inp = context.get("nodeOutputs", {}).get(srcId)
|
||||
if isinstance(inp, dict):
|
||||
resolvedParams.setdefault("documentList", inp.get("documents", inp.get("documentList", [])))
|
||||
docs = _getDocumentsFromUpstream(inp) if isinstance(inp, dict) else []
|
||||
if docs:
|
||||
resolvedParams.setdefault("documentList", docs)
|
||||
elif inp is not None:
|
||||
resolvedParams.setdefault("input", inp)
|
||||
# file.create: build context from contentSources (concatenated) or fallback to upstream
|
||||
if nodeType == "file.create":
|
||||
sources = resolvedParams.get("contentSources")
|
||||
if not isinstance(sources, list):
|
||||
sources = [resolvedParams.get("contentSource")] if resolvedParams.get("contentSource") else []
|
||||
parts = []
|
||||
for s in sources:
|
||||
if s is None or s == "":
|
||||
continue
|
||||
if isinstance(s, str):
|
||||
txt = s.strip()
|
||||
elif isinstance(s, dict):
|
||||
txt = _payloadToContext(s) if s else ""
|
||||
else:
|
||||
txt = str(s)
|
||||
if txt:
|
||||
parts.append(txt)
|
||||
upstream_context = _getContextFromUpstream(inp)
|
||||
if parts:
|
||||
parts_joined = "\n\n".join(parts)
|
||||
# When upstream is AI and user only selected prompt, use full context (prompt + response)
|
||||
if (
|
||||
isinstance(inp, dict)
|
||||
and upstream_context
|
||||
and len(upstream_context) > len(parts_joined)
|
||||
):
|
||||
prompt_only = (inp.get("prompt") or "").strip()
|
||||
if prompt_only and parts_joined.strip() == prompt_only:
|
||||
resolvedParams["context"] = upstream_context
|
||||
else:
|
||||
resolvedParams["context"] = parts_joined
|
||||
else:
|
||||
resolvedParams["context"] = parts_joined
|
||||
else:
|
||||
if upstream_context:
|
||||
resolvedParams["context"] = upstream_context
|
||||
|
||||
# ai.prompt with email upstream: inject actual email content into prompt so AI has context
|
||||
# (getChatDocumentsFromDocumentList fails in automation2 – workflow has no messages)
|
||||
|
|
@ -434,17 +615,20 @@ class ActionNodeExecutor:
|
|||
srcNode = next((n for n in orderedNodes if n.get("id") == srcId), None)
|
||||
srcType = (srcNode or {}).get("type", "")
|
||||
if srcType in ("email.checkEmail", "email.searchEmail"):
|
||||
incoming = _getIncomingEmailFromUpstream(
|
||||
incoming = _unpackIncomingEmail(_getIncomingEmailFromUpstream(
|
||||
nodeId,
|
||||
context.get("inputSources", {}),
|
||||
context.get("nodeOutputs", {}),
|
||||
orderedNodes,
|
||||
)
|
||||
))
|
||||
if incoming:
|
||||
ctx, _doc_list, _reply_to = incoming
|
||||
ctx, _doc_list, _reply_to, _ = incoming
|
||||
if ctx and ctx.strip():
|
||||
base_prompt = (resolvedParams.get("aiPrompt") or "").strip()
|
||||
resolvedParams["aiPrompt"] = (
|
||||
# Set "prompt" so _paramMap (prompt→aiPrompt) passes it through to ai.process
|
||||
base_prompt = (
|
||||
(resolvedParams.get("prompt") or resolvedParams.get("aiPrompt") or "")
|
||||
).strip()
|
||||
resolvedParams["prompt"] = (
|
||||
f"Eingehende E-Mail:\n{ctx}\n\nAufgabe: {base_prompt}"
|
||||
if base_prompt
|
||||
else f"Eingehende E-Mail:\n{ctx}"
|
||||
|
|
@ -454,6 +638,11 @@ class ActionNodeExecutor:
|
|||
chatService = getattr(self.services, "chat", None)
|
||||
actionParams = _buildActionParams(node, nodeDef or {}, resolvedParams, chatService, self.services)
|
||||
|
||||
# ai.prompt: use simpleMode by default – direct AI call, no document pipeline (chapters/sections)
|
||||
# For short prompts like "formuliere eine passende email" this avoids ~13 AI calls and verbose output
|
||||
if nodeType == "ai.prompt" and "simpleMode" not in actionParams:
|
||||
actionParams["simpleMode"] = True
|
||||
|
||||
# email.checkEmail: pause and wait for new email (background poller will resume)
|
||||
if nodeType == "email.checkEmail":
|
||||
runId = context.get("_runId")
|
||||
|
|
@ -492,9 +681,26 @@ class ActionNodeExecutor:
|
|||
if srcType.startswith("ai."):
|
||||
inp = nodeOutputs.get(srcId)
|
||||
email_content = _extractEmailContentFromUpstream(inp)
|
||||
# Reply flow: get incoming email metadata (replyTo, subject, original docs) when email->AI->draft
|
||||
incoming = _unpackIncomingEmail(_getIncomingEmailFromUpstream(nodeId, inputSources, nodeOutputs, orderedNodes))
|
||||
reply_to = None
|
||||
reply_subject = None
|
||||
reply_docs = []
|
||||
if incoming:
|
||||
inc_ctx, doc_list, reply_to, first_subject = incoming
|
||||
reply_docs = doc_list
|
||||
reply_subject = ("Re: " + first_subject) if first_subject else None
|
||||
if email_content:
|
||||
actionParams["emailContent"] = email_content
|
||||
actionParams["context"] = email_content.get("body", "") or "(from connected AI node)"
|
||||
# Merge reply metadata when available
|
||||
merged = dict(email_content)
|
||||
if reply_to and not merged.get("to"):
|
||||
merged["to"] = reply_to if isinstance(reply_to, list) else [reply_to]
|
||||
if reply_subject and not merged.get("subject"):
|
||||
merged["subject"] = reply_subject
|
||||
actionParams["emailContent"] = merged
|
||||
actionParams["context"] = merged.get("body", "") or "(from connected AI node)"
|
||||
if reply_docs:
|
||||
actionParams["replySourceDocuments"] = reply_docs
|
||||
# Attachments: gather from file nodes upstream of AI (e.g. downloadFile -> AI -> email)
|
||||
attachment_docs = _gatherAttachmentDocumentsFromUpstream(
|
||||
nodeId, inputSources, nodeOutputs, orderedNodes
|
||||
|
|
@ -514,27 +720,44 @@ class ActionNodeExecutor:
|
|||
extra = [x for x in (existing if isinstance(existing, list) else []) if _is_binary_attachment(x)]
|
||||
actionParams["documentList"] = attachment_docs + extra
|
||||
if not email_content:
|
||||
# AI returns plain text (e.g. email.txt): use as email body directly (no extra AI call)
|
||||
ctx = _extractContextFromUpstream(inp)
|
||||
# AI returns plain text or context: use as email body directly (no extra AI call)
|
||||
ctx = _getContextFromUpstream(inp)
|
||||
if ctx:
|
||||
# Reply flow: get incoming email metadata (replyTo, subject, original docs)
|
||||
incoming = _unpackIncomingEmail(_getIncomingEmailFromUpstream(nodeId, inputSources, nodeOutputs, orderedNodes))
|
||||
reply_to = None
|
||||
reply_subject = None
|
||||
reply_docs = []
|
||||
if incoming:
|
||||
inc_ctx, doc_list, reply_to, first_subject = incoming
|
||||
reply_docs = doc_list
|
||||
reply_subject = ("Re: " + first_subject) if first_subject else None
|
||||
actionParams["emailContent"] = {
|
||||
"subject": actionParams.get("subject", "Draft"),
|
||||
"subject": reply_subject or actionParams.get("subject", "Draft"),
|
||||
"body": ctx,
|
||||
"to": actionParams.get("to"),
|
||||
"to": [reply_to] if reply_to else (actionParams.get("to") or []),
|
||||
}
|
||||
actionParams["context"] = ctx
|
||||
if reply_to and not actionParams.get("to"):
|
||||
actionParams["to"] = [reply_to]
|
||||
# Reply flow: attach original email(s) for proper reply
|
||||
if reply_docs:
|
||||
actionParams["replySourceDocuments"] = reply_docs
|
||||
else:
|
||||
# Fallback: incoming email from upstream (if flow is email->AI->draft)
|
||||
incoming = _getIncomingEmailFromUpstream(nodeId, inputSources, nodeOutputs, orderedNodes)
|
||||
# Fallback: incoming email from upstream (AI returned nothing usable)
|
||||
incoming = _unpackIncomingEmail(_getIncomingEmailFromUpstream(nodeId, inputSources, nodeOutputs, orderedNodes))
|
||||
if incoming:
|
||||
ctx, doc_list, reply_to = incoming
|
||||
actionParams["context"] = ctx
|
||||
inc_ctx, doc_list, reply_to, first_subject = incoming
|
||||
actionParams["context"] = inc_ctx
|
||||
if doc_list and not actionParams.get("documentList"):
|
||||
actionParams["documentList"] = doc_list
|
||||
if reply_to and not actionParams.get("to"):
|
||||
actionParams["to"] = [reply_to]
|
||||
if first_subject and not actionParams.get("subject"):
|
||||
actionParams["subject"] = "Re: " + first_subject
|
||||
actionParams["replySourceDocuments"] = doc_list
|
||||
else:
|
||||
doc_count = len(inp.get("documents", [])) if isinstance(inp, dict) else 0
|
||||
doc_count = len(_getDocumentsFromUpstream(inp))
|
||||
logger.warning(
|
||||
"email.draftEmail: AI upstream returned %d doc(s) but context extraction failed (no subject/body, no plain text). "
|
||||
"Ensure AI node outputs document with documentData.",
|
||||
|
|
@ -545,7 +768,7 @@ class ActionNodeExecutor:
|
|||
# File itself is the context: pass as attachment, use filename as minimal context (no content extraction)
|
||||
if not actionParams.get("context"):
|
||||
inp = nodeOutputs.get(srcId)
|
||||
docs = (inp.get("documents") or inp.get("documentList", [])) if isinstance(inp, dict) else []
|
||||
docs = _getDocumentsFromUpstream(inp)
|
||||
doc = docs[0] if docs else None
|
||||
name = None
|
||||
if isinstance(doc, dict):
|
||||
|
|
@ -563,32 +786,71 @@ class ActionNodeExecutor:
|
|||
else:
|
||||
# Direct connection to email.checkEmail/searchEmail: use incoming email as context
|
||||
if not actionParams.get("context"):
|
||||
incoming = _getIncomingEmailFromUpstream(nodeId, inputSources, nodeOutputs, orderedNodes)
|
||||
incoming = _unpackIncomingEmail(_getIncomingEmailFromUpstream(nodeId, inputSources, nodeOutputs, orderedNodes))
|
||||
if incoming:
|
||||
ctx, doc_list, reply_to = incoming
|
||||
actionParams["context"] = ctx
|
||||
inc_ctx, doc_list, reply_to, first_subject = incoming
|
||||
actionParams["context"] = inc_ctx
|
||||
if doc_list and not actionParams.get("documentList"):
|
||||
actionParams["documentList"] = doc_list
|
||||
if reply_to and not actionParams.get("to"):
|
||||
actionParams["to"] = [reply_to]
|
||||
if first_subject and not actionParams.get("subject"):
|
||||
actionParams["subject"] = "Re: " + first_subject
|
||||
actionParams["replySourceDocuments"] = doc_list
|
||||
|
||||
# Generic context handover: when upstream provides documents, pass first doc as content for actions that expect it
|
||||
docList = actionParams.get("documentList") or resolvedParams.get("documentList")
|
||||
if docList and "content" not in actionParams:
|
||||
first = docList[0] if isinstance(docList, list) and docList else docList
|
||||
# Actions like sharepoint.uploadFile consume content from context
|
||||
# Actions like sharepoint.uploadFile / clickup.uploadAttachment consume content from context
|
||||
actionParams["content"] = first
|
||||
|
||||
executor = ActionExecutor(self.services)
|
||||
logger.info("ActionNodeExecutor node %s calling executeAction(%s, %s)", nodeId, methodName, actionName)
|
||||
result = await executor.executeAction(methodName, actionName, actionParams)
|
||||
|
||||
# Extract context from result for unified output (AI text for downstream file nodes)
|
||||
extracted_context = _extractContextFromResult(result) if result else None
|
||||
|
||||
# AI nodes: include prompt in output; context = prompt + AI response (für file.create etc.)
|
||||
prompt_text = (resolvedParams.get("prompt") or resolvedParams.get("aiPrompt") or "")
|
||||
if not isinstance(prompt_text, str):
|
||||
prompt_text = str(prompt_text) if prompt_text else ""
|
||||
prompt_text = (prompt_text or "").strip()
|
||||
if nodeType.startswith("ai.") and prompt_text:
|
||||
full_context = (
|
||||
f"{prompt_text}\n\n{extracted_context}" if extracted_context else prompt_text
|
||||
)
|
||||
else:
|
||||
full_context = extracted_context or ""
|
||||
out_prompt = prompt_text if nodeType.startswith("ai.") else ""
|
||||
|
||||
docs_list = [d.model_dump() if hasattr(d, "model_dump") else d for d in (result.documents or [])]
|
||||
|
||||
# result = AI response text (for contentSources refs: prompt + context + result = full output, optionally duplicated)
|
||||
out_result = extracted_context if nodeType.startswith("ai.") else None
|
||||
|
||||
out = {
|
||||
"success": result.success,
|
||||
"error": result.error,
|
||||
"documents": [d.model_dump() if hasattr(d, "model_dump") else d for d in (result.documents or [])],
|
||||
"documents": docs_list,
|
||||
"documentList": docs_list,
|
||||
"prompt": out_prompt,
|
||||
"context": full_context,
|
||||
"result": out_result,
|
||||
"data": result.model_dump() if hasattr(result, "model_dump") else {"success": result.success, "error": result.error},
|
||||
}
|
||||
if result.success and docs_list and nodeType.startswith("clickup."):
|
||||
try:
|
||||
d0 = docs_list[0] if isinstance(docs_list[0], dict) else {}
|
||||
raw = d0.get("documentData")
|
||||
if isinstance(raw, str) and raw.strip():
|
||||
parsed = json.loads(raw)
|
||||
if isinstance(parsed, dict) and parsed.get("id") is not None:
|
||||
out["taskId"] = str(parsed["id"])
|
||||
out["clickupTask"] = parsed
|
||||
except (json.JSONDecodeError, TypeError, ValueError):
|
||||
pass
|
||||
logger.info(
|
||||
"ActionNodeExecutor node %s result: success=%s error=%s doc_count=%d",
|
||||
nodeId,
|
||||
|
|
|
|||
|
|
@ -1,120 +0,0 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Data transformation node executor (setFields, filter, parseJson, template).
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Dict, Any, List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_nested(obj: Any, path: str) -> Any:
|
||||
"""Get nested key from obj, e.g. 'data.items'."""
|
||||
for k in path.split("."):
|
||||
if not k:
|
||||
continue
|
||||
if isinstance(obj, dict) and k in obj:
|
||||
obj = obj[k]
|
||||
elif isinstance(obj, (list, tuple)) and k.isdigit():
|
||||
obj = obj[int(k)]
|
||||
else:
|
||||
return None
|
||||
return obj
|
||||
|
||||
|
||||
class DataExecutor:
|
||||
"""Execute data transformation nodes."""
|
||||
|
||||
async def execute(
|
||||
self,
|
||||
node: Dict[str, Any],
|
||||
context: Dict[str, Any],
|
||||
) -> Any:
|
||||
nodeType = node.get("type", "")
|
||||
nodeOutputs = context.get("nodeOutputs", {})
|
||||
nodeId = node.get("id", "")
|
||||
inputSources = context.get("inputSources", {}).get(nodeId, {})
|
||||
params = node.get("parameters") or {}
|
||||
logger.info(
|
||||
"DataExecutor node %s type=%s inputSources=%s params=%s",
|
||||
nodeId,
|
||||
nodeType,
|
||||
inputSources,
|
||||
params,
|
||||
)
|
||||
|
||||
inp = None
|
||||
if 0 in inputSources:
|
||||
srcId, _ = inputSources[0]
|
||||
inp = nodeOutputs.get(srcId)
|
||||
|
||||
from modules.workflows.automation2.graphUtils import resolveParameterReferences
|
||||
resolvedParams = {k: resolveParameterReferences(v, nodeOutputs) for k, v in params.items()}
|
||||
|
||||
if nodeType == "data.setFields":
|
||||
out = self._setFields(inp, resolvedParams)
|
||||
logger.info("DataExecutor node %s setFields inp=%s -> %s", nodeId, type(inp).__name__, out)
|
||||
return out
|
||||
if nodeType == "data.filter":
|
||||
out = self._filter(inp, resolvedParams)
|
||||
logger.info("DataExecutor node %s filter inp=%s -> len=%d", nodeId, type(inp).__name__, len(out) if isinstance(out, list) else -1)
|
||||
return out
|
||||
if nodeType == "data.parseJson":
|
||||
out = self._parseJson(inp, resolvedParams)
|
||||
logger.info("DataExecutor node %s parseJson -> %s", nodeId, type(out).__name__)
|
||||
return out
|
||||
if nodeType == "data.template":
|
||||
out = self._template(inp, resolvedParams, nodeOutputs)
|
||||
logger.info("DataExecutor node %s template -> %s", nodeId, out)
|
||||
return out
|
||||
|
||||
logger.debug("DataExecutor node %s unhandled type %s -> passThrough", nodeId, nodeType)
|
||||
return inp
|
||||
|
||||
def _setFields(self, inp: Any, params: Dict) -> Any:
|
||||
fields = params.get("fields", {})
|
||||
if not isinstance(fields, dict):
|
||||
return inp
|
||||
base = dict(inp) if isinstance(inp, dict) else {}
|
||||
base.update(fields)
|
||||
return base
|
||||
|
||||
def _filter(self, inp: Any, params: Dict) -> Any:
|
||||
itemsPath = (params.get("itemsPath") or "").strip()
|
||||
condition = params.get("condition", "True")
|
||||
items = inp
|
||||
if itemsPath:
|
||||
items = _get_nested(inp, itemsPath)
|
||||
if not isinstance(items, list):
|
||||
items = [inp] if inp is not None else []
|
||||
out = []
|
||||
for i, item in enumerate(items):
|
||||
try:
|
||||
local = {"item": item, "index": i, "input": inp}
|
||||
ok = bool(eval(condition, {"__builtins__": {}}, local))
|
||||
if ok:
|
||||
out.append(item)
|
||||
except Exception:
|
||||
pass
|
||||
return out
|
||||
|
||||
def _parseJson(self, inp: Any, params: Dict) -> Any:
|
||||
jsonPath = (params.get("jsonPath") or "").strip()
|
||||
raw = inp
|
||||
if jsonPath:
|
||||
raw = _get_nested(inp, jsonPath) if isinstance(inp, dict) else inp
|
||||
if isinstance(raw, dict):
|
||||
return raw
|
||||
if isinstance(raw, str):
|
||||
try:
|
||||
return json.loads(raw)
|
||||
except json.JSONDecodeError:
|
||||
return {"error": "Invalid JSON", "raw": raw[:200]}
|
||||
return inp
|
||||
|
||||
def _template(self, inp: Any, params: Dict, nodeOutputs: Dict) -> Any:
|
||||
tpl = params.get("template", "")
|
||||
from modules.workflows.automation2.graphUtils import resolveParameterReferences
|
||||
result = resolveParameterReferences(tpl, nodeOutputs)
|
||||
return {"text": result, "template": tpl}
|
||||
|
|
@ -1,9 +1,8 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Flow control node executor (ifElse, merge, wait, stop).
|
||||
# Flow control node executor (ifElse, switch, loop).
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from typing import Any, Dict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -33,18 +32,6 @@ class FlowExecutor:
|
|||
out = await self._ifElse(node, nodeOutputs, nodeId, inputSources)
|
||||
logger.info("FlowExecutor node %s ifElse -> %s", nodeId, out)
|
||||
return out
|
||||
if nodeType == "flow.merge":
|
||||
out = await self._merge(node, nodeOutputs, nodeId, inputSources)
|
||||
logger.info("FlowExecutor node %s merge -> %s", nodeId, out)
|
||||
return out
|
||||
if nodeType == "flow.wait":
|
||||
out = await self._wait(node, nodeOutputs, nodeId, inputSources)
|
||||
logger.info("FlowExecutor node %s wait -> %s", nodeId, out)
|
||||
return out
|
||||
if nodeType == "flow.stop":
|
||||
context["_stopped"] = True
|
||||
logger.info("FlowExecutor node %s -> STOP", nodeId)
|
||||
return {"stopped": True}
|
||||
if nodeType == "flow.switch":
|
||||
out = await self._switch(node, nodeOutputs, nodeId, inputSources)
|
||||
logger.info("FlowExecutor node %s switch -> %s", nodeId, out)
|
||||
|
|
@ -72,60 +59,142 @@ class FlowExecutor:
|
|||
nodeId: str,
|
||||
inputSources: Dict,
|
||||
) -> Any:
|
||||
condExpr = (node.get("parameters") or {}).get("condition", "")
|
||||
condParam = (node.get("parameters") or {}).get("condition")
|
||||
inp = self._getInputData(nodeId, {nodeId: inputSources}, nodeOutputs)
|
||||
# Simple eval - in production use safe evaluation
|
||||
try:
|
||||
# Replace {{nodeId}} refs with actual values
|
||||
from modules.workflows.automation2.graphUtils import resolveParameterReferences
|
||||
resolved = resolveParameterReferences(condExpr, nodeOutputs)
|
||||
# Minimal eval for simple comparisons (e.g. "True", "1 > 0")
|
||||
ok = bool(eval(resolved)) if resolved else False
|
||||
except Exception:
|
||||
ok = False
|
||||
ok = self._evalConditionParam(condParam, nodeOutputs)
|
||||
return {"branch": 0 if ok else 1, "conditionResult": ok, "input": inp}
|
||||
|
||||
async def _merge(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
|
||||
mode = (node.get("parameters") or {}).get("mode", "append")
|
||||
sources = inputSources
|
||||
items = []
|
||||
for inpIdx in sorted(sources.keys()):
|
||||
srcId, _ = sources[inpIdx]
|
||||
data = nodeOutputs.get(srcId)
|
||||
if data is not None:
|
||||
if isinstance(data, list):
|
||||
items.extend(data)
|
||||
def _evalConditionParam(self, condParam: Any, nodeOutputs: Dict) -> bool:
|
||||
"""Evaluate condition: structured {type,ref,operator,value} or legacy string/ref."""
|
||||
if condParam is None:
|
||||
return False
|
||||
if isinstance(condParam, dict) and condParam.get("type") == "condition":
|
||||
return self._evalStructuredCondition(condParam, nodeOutputs)
|
||||
from modules.workflows.automation2.graphUtils import resolveParameterReferences
|
||||
resolved = resolveParameterReferences(condParam, nodeOutputs)
|
||||
return self._evalCondition(resolved)
|
||||
|
||||
def _get_by_path(self, data: Any, path: list) -> Any:
|
||||
"""Traverse data by path (strings and ints)."""
|
||||
current = data
|
||||
for seg in path:
|
||||
if current is None:
|
||||
return None
|
||||
if isinstance(current, dict) and isinstance(seg, str) and seg in current:
|
||||
current = current[seg]
|
||||
elif isinstance(current, (list, tuple)) and isinstance(seg, (int, str)):
|
||||
idx = int(seg) if isinstance(seg, str) and str(seg).isdigit() else seg
|
||||
if isinstance(idx, int) and 0 <= idx < len(current):
|
||||
current = current[idx]
|
||||
else:
|
||||
items.append(data)
|
||||
if mode == "combine" and len(items) == 2:
|
||||
if isinstance(items[0], dict) and isinstance(items[1], dict):
|
||||
return {**items[0], **items[1]}
|
||||
return items
|
||||
return None
|
||||
else:
|
||||
return None
|
||||
return current
|
||||
|
||||
async def _wait(self, node: Dict, nodeOutputs: Dict) -> Any:
|
||||
secs = (node.get("parameters") or {}).get("seconds", 0)
|
||||
if secs > 0:
|
||||
await asyncio.sleep(min(float(secs), 300))
|
||||
nodeId = node.get("id")
|
||||
from modules.workflows.automation2.graphUtils import getInputSources
|
||||
# Input comes from context
|
||||
inp = context.get("_inputData") if "context" in dir() else None
|
||||
return nodeOutputs.get(nodeId, {})
|
||||
def _evalStructuredCondition(self, cond: Dict, nodeOutputs: Dict) -> bool:
|
||||
"""Evaluate structured {ref, operator, value} condition."""
|
||||
ref = cond.get("ref")
|
||||
if not ref or ref.get("type") != "ref":
|
||||
return False
|
||||
node_id = ref.get("nodeId")
|
||||
path = ref.get("path") or []
|
||||
left = self._get_by_path(nodeOutputs.get(node_id), list(path))
|
||||
operator = cond.get("operator", "eq")
|
||||
right = cond.get("value")
|
||||
|
||||
async def _wait(
|
||||
self,
|
||||
node: Dict,
|
||||
nodeOutputs: Dict,
|
||||
nodeId: str,
|
||||
inputSources: Dict,
|
||||
) -> Any:
|
||||
secs = (node.get("parameters") or {}).get("seconds", 0)
|
||||
if secs > 0:
|
||||
await asyncio.sleep(min(float(secs), 300))
|
||||
if 0 in inputSources:
|
||||
srcId, _ = inputSources[0]
|
||||
return nodeOutputs.get(srcId)
|
||||
return None
|
||||
if operator == "eq":
|
||||
return left == right
|
||||
if operator == "neq":
|
||||
return left != right
|
||||
if operator in ("lt", "lte", "gt", "gte"):
|
||||
try:
|
||||
l, r = float(left) if left is not None else 0, float(right) if right is not None else 0
|
||||
if operator == "lt":
|
||||
return l < r
|
||||
if operator == "lte":
|
||||
return l <= r
|
||||
if operator == "gt":
|
||||
return l > r
|
||||
if operator == "gte":
|
||||
return l >= r
|
||||
except (TypeError, ValueError):
|
||||
return False
|
||||
if operator == "contains":
|
||||
return right is not None and str(right) in str(left or "")
|
||||
if operator == "not_contains":
|
||||
return right is None or str(right) not in str(left or "")
|
||||
if operator == "empty":
|
||||
return left is None or left == "" or (isinstance(left, (list, dict)) and len(left) == 0)
|
||||
if operator == "not_empty":
|
||||
return left is not None and left != "" and (not isinstance(left, (list, dict)) or len(left) > 0)
|
||||
if operator == "is_true":
|
||||
return bool(left)
|
||||
if operator == "is_false":
|
||||
return not bool(left)
|
||||
if operator == "before":
|
||||
return self._compare_dates(left, right, lambda a, b: a < b)
|
||||
if operator == "after":
|
||||
return self._compare_dates(left, right, lambda a, b: a > b)
|
||||
if operator == "exists":
|
||||
return self._file_exists(left)
|
||||
if operator == "not_exists":
|
||||
return not self._file_exists(left)
|
||||
return False
|
||||
|
||||
def _compare_dates(self, left: Any, right: Any, op) -> bool:
|
||||
"""Compare left/right as dates; op(a,b) is the comparison."""
|
||||
|
||||
def parse(v):
|
||||
if v is None:
|
||||
return None
|
||||
if hasattr(v, "timestamp"):
|
||||
return v
|
||||
s = str(v).strip()
|
||||
if not s:
|
||||
return None
|
||||
from datetime import datetime
|
||||
|
||||
for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"):
|
||||
try:
|
||||
return datetime.strptime(s, fmt)
|
||||
except ValueError:
|
||||
continue
|
||||
try:
|
||||
return datetime.fromisoformat(s.replace("Z", "+00:00"))
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
try:
|
||||
a, b = parse(left), parse(right)
|
||||
if a is None or b is None:
|
||||
return False
|
||||
return op(a, b)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _file_exists(self, val: Any) -> bool:
|
||||
"""Check if value represents an existing file (object with url or non-empty string)."""
|
||||
if val is None:
|
||||
return False
|
||||
if isinstance(val, dict):
|
||||
return bool(val.get("url") or val.get("name"))
|
||||
if isinstance(val, str):
|
||||
return len(val.strip()) > 0
|
||||
return bool(val)
|
||||
|
||||
def _evalCondition(self, resolved: Any) -> bool:
|
||||
"""Evaluate condition: ref resolves to value → use truthiness; string → try eval."""
|
||||
if resolved is None:
|
||||
return False
|
||||
if isinstance(resolved, (bool, int, float)):
|
||||
return bool(resolved)
|
||||
if isinstance(resolved, str):
|
||||
try:
|
||||
return bool(eval(resolved))
|
||||
except Exception:
|
||||
return bool(resolved)
|
||||
return bool(resolved)
|
||||
|
||||
async def _switch(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
|
||||
valueExpr = (node.get("parameters") or {}).get("value", "")
|
||||
|
|
@ -133,14 +202,71 @@ class FlowExecutor:
|
|||
value = resolveParameterReferences(valueExpr, nodeOutputs)
|
||||
cases = (node.get("parameters") or {}).get("cases", [])
|
||||
for i, c in enumerate(cases):
|
||||
if c == value:
|
||||
if self._evalSwitchCase(value, c):
|
||||
return {"match": i, "value": value}
|
||||
return {"match": -1, "value": value}
|
||||
|
||||
def _evalSwitchCase(self, left: Any, case: Any) -> bool:
|
||||
"""
|
||||
Evaluate a switch case. Case can be:
|
||||
- dict: {operator, value} - use operator to compare left vs value
|
||||
- plain value: legacy format - exact equality (eq)
|
||||
"""
|
||||
if isinstance(case, dict):
|
||||
operator = case.get("operator", "eq")
|
||||
right = case.get("value")
|
||||
else:
|
||||
operator = "eq"
|
||||
right = case
|
||||
# Same logic as _evalStructuredCondition but with explicit left/right
|
||||
if operator == "eq":
|
||||
return left == right
|
||||
if operator == "neq":
|
||||
return left != right
|
||||
if operator in ("lt", "lte", "gt", "gte"):
|
||||
try:
|
||||
l, r = float(left) if left is not None else 0, float(right) if right is not None else 0
|
||||
if operator == "lt":
|
||||
return l < r
|
||||
if operator == "lte":
|
||||
return l <= r
|
||||
if operator == "gt":
|
||||
return l > r
|
||||
if operator == "gte":
|
||||
return l >= r
|
||||
except (TypeError, ValueError):
|
||||
return False
|
||||
if operator == "contains":
|
||||
return right is not None and str(right) in str(left or "")
|
||||
if operator == "not_contains":
|
||||
return right is None or str(right) not in str(left or "")
|
||||
if operator == "empty":
|
||||
return left is None or left == "" or (isinstance(left, (list, dict)) and len(left) == 0)
|
||||
if operator == "not_empty":
|
||||
return left is not None and left != "" and (not isinstance(left, (list, dict)) or len(left) > 0)
|
||||
if operator == "is_true":
|
||||
return bool(left)
|
||||
if operator == "is_false":
|
||||
return not bool(left)
|
||||
if operator == "before":
|
||||
return self._compare_dates(left, right, lambda a, b: a < b)
|
||||
if operator == "after":
|
||||
return self._compare_dates(left, right, lambda a, b: a > b)
|
||||
if operator == "exists":
|
||||
return self._file_exists(left)
|
||||
if operator == "not_exists":
|
||||
return not self._file_exists(left)
|
||||
return False
|
||||
|
||||
async def _loop(self, node: Dict, nodeOutputs: Dict, nodeId: str, inputSources: Dict) -> Any:
|
||||
itemsPath = (node.get("parameters") or {}).get("items", "[]")
|
||||
from modules.workflows.automation2.graphUtils import resolveParameterReferences
|
||||
items = resolveParameterReferences(itemsPath, nodeOutputs)
|
||||
if not isinstance(items, list):
|
||||
if isinstance(items, list):
|
||||
pass
|
||||
elif isinstance(items, dict):
|
||||
# Convert form payload / object to list of {name, value} for "for each field"
|
||||
items = [{"name": k, "value": v} for k, v in items.items()]
|
||||
else:
|
||||
items = [items] if items is not None else []
|
||||
return {"items": items, "count": len(items)}
|
||||
|
|
|
|||
|
|
@ -45,18 +45,22 @@ class IOExecutor:
|
|||
if 0 in inputSources:
|
||||
srcId, _ = inputSources[0]
|
||||
inp = nodeOutputs.get(srcId)
|
||||
if isinstance(inp, dict):
|
||||
resolvedParams.setdefault("documentList", inp.get("documents", inp.get("documentList", [])))
|
||||
from modules.workflows.automation2.executors.actionNodeExecutor import _getDocumentsFromUpstream
|
||||
docs = _getDocumentsFromUpstream(inp) if isinstance(inp, dict) else []
|
||||
if docs:
|
||||
resolvedParams.setdefault("documentList", docs)
|
||||
elif inp is not None:
|
||||
resolvedParams.setdefault("input", inp)
|
||||
|
||||
executor = ActionExecutor(self.services)
|
||||
logger.info("IOExecutor node %s calling executeAction(%s, %s)", nodeId, methodName, actionName)
|
||||
result = await executor.executeAction(methodName, actionName, resolvedParams)
|
||||
docs_list = [d.model_dump() if hasattr(d, "model_dump") else d for d in (result.documents or [])]
|
||||
out = {
|
||||
"success": result.success,
|
||||
"error": result.error,
|
||||
"documents": [d.model_dump() if hasattr(d, "model_dump") else d for d in (result.documents or [])],
|
||||
"documents": docs_list,
|
||||
"documentList": docs_list,
|
||||
"data": result.model_dump() if hasattr(result, "model_dump") else {"success": result.success, "error": result.error},
|
||||
}
|
||||
logger.info(
|
||||
|
|
|
|||
|
|
@ -1,37 +1,34 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# Trigger node executor.
|
||||
# Start node executor (node type trigger.manual) — outputs the unified run envelope from context.
|
||||
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from typing import Any, Dict
|
||||
|
||||
from modules.workflows.automation2.runEnvelope import normalize_run_envelope
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TriggerExecutor:
|
||||
"""Execute trigger nodes (manual, schedule, formSubmit)."""
|
||||
"""
|
||||
Single start node on the canvas. Output is always context['runEnvelope'], normalized.
|
||||
Invocation mode (manual, form, webhook, …) is configured as workflow entry points, not here.
|
||||
"""
|
||||
|
||||
async def execute(
|
||||
self,
|
||||
node: Dict[str, Any],
|
||||
context: Dict[str, Any],
|
||||
) -> Any:
|
||||
nodeType = node.get("type", "")
|
||||
nodeId = node.get("id", "")
|
||||
logger.info("TriggerExecutor node %s type=%s parameters=%s", nodeId, nodeType, node.get("parameters"))
|
||||
if nodeType == "trigger.manual":
|
||||
out = {"triggered": True, "source": "manual"}
|
||||
logger.info("TriggerExecutor node %s -> manual trigger: %s", nodeId, out)
|
||||
return out
|
||||
if nodeType == "trigger.schedule":
|
||||
out = {"triggered": True, "source": "schedule"}
|
||||
logger.info("TriggerExecutor node %s -> schedule trigger: %s", nodeId, out)
|
||||
return out
|
||||
if nodeType == "trigger.formSubmit":
|
||||
params = node.get("parameters") or {}
|
||||
formId = params.get("formId", "")
|
||||
out = {"triggered": True, "source": "formSubmit", "formId": formId}
|
||||
logger.info("TriggerExecutor node %s -> formSubmit: %s", nodeId, out)
|
||||
return out
|
||||
out = {"triggered": True, "source": "unknown"}
|
||||
logger.info("TriggerExecutor node %s -> unknown: %s", nodeId, out)
|
||||
node_id = node.get("id", "")
|
||||
base = context.get("runEnvelope")
|
||||
if not isinstance(base, dict):
|
||||
out = normalize_run_envelope(None, user_id=context.get("userId"))
|
||||
else:
|
||||
out = normalize_run_envelope(base, user_id=context.get("userId"))
|
||||
logger.info(
|
||||
"TriggerExecutor node %s trigger.type=%s",
|
||||
node_id,
|
||||
(out.get("trigger") or {}).get("type"),
|
||||
)
|
||||
return out
|
||||
|
|
|
|||
|
|
@ -47,6 +47,27 @@ def buildConnectionMap(connections: List[Dict]) -> Dict[str, List[Tuple[str, int
|
|||
return out
|
||||
|
||||
|
||||
def getLoopBodyNodeIds(loopNodeId: str, connectionMap: Dict[str, List[Tuple[str, int, int]]]) -> Set[str]:
|
||||
"""Nodes reachable from loop's output (BFS forward). Body = downstream nodes that receive from loop."""
|
||||
from collections import deque
|
||||
body = set()
|
||||
# connectionMap: target -> [(source, sourceOutput, targetInput)]
|
||||
rev: Dict[str, List[str]] = {} # source -> [targets]
|
||||
for tgt, pairs in connectionMap.items():
|
||||
for src, _, _ in pairs:
|
||||
if src not in rev:
|
||||
rev[src] = []
|
||||
rev[src].append(tgt)
|
||||
q = deque([loopNodeId])
|
||||
while q:
|
||||
nid = q.popleft()
|
||||
for tgt in rev.get(nid, []):
|
||||
if tgt not in body:
|
||||
body.add(tgt)
|
||||
q.append(tgt)
|
||||
return body
|
||||
|
||||
|
||||
def getInputSources(nodeId: str, connectionMap: Dict[str, List[Tuple[str, int, int]]]) -> Dict[int, Tuple[str, int]]:
|
||||
"""
|
||||
For a node, return targetInput -> (sourceNodeId, sourceOutput).
|
||||
|
|
@ -142,12 +163,59 @@ def topoSort(nodes: List[Dict], connectionMap: Dict[str, List[Tuple[str, int, in
|
|||
return order
|
||||
|
||||
|
||||
def _get_by_path(data: Any, path: List[Any]) -> Any:
|
||||
"""Traverse data by path (strings and ints); return None if not found."""
|
||||
current = data
|
||||
for seg in path:
|
||||
if current is None:
|
||||
return None
|
||||
if isinstance(current, dict) and isinstance(seg, str) and seg in current:
|
||||
current = current[seg]
|
||||
elif isinstance(current, (list, tuple)) and isinstance(seg, (int, str)):
|
||||
idx = int(seg) if isinstance(seg, str) and seg.isdigit() else seg
|
||||
if isinstance(idx, int) and 0 <= idx < len(current):
|
||||
current = current[idx]
|
||||
else:
|
||||
return None
|
||||
else:
|
||||
return None
|
||||
return current
|
||||
|
||||
|
||||
def resolveParameterReferences(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
|
||||
"""
|
||||
Resolve {{nodeId.output}} or {{nodeId.output.path}} in strings/structures.
|
||||
Resolve parameter references:
|
||||
- {{nodeId.output}} or {{nodeId.output.path}} in strings (legacy)
|
||||
- { "type": "ref", "nodeId": "...", "path": ["field", "nested"] } -> resolved value
|
||||
- { "type": "value", "value": ... } -> value (then recursively resolve)
|
||||
"""
|
||||
import json
|
||||
import re
|
||||
|
||||
if isinstance(value, dict):
|
||||
if value.get("type") == "ref":
|
||||
node_id = value.get("nodeId")
|
||||
path = value.get("path")
|
||||
if node_id is not None and isinstance(path, (list, tuple)):
|
||||
data = nodeOutputs.get(node_id)
|
||||
plist = list(path)
|
||||
resolved = _get_by_path(data, plist)
|
||||
# input.form historically stored flat field dict; refs use payload.<field>
|
||||
if (
|
||||
resolved is None
|
||||
and isinstance(data, dict)
|
||||
and plist
|
||||
and plist[0] == "payload"
|
||||
and len(plist) > 1
|
||||
):
|
||||
resolved = _get_by_path(data, plist[1:])
|
||||
return resolveParameterReferences(resolved, nodeOutputs)
|
||||
return value
|
||||
if value.get("type") == "value":
|
||||
inner = value.get("value")
|
||||
return resolveParameterReferences(inner, nodeOutputs)
|
||||
return {k: resolveParameterReferences(v, nodeOutputs) for k, v in value.items()}
|
||||
|
||||
if isinstance(value, str):
|
||||
def repl(m):
|
||||
ref = m.group(1).strip()
|
||||
|
|
@ -170,8 +238,6 @@ def resolveParameterReferences(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
|
|||
return m.group(0)
|
||||
return str(data) if data is not None else m.group(0)
|
||||
return re.sub(r"\{\{\s*([^}]+)\s*\}\}", repl, value)
|
||||
if isinstance(value, dict):
|
||||
return {k: resolveParameterReferences(v, nodeOutputs) for k, v in value.items()}
|
||||
if isinstance(value, list):
|
||||
return [resolveParameterReferences(v, nodeOutputs) for v in value]
|
||||
return value
|
||||
|
|
|
|||
109
modules/workflows/automation2/runEnvelope.py
Normal file
109
modules/workflows/automation2/runEnvelope.py
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
"""
|
||||
Unified run envelope for Automation2 start/trigger nodes.
|
||||
|
||||
Downstream nodes always see the same structure regardless of entry point
|
||||
(manual, form, schedule, webhook, email, api, event).
|
||||
"""
|
||||
|
||||
from copy import deepcopy
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
# Recognized trigger.type values for run envelopes.
TRIGGER_TYPES = frozenset(
    {"manual", "form", "schedule", "email", "webhook", "api", "event"}
)


def default_run_envelope(
    trigger_type: str = "manual",
    *,
    entry_point_id: Optional[str] = None,
    entry_point_label: Optional[str] = None,
    payload: Optional[Dict[str, Any]] = None,
    context: Optional[Dict[str, Any]] = None,
    files: Optional[List[Any]] = None,
    user: Optional[Dict[str, Any]] = None,
    metadata: Optional[Dict[str, Any]] = None,
    raw: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """
    Build a normalized run-envelope dict.

    An unknown trigger_type falls back to "manual". Every container
    argument is shallow-copied so the caller's objects are never shared
    with the returned envelope.
    """
    trigger: Dict[str, Any] = {
        "type": trigger_type if trigger_type in TRIGGER_TYPES else "manual"
    }
    if entry_point_id:
        trigger["entryPointId"] = entry_point_id
    if entry_point_label:
        trigger["label"] = entry_point_label

    envelope: Dict[str, Any] = {"trigger": trigger}
    for key, src in (("payload", payload), ("context", context)):
        envelope[key] = dict(src or {})
    envelope["files"] = list(files or [])
    for key, src in (("user", user), ("metadata", metadata), ("raw", raw)):
        envelope[key] = dict(src or {})
    return envelope
|
||||
|
||||
|
||||
def merge_run_envelope(base: Dict[str, Any], overrides: Optional[Dict[str, Any]]) -> Dict[str, Any]:
    """
    Merge *overrides* into a deep copy of *base* and return the result.

    payload/context/user/metadata/raw are merged one level deep (override
    keys win); "files" is replaced wholesale when given; "trigger" is merged
    and its type coerced back to "manual" if not a known trigger type.
    """
    merged = deepcopy(base)
    if not overrides:
        return merged

    for section in ("payload", "context", "user", "metadata", "raw"):
        patch = overrides.get(section)
        if isinstance(patch, dict):
            combined = dict(merged.get(section) or {})
            combined.update(patch)
            merged[section] = combined

    if "files" in overrides and overrides["files"] is not None:
        merged["files"] = list(overrides["files"])

    trigger = dict(merged.get("trigger") or {})
    trigger_patch = overrides.get("trigger")
    if isinstance(trigger_patch, dict):
        trigger.update(trigger_patch)
    if trigger.get("type") and trigger["type"] not in TRIGGER_TYPES:
        trigger["type"] = "manual"
    merged["trigger"] = trigger
    return merged
|
||||
|
||||
|
||||
def normalize_run_envelope(
    incoming: Optional[Dict[str, Any]],
    *,
    user_id: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Normalize a partial or missing envelope (e.g. from API or scheduler).

    Guarantees that all top-level envelope keys exist and, when *user_id*
    is given, that the user section carries an "id".
    """
    if isinstance(incoming, dict) and incoming:
        trigger = incoming.get("trigger")
        if not isinstance(trigger, dict):
            trigger = {}
        trigger_type = trigger.get("type") or "manual"
        if trigger_type not in TRIGGER_TYPES:
            trigger_type = "manual"
        env = default_run_envelope(
            trigger_type,
            entry_point_id=trigger.get("entryPointId"),
            entry_point_label=trigger.get("label"),
            payload=incoming.get("payload"),
            context=incoming.get("context"),
            files=incoming.get("files"),
            user=incoming.get("user"),
            metadata=incoming.get("metadata"),
            raw=incoming.get("raw"),
        )
    else:
        env = default_run_envelope("manual")

    if user_id:
        current_user = env.get("user")
        if not current_user:
            env["user"] = {"id": user_id}
        elif isinstance(current_user, dict) and "id" not in current_user:
            env["user"] = {**current_user, "id": user_id}
    return env
|
||||
34
modules/workflows/automation2/scheduleCron.py
Normal file
34
modules/workflows/automation2/scheduleCron.py
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
"""
|
||||
Parse cron strings (5-field or 6-field) to APScheduler CronTrigger kwargs.
|
||||
Frontend produces: "minute hour day month dow" (5-field) or "sec min hour day month dow" (6-field).
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
def parse_cron_to_kwargs(cron: str) -> Dict[str, Any]:
    """
    Parse a cron string into kwargs for APScheduler's CronTrigger.

    Supports 5-field ("minute hour day month day_of_week") and 6-field
    ("second minute hour day month day_of_week") expressions; 5-field input
    gets second="0". Quartz-style "?" ("no specific value") is translated to
    "*", since APScheduler's CronTrigger does not accept "?".

    Returns a dict with keys: second, minute, hour, day, month, day_of_week.
    Raises ValueError for empty input or a wrong field count.

    NOTE(review): standard cron counts day_of_week from 0=Sunday while
    APScheduler counts from 0=Monday — numeric day-of-week values coming
    from the frontend may need remapping; verify against its convention.
    """
    if not cron or not isinstance(cron, str):
        raise ValueError("Invalid cron: empty or not string")
    # Normalize Quartz "?" to "*" so APScheduler accepts the field.
    parts = ["*" if p == "?" else p for p in cron.strip().split()]
    if len(parts) == 5:
        minute, hour, day, month, day_of_week = parts
        second = "0"
    elif len(parts) == 6:
        second, minute, hour, day, month, day_of_week = parts
    else:
        raise ValueError(f"Invalid cron format: expected 5 or 6 fields, got {len(parts)}")
    return {
        "second": second,
        "minute": minute,
        "hour": hour,
        "day": day,
        "month": month,
        "day_of_week": day_of_week,
    }
|
||||
304
modules/workflows/automation2/subAutomation2Schedule.py
Normal file
304
modules/workflows/automation2/subAutomation2Schedule.py
Normal file
|
|
@ -0,0 +1,304 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
"""
|
||||
Automation2 schedule scheduler.
|
||||
Starts/stops cron jobs for workflows with schedule entry points.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from modules.shared.eventManagement import eventManager
|
||||
|
||||
# Main loop reference for scheduling async work from job executor (may run in thread)
|
||||
_main_loop = None
|
||||
|
||||
|
||||
def set_main_loop(loop) -> None:
    """Remember the main asyncio event loop so scheduler jobs (which may fire
    in executor threads) can schedule coroutines onto it thread-safely."""
    global _main_loop
    _main_loop = loop
|
||||
from modules.features.automation2.interfaceFeatureAutomation2 import (
|
||||
getAutomation2Interface,
|
||||
getAllWorkflowsForScheduling,
|
||||
)
|
||||
from modules.features.automation2.mainAutomation2 import getAutomation2Services
|
||||
from modules.features.automation2.entryPoints import find_invocation
|
||||
from modules.workflows.automation2.scheduleCron import parse_cron_to_kwargs
|
||||
|
||||
|
||||
def _cron_to_interval_seconds(cron: str):
|
||||
"""
|
||||
If cron represents a simple interval, return seconds. Otherwise None.
|
||||
E.g. "* * * * *" -> 60, "*/15 * * * *" -> 900, "*/30 * * * * *" -> 30.
|
||||
"""
|
||||
if not cron or not isinstance(cron, str):
|
||||
return None
|
||||
parts = cron.strip().split()
|
||||
if len(parts) == 5:
|
||||
minute, hour, day, month, dow = parts
|
||||
second = "0"
|
||||
elif len(parts) == 6:
|
||||
second, minute, hour, day, month, dow = parts
|
||||
else:
|
||||
return None
|
||||
# Interval minutes: */N * * * *
|
||||
if minute.startswith("*/") and hour == "*" and day == "*" and month == "*" and dow == "*":
|
||||
n = int(minute[2:]) if minute[2:].isdigit() else 0
|
||||
if n > 0:
|
||||
return n * 60
|
||||
# Every minute: * * * * *
|
||||
if minute == "*" and hour == "*" and day == "*" and month == "*" and dow == "*" and second == "0":
|
||||
return 60
|
||||
# Interval hours: 0 */N * * *
|
||||
if minute == "0" and hour.startswith("*/") and day == "*" and month == "*" and dow == "*":
|
||||
n = int(hour[2:]) if hour[2:].isdigit() else 0
|
||||
if n > 0:
|
||||
return n * 3600
|
||||
# Interval seconds: */N * * * * * (6-field)
|
||||
if len(parts) == 6 and second.startswith("*/") and minute == "*" and hour == "*" and day == "*" and month == "*" and dow in ("*", "?"):
|
||||
n = int(second[2:]) if second[2:].isdigit() else 0
|
||||
if n > 0:
|
||||
return n
|
||||
return None
|
||||
from modules.workflows.automation2.executionEngine import executeGraph
|
||||
from modules.workflows.automation2.runEnvelope import default_run_envelope, normalize_run_envelope
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
JOB_ID_PREFIX = "automation2."
|
||||
|
||||
|
||||
def _remove_all_automation2_schedule_jobs() -> None:
    """Drop every Automation2 schedule job (id prefix JOB_ID_PREFIX) from the shared scheduler."""
    scheduler = eventManager.scheduler
    if not scheduler:
        return
    # Snapshot the job list: we mutate the scheduler while iterating.
    for job in list(scheduler.get_jobs()):
        job_id = job.id if hasattr(job, "id") else str(job)
        if not job_id.startswith(JOB_ID_PREFIX):
            continue
        try:
            eventManager.remove(job_id)
        except Exception as exc:
            logger.debug("Could not remove job %s: %s", job_id, exc)
|
||||
|
||||
|
||||
def sync_automation2_schedule_events(event_user) -> Dict[str, Any]:
    """
    Sync the scheduler with all active Automation2 workflows that have
    schedule entry points.

    Removes every existing Automation2 job first, then registers one job per
    workflow that has a valid cron. Simple "every N" crons become interval
    jobs (more reliable than CronTrigger); everything else becomes a cron job.

    Returns {"synced": n, "workflowsFound": m, "events": {workflowId: jobId}}.
    """
    if not event_user:
        logger.warning("Automation2 schedule: No event user, skipping sync")
        return {"synced": 0, "events": {}}

    _remove_all_automation2_schedule_jobs()

    items = getAllWorkflowsForScheduling()
    registered: Dict[str, str] = {}
    logger.info(
        "Automation2 schedule: found %d workflow(s) with trigger.schedule and cron",
        len(items),
    )

    for item in items:
        workflow_id = item.get("workflowId")
        mandate_id = item.get("mandateId")
        instance_id = item.get("featureInstanceId")
        entry_point_id = item.get("entryPointId")
        cron = item.get("cron")
        workflow = item.get("workflow")

        if not workflow_id or not instance_id or not cron:
            continue

        job_id = f"{JOB_ID_PREFIX}{workflow_id}"
        async_handler = _create_schedule_handler(
            workflow_id=workflow_id,
            mandate_id=mandate_id,
            instance_id=instance_id,
            entry_point_id=entry_point_id,
            workflow=workflow,
            event_user=event_user,
        )

        # Sync wrapper: schedule the async handler on the main loop (the job
        # may fire in an executor thread).
        # BUG FIX: bind the handler as a default argument. A plain closure
        # over `async_handler` is late-binding — all wrappers would share one
        # cell and every job would run the handler of the LAST workflow in
        # this loop.
        def sync_wrapper(handler=async_handler):
            loop = _main_loop
            if loop and loop.is_running():
                loop.call_soon_threadsafe(
                    lambda: asyncio.ensure_future(handler(), loop=loop)
                )
            else:
                # Fallback: run inline if no loop (shouldn't happen)
                try:
                    asyncio.run(handler())
                except RuntimeError:
                    logger.warning("Automation2 schedule: could not run handler, no event loop")

        # Use IntervalTrigger for "every N minutes" - more reliable than CronTrigger
        interval_seconds = _cron_to_interval_seconds(cron)
        if interval_seconds is not None:
            eventManager.registerInterval(
                jobId=job_id,
                func=sync_wrapper,
                seconds=interval_seconds,
                replaceExisting=True,
            )
        else:
            try:
                cron_kwargs = parse_cron_to_kwargs(cron)
                eventManager.registerCron(
                    jobId=job_id,
                    func=sync_wrapper,
                    cronKwargs=cron_kwargs,
                    replaceExisting=True,
                )
            except ValueError as e:
                logger.warning("Workflow %s: invalid cron %r: %s", workflow_id, cron, e)
                continue
        registered[workflow_id] = job_id
        mode = "interval" if interval_seconds is not None else "cron"
        logger.info(
            "Automation2 schedule: registered %s for workflow %s (%s=%s)",
            job_id,
            workflow_id,
            mode,
            interval_seconds if interval_seconds is not None else cron,
        )

    if not registered and items:
        logger.warning("Automation2 schedule: workflows found but none registered (check cron format)")
    elif not items:
        logger.info("Automation2 schedule: no workflows with trigger.schedule+cron (save workflow after selecting Zeitplan)")
    return {"synced": len(registered), "workflowsFound": len(items), "events": registered}
|
||||
|
||||
|
||||
def _create_schedule_handler(
    workflow_id: str,
    mandate_id: str,
    instance_id: str,
    entry_point_id: str,
    workflow: Dict[str, Any],
    event_user,
):
    """Create async handler for scheduled workflow execution.

    The returned coroutine function re-fetches the workflow on every firing
    (so edits made after registration are honored), re-checks that it is
    active and that the schedule entry point is still enabled, then runs the
    graph. All failures are logged, never raised into the scheduler.
    """

    async def handler():
        """Execute workflow *workflow_id* once; called by the scheduler job."""
        logger.info("Automation2 schedule: CRON FIRED for workflow %s", workflow_id)
        try:
            if not event_user:
                logger.error("Automation2 schedule: event user not available")
                return

            # Always load the current workflow state — the `workflow` snapshot
            # captured at registration time may be stale.
            a2 = getAutomation2Interface(event_user, mandate_id, instance_id)
            wf = a2.getWorkflow(workflow_id)
            if not wf or not wf.get("graph"):
                logger.warning("Automation2 schedule: workflow %s not found or no graph", workflow_id)
                return
            if not wf.get("active", True):
                logger.info("Automation2 schedule: workflow %s inactive, skipping", workflow_id)
                return

            # Skip if the schedule entry point exists but is disabled or no
            # longer a schedule invocation.
            inv = find_invocation(wf, entry_point_id)
            if inv and (inv.get("kind") != "schedule" or not inv.get("enabled", True)):
                logger.info("Automation2 schedule: entry point %s disabled for workflow %s", entry_point_id, workflow_id)
                return
            # If inv not found but graph has trigger.schedule, proceed (invocations may not be synced)

            services = getAutomation2Services(
                event_user,
                mandateId=mandate_id,
                featureInstanceId=instance_id,
            )
            # Local import avoids a module-level import cycle — TODO confirm.
            from modules.workflows.processing.shared.methodDiscovery import discoverMethods
            discoverMethods(services)

            # Entry-point label: title may be a localized dict or a plain string.
            title = (inv or {}).get("title") or {}
            label = ""
            if isinstance(title, dict):
                label = title.get("en") or title.get("de") or ""
            elif isinstance(title, str):
                label = title

            # Build the unified run envelope for a schedule-triggered run.
            run_env = default_run_envelope(
                "schedule",
                entry_point_id=entry_point_id,
                entry_point_label=label or None,
            )
            run_env = normalize_run_envelope(run_env, user_id=str(event_user.id) if event_user else None)

            # userId=None so tasks are created unassigned and visible to all instance users
            result = await executeGraph(
                graph=wf["graph"],
                services=services,
                workflowId=workflow_id,
                instanceId=instance_id,
                userId=None,
                mandateId=mandate_id,
                automation2_interface=a2,
                run_envelope=run_env,
            )
            logger.info(
                "Automation2 schedule: executed workflow %s success=%s paused=%s",
                workflow_id,
                result.get("success"),
                result.get("paused"),
            )
        except Exception as e:
            # Never let an exception escape into the scheduler thread.
            logger.exception("Automation2 schedule: failed to execute workflow %s: %s", workflow_id, e)

    return handler
|
||||
|
||||
|
||||
def start(event_user) -> bool:
    """
    Start Automation2 schedule scheduler and sync scheduled workflows.

    Registers callback so schedule is re-synced when workflows are
    created/updated/deleted. Also schedules a one-shot delayed re-sync
    (5 s, daemon thread) in case the DB was not ready at startup.

    Returns True on success (also when skipped for a missing event user),
    False when startup failed.
    """
    if not event_user:
        logger.warning("Automation2 schedule: No event user provided, skipping")
        return True

    try:
        # Idempotent: ensure the shared scheduler is running before we add jobs.
        eventManager.start()
        sync_automation2_schedule_events(event_user)
        logger.info("Automation2 schedule: sync complete")

        # Delayed sync (5s) in case DB was not ready at startup
        def do_delayed_sync():
            import threading
            def _run():
                import time
                time.sleep(5)
                try:
                    sync_automation2_schedule_events(event_user)
                    logger.info("Automation2 schedule: delayed sync done")
                except Exception as e:
                    logger.warning("Automation2 schedule: delayed sync failed: %s", e)
            # Daemon thread: must not block interpreter shutdown.
            t = threading.Thread(target=_run, daemon=True)
            t.start()
        do_delayed_sync()

        # Re-sync whenever a workflow is created/updated/deleted.
        def on_workflow_changed(_context=None):
            try:
                sync_automation2_schedule_events(event_user)
                logger.debug("Automation2 schedule: re-synced after workflow change")
            except Exception as e:
                logger.warning("Automation2 schedule: re-sync failed: %s", e)

        from modules.shared.callbackRegistry import callbackRegistry
        callbackRegistry.register("automation2.workflow.changed", on_workflow_changed)
    except Exception as e:
        logger.error("Automation2 schedule: Failed to start: %s", e)
        return False

    return True
|
||||
|
||||
|
||||
def stop(event_user) -> bool:
    """Stop the Automation2 schedule scheduler by removing every registered
    schedule job. Always returns True; failures are logged, never raised."""
    try:
        _remove_all_automation2_schedule_jobs()
    except Exception as exc:
        logger.warning("Automation2 schedule: error during stop: %s", exc)
    else:
        logger.info("Automation2 schedule: all jobs removed")
    return True
|
||||
6
modules/workflows/methods/methodClickup/__init__.py
Normal file
6
modules/workflows/methods/methodClickup/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
from .methodClickup import MethodClickup
|
||||
|
||||
__all__ = ["MethodClickup"]
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp workflow actions."""
|
||||
213
modules/workflows/methods/methodClickup/actions/create_task.py
Normal file
213
modules/workflows/methods/methodClickup/actions/create_task.py
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionDocument, ActionResult
|
||||
from ..helpers.pathparse import parse_team_and_list
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _as_str(v: Any) -> str:
|
||||
if v is None:
|
||||
return ""
|
||||
return str(v).strip()
|
||||
|
||||
|
||||
def _parse_custom_field_values(parameters: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||
"""Build ClickUp custom_fields array from customFieldValues map (field id -> value)."""
|
||||
raw = parameters.get("customFieldValues")
|
||||
if raw is None:
|
||||
return []
|
||||
if isinstance(raw, str) and raw.strip():
|
||||
try:
|
||||
raw = json.loads(raw)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
if not isinstance(raw, dict):
|
||||
return []
|
||||
out: List[Dict[str, Any]] = []
|
||||
for fid, val in raw.items():
|
||||
if val is None or val == "":
|
||||
continue
|
||||
if isinstance(val, dict) and val.get("type") in ("ref", "value"):
|
||||
continue
|
||||
out.append({"id": str(fid), "value": val})
|
||||
return out
|
||||
|
||||
|
||||
def _unwrap_value(v: Any) -> Any:
|
||||
if isinstance(v, dict) and v.get("type") == "value" and "value" in v:
|
||||
return v.get("value")
|
||||
return v
|
||||
|
||||
|
||||
def _parse_int_list(val: Any) -> List[int]:
    """
    Coerce *val* into a list of ints.

    Accepts a real list or a JSON-encoded list in a string; None/blank
    elements are dropped. Anything unparseable yields [].
    """
    if val is None:
        return []
    val = _unwrap_value(val)
    if isinstance(val, str) and val.strip():
        try:
            decoded = json.loads(val)
            if isinstance(decoded, list):
                return [int(x) for x in decoded if x is not None and str(x).strip() != ""]
        except (json.JSONDecodeError, ValueError, TypeError):
            return []
    if not isinstance(val, list):
        return []
    result: List[int] = []
    for item in val:
        if item is None or (isinstance(item, str) and not item.strip()):
            continue
        try:
            result.append(int(item))
        except (ValueError, TypeError):
            continue
    return result
|
||||
|
||||
|
||||
def _optional_positive_int(v: Any) -> Optional[int]:
    """Return *v* as a positive int, or None when missing, invalid or non-positive."""
    v = _unwrap_value(v)
    if v is None or v == "":
        return None
    try:
        result = int(float(v))
    except (ValueError, TypeError):
        return None
    return result if result > 0 else None
|
||||
|
||||
|
||||
def _parse_due_date_ms(v: Any) -> Optional[int]:
    """
    Parse a due date into Unix milliseconds.

    Accepts a Unix-ms number (or numeric string) or an ISO date string
    starting "YYYY-MM-DD" (interpreted as midnight UTC). Returns None for
    empty, unparseable or non-positive input.
    """
    v = _unwrap_value(v)
    if v is None or v == "":
        return None
    looks_iso = isinstance(v, str) and len(v) >= 10 and v[4] == "-" and v[7] == "-"
    if looks_iso:
        try:
            midnight_utc = datetime.strptime(v[:10], "%Y-%m-%d").replace(tzinfo=timezone.utc)
            return int(midnight_utc.timestamp() * 1000)
        except ValueError:
            pass  # fall through to numeric interpretation
    try:
        ms = int(float(v))
    except (ValueError, TypeError):
        return None
    return ms if ms > 0 else None
|
||||
|
||||
|
||||
def _parse_time_estimate_hours_to_ms(v: Any) -> Optional[int]:
    """Convert an hours value (number or numeric string) to milliseconds;
    None when missing, invalid or negative."""
    v = _unwrap_value(v)
    if v is None or v == "":
        return None
    try:
        hours = float(v)
    except (ValueError, TypeError):
        return None
    if hours < 0:
        return None
    return int(round(hours * 3600 * 1000))
|
||||
|
||||
|
||||
def _apply_standard_task_fields(body: Dict[str, Any], parameters: Dict[str, Any]) -> None:
    """
    Map the node's first-class task parameters onto the ClickUp
    POST /list/{id}/task request *body* in place (run before merging the
    free-form taskFields JSON, so taskFields can still override).
    """
    status = _as_str(parameters.get("taskStatus") or parameters.get("clickupStatus"))
    if status:
        body["status"] = status

    priority = _unwrap_value(parameters.get("taskPriority"))
    if priority is not None and priority != "":
        try:
            priority_int = int(float(priority))
        except (ValueError, TypeError):
            pass
        else:
            # Only the 1..4 range is forwarded; anything else is dropped.
            if 1 <= priority_int <= 4:
                body["priority"] = priority_int

    due_ms = _parse_due_date_ms(parameters.get("taskDueDateMs"))
    if due_ms is not None:
        body["due_date"] = due_ms

    assignee_ids = _parse_int_list(parameters.get("taskAssigneeIds"))
    if assignee_ids:
        body["assignees"] = assignee_ids

    # Prefer the hours-based estimate; fall back to a raw milliseconds value.
    estimate_ms = _parse_time_estimate_hours_to_ms(parameters.get("taskTimeEstimateHours"))
    if estimate_ms is not None:
        body["time_estimate"] = estimate_ms
    else:
        fallback_ms = _optional_positive_int(parameters.get("taskTimeEstimateMs"))
        if fallback_ms is not None:
            body["time_estimate"] = fallback_ms
|
||||
|
||||
|
||||
def _merge_custom_fields(body: Dict[str, Any], items: List[Dict[str, Any]]) -> None:
|
||||
if not items:
|
||||
return
|
||||
existing = body.get("custom_fields")
|
||||
if isinstance(existing, list) and existing:
|
||||
by_id: Dict[str, Dict[str, Any]] = {}
|
||||
for x in existing:
|
||||
if isinstance(x, dict) and x.get("id") is not None:
|
||||
by_id[str(x["id"])] = x
|
||||
for item in items:
|
||||
by_id[str(item["id"])] = item
|
||||
body["custom_fields"] = list(by_id.values())
|
||||
else:
|
||||
body["custom_fields"] = items
|
||||
|
||||
|
||||
async def create_task(self, parameters: Dict[str, Any]) -> ActionResult:
    """
    Create a ClickUp task in a list.

    Required parameters: connectionReference; name; and either listId or a
    virtual path ("/team/{teamId}/list/{listId}") in pathQuery/path.
    Optional: description, the first-class task fields handled by
    _apply_standard_task_fields, a free-form taskFields JSON object
    (merged over the body, so it can override), and customFieldValues.

    Returns an ActionResult with one JSON document containing the ClickUp
    API response, or a failure result with a human-readable error.
    """
    connection_reference = parameters.get("connectionReference")
    list_id = (parameters.get("listId") or "").strip()
    path_query = (parameters.get("pathQuery") or parameters.get("path") or "").strip()
    name = _as_str(parameters.get("name"))
    description = _as_str(parameters.get("description"))

    # --- validate input ---
    if not connection_reference:
        return ActionResult.isFailure(error="connectionReference is required")
    if not list_id and path_query:
        # Fall back to extracting the list id from the virtual picker path.
        _t, lid = parse_team_and_list(path_query)
        list_id = lid or list_id
    if not list_id:
        return ActionResult.isFailure(error="listId or path /team/{teamId}/list/{listId} is required")
    if not name:
        return ActionResult.isFailure(error="name is required")

    conn = self.connection.get_clickup_connection(connection_reference)
    if not conn:
        return ActionResult.isFailure(error="No valid ClickUp connection")

    # --- build request body ---
    body: Dict[str, Any] = {"name": name}
    if description:
        body["description"] = description
    _apply_standard_task_fields(body, parameters)
    # taskFields: free-form JSON merged last so it can override anything above.
    extra = parameters.get("taskFields")
    if isinstance(extra, str) and extra.strip():
        try:
            parsed = json.loads(extra)
            if isinstance(parsed, dict):
                body.update(parsed)
        except json.JSONDecodeError:
            return ActionResult.isFailure(error="taskFields must be valid JSON object")
    elif isinstance(extra, dict):
        body.update(extra)

    # Merge custom field values on top of anything taskFields may have set.
    cf_items = _parse_custom_field_values(parameters)
    if cf_items:
        _merge_custom_fields(body, cf_items)

    # --- call ClickUp ---
    data = await self.services.clickup.createTask(list_id, body)
    if isinstance(data, dict) and data.get("error"):
        # NOTE(review): assumes data["body"] is a string when present — a
        # non-string body would make this concatenation raise; verify upstream.
        return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))

    doc = ActionDocument(
        documentName="clickup_create_task.json",
        documentData=json.dumps(data, ensure_ascii=False, indent=2),
        mimeType="application/json",
        validationMetadata={"actionType": "clickup.createTask", "listId": list_id},
    )
    return ActionResult.isSuccess(documents=[doc])
|
||||
40
modules/workflows/methods/methodClickup/actions/get_task.py
Normal file
40
modules/workflows/methods/methodClickup/actions/get_task.py
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionDocument, ActionResult
|
||||
from ..helpers.pathparse import parse_task_id
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def get_task(self, parameters: Dict[str, Any]) -> ActionResult:
    """
    Fetch a single ClickUp task.

    Parameters:
        connectionReference: required ClickUp connection reference.
        taskId: the task id; if absent, it is parsed from path/pathQuery
                (a virtual path ending in .../task/{id}).

    Returns an ActionResult whose single JSON document holds the raw task
    payload, or a failure result with a human-readable error.
    """
    connection_reference = parameters.get("connectionReference")
    task_id = (parameters.get("taskId") or "").strip()
    path_hint = (parameters.get("path") or parameters.get("pathQuery") or "").strip()
    if not connection_reference:
        return ActionResult.isFailure(error="connectionReference is required")

    if not task_id and path_hint:
        task_id = parse_task_id(path_hint) or ""
    if not task_id:
        return ActionResult.isFailure(error="taskId is required (or path ending in .../task/{id})")

    conn = self.connection.get_clickup_connection(connection_reference)
    if not conn:
        return ActionResult.isFailure(error="No valid ClickUp connection")

    data = await self.services.clickup.getTask(task_id)
    if isinstance(data, dict) and data.get("error"):
        # Robustness fix: str() the body too — it may be a parsed non-string
        # payload, and plain `+` would raise TypeError instead of reporting
        # the actual API error.
        return ActionResult.isFailure(error=str(data.get("error")) + str(data.get("body") or ""))

    doc = ActionDocument(
        documentName=f"clickup_task_{task_id}.json",
        documentData=json.dumps(data, ensure_ascii=False, indent=2),
        mimeType="application/json",
        validationMetadata={"actionType": "clickup.getTask", "taskId": task_id},
    )
    return ActionResult.isSuccess(documents=[doc])
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionDocument, ActionResult
|
||||
from ..helpers.pathparse import parse_team_and_list
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def list_tasks(self, parameters: Dict[str, Any]) -> ActionResult:
    """
    List the tasks of a ClickUp list identified by a virtual picker path.

    Parameters:
        connectionReference: required ClickUp connection reference.
        pathQuery / path: required, "/team/{teamId}/list/{listId}".
        page: optional page number (defaults to 0; invalid values fall back to 0).
        includeClosed: optional flag, include closed tasks.

    Returns an ActionResult with one JSON document containing the raw
    ClickUp response (subtasks included), or a failure result.
    """
    connection_reference = parameters.get("connectionReference")
    path_query = (parameters.get("pathQuery") or parameters.get("path") or "").strip()
    if not connection_reference:
        return ActionResult.isFailure(error="connectionReference is required")
    if not path_query:
        return ActionResult.isFailure(error="path (virtual path to a list) is required, e.g. /team/{teamId}/list/{listId}")

    conn = self.connection.get_clickup_connection(connection_reference)
    if not conn:
        return ActionResult.isFailure(error="No valid ClickUp connection")

    team_id, list_id = parse_team_and_list(path_query)
    if not list_id:
        return ActionResult.isFailure(
            error="path must be /team/{teamId}/list/{listId} (browse to a list in the ClickUp picker)"
        )

    # Robustness fix: a non-numeric "page" parameter should not crash the node.
    try:
        page = int(parameters.get("page") or 0)
    except (ValueError, TypeError):
        page = 0
    include_closed = bool(parameters.get("includeClosed", False))
    data = await self.services.clickup.getTasksInList(
        list_id, page=page, include_closed=include_closed, subtasks=True
    )
    if isinstance(data, dict) and data.get("error"):
        # Robustness fix: str() the body too — a parsed non-string payload
        # would make plain `+` raise TypeError instead of reporting the error.
        return ActionResult.isFailure(error=str(data.get("error")) + str(data.get("body") or ""))

    doc = ActionDocument(
        documentName="clickup_list_tasks.json",
        documentData=json.dumps(data, ensure_ascii=False, indent=2),
        mimeType="application/json",
        validationMetadata={
            "actionType": "clickup.listTasks",
            "teamId": team_id,
            "listId": list_id,
            "path": path_query,
        },
    )
    return ActionResult.isSuccess(documents=[doc])
|
||||
221
modules/workflows/methods/methodClickup/actions/search_tasks.py
Normal file
221
modules/workflows/methods/methodClickup/actions/search_tasks.py
Normal file
|
|
@ -0,0 +1,221 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionDocument, ActionResult
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_DESC_MAX = 4000
|
||||
_MAX_LIST_PAGES = 50
|
||||
|
||||
_DASHES = re.compile(r"[\u2010-\u2015\-]")
|
||||
|
||||
|
||||
def _norm_title(s: str) -> str:
|
||||
"""Lowercase, unify hyphens/dashes, collapse spaces (helps full-title matches)."""
|
||||
t = (s or "").strip().lower()
|
||||
t = _DASHES.sub("-", t)
|
||||
t = re.sub(r"\s+", " ", t)
|
||||
return t
|
||||
|
||||
|
||||
def _title_contains_query(name: str, query: str) -> bool:
    """True when *query* occurs in *name*, compared both dash/space-normalized
    and as a plain lowercase substring. An empty query matches everything."""
    if not query:
        return True
    if _norm_title(query) in _norm_title(name):
        return True
    return query.lower() in (name or "").lower()
|
||||
|
||||
|
||||
def _task_text_for_broad(t: Dict[str, Any]) -> str:
|
||||
parts: List[str] = []
|
||||
if t.get("name"):
|
||||
parts.append(str(t["name"]))
|
||||
d = t.get("description") or t.get("text_content") or t.get("textcontent") or ""
|
||||
if d:
|
||||
parts.append(str(d))
|
||||
return " ".join(parts).lower()
|
||||
|
||||
|
||||
def _task_matches_query(t: Dict[str, Any], query: str, *, match_name_only: bool) -> bool:
    """Match a task against *query*: title-only when match_name_only is set,
    otherwise against name + description text. Empty query matches all."""
    if not query:
        return True
    if match_name_only:
        return _title_contains_query(t.get("name") or "", query)
    return query.lower() in _task_text_for_broad(t)
|
||||
|
||||
|
||||
def _pick(d: Dict[str, Any], *keys: str, default: Any = None) -> Any:
|
||||
for k in keys:
|
||||
if k in d and d[k] is not None:
|
||||
return d[k]
|
||||
return default
|
||||
|
||||
|
||||
def _slim_custom_field(cf: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Only include custom fields that have a value (omit null noise)."""
|
||||
val = cf.get("value")
|
||||
if val is None or val == "":
|
||||
return None
|
||||
return {
|
||||
"id": cf.get("id"),
|
||||
"name": cf.get("name"),
|
||||
"type": cf.get("type"),
|
||||
"value": val,
|
||||
}
|
||||
|
||||
|
||||
def _slim_clickup_task(t: Dict[str, Any]) -> Dict[str, Any]:
    """Usable automation payload — not the full ClickUp API mirror (no nested typeconfig)."""
    status_obj = t.get("status")
    if not isinstance(status_obj, dict):
        status_obj = {}
    list_obj = t.get("list")
    if not isinstance(list_obj, dict):
        list_obj = {}

    # Cap very long descriptions so a single task cannot blow up the payload.
    description = _pick(t, "description", "Description", default="") or ""
    if len(description) > _DESC_MAX:
        description = description[:_DESC_MAX] + "…(truncated)"

    assignees: List[Dict[str, Any]] = [
        {"id": a.get("id"), "username": a.get("username"), "email": a.get("email")}
        for a in (t.get("assignees") or [])
        if isinstance(a, dict)
    ]

    raw_fields = t.get("custom_fields") or t.get("customfields") or []
    slim_fields: List[Dict[str, Any]] = [
        row
        for row in (_slim_custom_field(cf) for cf in raw_fields if isinstance(cf, dict))
        if row is not None
    ]

    slim: Dict[str, Any] = {
        "id": t.get("id"),
        "name": t.get("name"),
        "text_content": _pick(t, "text_content", "textcontent"),
        "description": description,
        "status": status_obj.get("status"),
        "url": t.get("url"),
        "list": {"id": list_obj.get("id"), "name": list_obj.get("name")} if list_obj else None,
        "date_created": _pick(t, "date_created", "datecreated"),
        "date_updated": _pick(t, "date_updated", "dateupdated"),
        "due_date": _pick(t, "due_date", "duedate"),
    }
    # Optional keys only appear when there is something to say.
    if assignees:
        slim["assignees"] = assignees
    if slim_fields:
        slim["custom_fields"] = slim_fields
    priority = t.get("priority")
    if priority is not None:
        slim["priority"] = priority
    return slim
|
||||
|
||||
|
||||
def _slim_search_payload(data: Dict[str, Any]) -> Dict[str, Any]:
    """Replace data['tasks'] with slimmed task dicts and tag the payload as slim."""
    out: Dict[str, Any] = {key: value for key, value in data.items() if key != "tasks"}
    out["tasks"] = [
        _slim_clickup_task(task) if isinstance(task, dict) else task
        for task in (data.get("tasks") or [])
    ]
    out["_nyla"] = {
        "slim": True,
        "hint": "Set fullTaskData=true for raw ClickUp API objects.",
    }
    return out
|
||||
|
||||
|
||||
async def search_tasks(self, parameters: Dict[str, Any]) -> ActionResult:
    """Search ClickUp tasks and return one JSON ActionDocument with the matches.

    Two modes: when ``listId`` is given, pages of that list are fetched and
    matched locally; otherwise the ClickUp team-wide search endpoint is used.
    Results are slimmed via _slim_search_payload unless ``fullTaskData`` is set.
    """
    connection_reference = parameters.get("connectionReference")
    team_id = (parameters.get("teamId") or "").strip()
    query = (parameters.get("query") or parameters.get("searchQuery") or "").strip()
    list_id_filter = (parameters.get("listId") or "").strip()
    if not connection_reference:
        return ActionResult.isFailure(error="connectionReference is required")
    if not team_id:
        return ActionResult.isFailure(error="teamId is required (workspace id from ClickUp)")
    if not query:
        return ActionResult.isFailure(error="query is required")

    # Resolves the connection AND configures the shared ClickUp service token.
    conn = self.connection.get_clickup_connection(connection_reference)
    if not conn:
        return ActionResult.isFailure(error="No valid ClickUp connection")

    full_task_data = bool(parameters.get("fullTaskData") or parameters.get("fullPayload"))
    # matchNameOnly wins over the legacy matchTitle parameter; default is title-only.
    if "matchNameOnly" in parameters:
        match_name_only = bool(parameters.get("matchNameOnly"))
    elif "matchTitle" in parameters:
        match_name_only = bool(parameters.get("matchTitle"))
    else:
        match_name_only = True

    page = int(parameters.get("page") or 0)
    include_closed = bool(parameters.get("includeClosed", False))

    if list_id_filter:
        # List API: scan pages in this list and match locally (team search does not scope to one table).
        filtered_tasks: List[Dict[str, Any]] = []
        p = page
        # _MAX_LIST_PAGES caps the scan so a huge list cannot loop forever.
        while p < page + _MAX_LIST_PAGES:
            batch = await self.services.clickup.getTasksInList(
                list_id_filter, page=p, include_closed=include_closed, subtasks=True
            )
            if isinstance(batch, dict) and batch.get("error"):
                return ActionResult.isFailure(error=str(batch.get("error")) + (batch.get("body") or ""))
            tasks = batch.get("tasks") or []
            # Service may report the end under either key spelling.
            last = bool(batch.get("last_page") or batch.get("lastpage"))
            for t in tasks:
                if isinstance(t, dict) and _task_matches_query(t, query, match_name_only=match_name_only):
                    filtered_tasks.append(t)
            if last or not tasks:
                break
            p += 1
        data: Dict[str, Any] = {"tasks": filtered_tasks, "lastpage": True}
        search_mode = "list"
    else:
        data = await self.services.clickup.searchTeamTasks(team_id, query=query, page=page)
        if isinstance(data, dict) and data.get("error"):
            return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))

        if match_name_only and isinstance(data, dict):
            # Team search matches broadly server-side; keep only title matches locally.
            tasks = data.get("tasks") or []
            filtered = [
                t
                for t in tasks
                if isinstance(t, dict) and _title_contains_query((t.get("name") or ""), query)
            ]
            data = {**data, "tasks": filtered}
        search_mode = "team"

    if isinstance(data, dict) and not full_task_data:
        data = _slim_search_payload(data)

    doc = ActionDocument(
        documentName="clickup_search_tasks.json",
        documentData=json.dumps(data, ensure_ascii=False, indent=2),
        mimeType="application/json",
        validationMetadata={
            "actionType": "clickup.searchTasks",
            "teamId": team_id,
            "query": query,
            "slim": not full_task_data,
            "matchNameOnly": match_name_only,
            "searchMode": search_mode,
            "listId": list_id_filter or None,
            "includeClosed": include_closed if list_id_filter else None,
        },
    )
    return ActionResult.isSuccess(documents=[doc])
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionDocument, ActionResult
|
||||
from ..helpers.pathparse import parse_task_id
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def update_task(self, parameters: Dict[str, Any]) -> ActionResult:
    """Apply a JSON update body to an existing ClickUp task (PUT /task/{id}).

    Accepts the body under ``taskUpdate``, ``taskJson`` or ``body`` as either a
    JSON string or a dict; returns the updated task as a JSON ActionDocument.
    """
    connection_reference = parameters.get("connectionReference")
    if not connection_reference:
        return ActionResult.isFailure(error="connectionReference is required")

    task_id = (parameters.get("taskId") or "").strip()
    path_hint = (parameters.get("path") or "").strip()
    if not task_id and path_hint:
        # Allow a virtual path ending in /task/{id} instead of a bare id.
        task_id = parse_task_id(path_hint) or ""
    if not task_id:
        return ActionResult.isFailure(error="taskId is required")

    raw_update = parameters.get("taskUpdate") or parameters.get("taskJson") or parameters.get("body")
    if raw_update is None or raw_update == "":
        return ActionResult.isFailure(error="taskUpdate (JSON object) is required — add update fields or advanced JSON")
    if isinstance(raw_update, dict):
        body = raw_update
    elif isinstance(raw_update, str):
        try:
            body = json.loads(raw_update)
        except json.JSONDecodeError as e:
            return ActionResult.isFailure(error=f"taskUpdate must be valid JSON: {e}")
    else:
        return ActionResult.isFailure(error="taskUpdate must be a JSON string or object")

    if not isinstance(body, dict):
        return ActionResult.isFailure(error="taskUpdate JSON must be an object")
    if not body:
        return ActionResult.isFailure(error="taskUpdate is empty — set at least one field to update")

    # Resolving the connection also configures the shared ClickUp service token.
    if not self.connection.get_clickup_connection(connection_reference):
        return ActionResult.isFailure(error="No valid ClickUp connection")

    data = await self.services.clickup.updateTask(task_id, body)
    if isinstance(data, dict) and data.get("error"):
        return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))

    result_doc = ActionDocument(
        documentName=f"clickup_task_{task_id}_updated.json",
        documentData=json.dumps(data, ensure_ascii=False, indent=2),
        mimeType="application/json",
        validationMetadata={"actionType": "clickup.updateTask", "taskId": task_id},
    )
    return ActionResult.isSuccess(documents=[result_doc])
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionDocument, ActionResult
|
||||
from ..helpers.pathparse import parse_task_id
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def upload_attachment(self, parameters: Dict[str, Any]) -> ActionResult:
    """Upload a file attachment to a ClickUp task.

    ``content`` comes from an upstream node and may be a dict-shaped document,
    an ActionDocument-like object, or a list of either (first element is used).
    Bytes are resolved from inline documentData (base64 or raw) or fetched via
    a fileId found in validationMetadata.
    """
    connection_reference = parameters.get("connectionReference")
    task_id = (parameters.get("taskId") or "").strip()
    path_hint = (parameters.get("path") or "").strip()
    if not connection_reference:
        return ActionResult.isFailure(error="connectionReference is required")
    if not task_id and path_hint:
        # Accept a virtual path ending in /task/{id} instead of a bare id.
        task_id = parse_task_id(path_hint) or ""
    if not task_id:
        return ActionResult.isFailure(error="taskId is required")

    # Resolving the connection also configures the shared ClickUp service token.
    conn = self.connection.get_clickup_connection(connection_reference)
    if not conn:
        return ActionResult.isFailure(error="No valid ClickUp connection")

    content_param = parameters.get("content")
    if not content_param:
        return ActionResult.isFailure(error="content is required (connect a file node upstream)")

    # Upstream may deliver a list of documents; only the first is attached.
    content = content_param[0] if isinstance(content_param, (list, tuple)) and content_param else content_param
    file_name = parameters.get("fileName")
    file_bytes = None

    if isinstance(content, dict):
        file_name = file_name or content.get("documentName") or content.get("fileName") or "attachment"
        raw_data = content.get("documentData")
        # Stored-file reference: no inline data, but a fileId in validationMetadata.
        if (content.get("validationMetadata") or {}).get("fileId") and not raw_data:
            fid = content["validationMetadata"]["fileId"]
            try:
                raw = self.services.chat.getFileData(fid)
                file_bytes = raw if isinstance(raw, bytes) else str(raw).encode("utf-8")
            except Exception as e:
                return ActionResult.isFailure(error=f"Could not load file {fid}: {e}")
        elif raw_data is not None:
            if isinstance(raw_data, bytes):
                file_bytes = raw_data
            elif isinstance(raw_data, str):
                # Try base64 first; on failure treat the string as plain text.
                # NOTE(review): b64decode without validate=True decodes loosely,
                # so some non-base64 text may still "decode" — confirm intended.
                try:
                    file_bytes = base64.b64decode(raw_data)
                except Exception:
                    file_bytes = raw_data.encode("utf-8")
            else:
                return ActionResult.isFailure(error="Unsupported documentData type")
        else:
            return ActionResult.isFailure(error="Could not read file bytes from content")
    elif hasattr(content, "documentData"):
        # ActionDocument-like object path: same decoding rules as the dict case.
        file_name = file_name or getattr(content, "documentName", None) or getattr(content, "fileName", None) or "attachment"
        raw_data = content.documentData
        if isinstance(raw_data, bytes):
            file_bytes = raw_data
        elif isinstance(raw_data, str):
            try:
                file_bytes = base64.b64decode(raw_data)
            except Exception:
                file_bytes = raw_data.encode("utf-8")
        else:
            return ActionResult.isFailure(error="Unsupported documentData on ActionDocument")
    else:
        return ActionResult.isFailure(error="Unsupported content format")

    if not file_bytes:
        return ActionResult.isFailure(error="Empty file content")

    data = await self.services.clickup.uploadTaskAttachment(task_id, file_bytes, file_name or "file")
    if isinstance(data, dict) and data.get("error"):
        return ActionResult.isFailure(error=str(data.get("error")) + (data.get("body") or ""))

    doc = ActionDocument(
        documentName="clickup_upload_attachment.json",
        documentData=json.dumps(data, ensure_ascii=False, indent=2),
        mimeType="application/json",
        validationMetadata={"actionType": "clickup.uploadAttachment", "taskId": task_id},
    )
    return ActionResult.isSuccess(documents=[doc])
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""Resolve ClickUp UserConnection and configure ClickupService."""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ClickupConnectionHelper:
    """Resolve a ClickUp UserConnection from a reference string and configure the
    shared ClickUp service with its access token."""

    def __init__(self, method_instance: Any):
        # The owning workflow method; its services bundle provides chat/db/clickup.
        self.method = method_instance
        self.services = method_instance.services

    def get_clickup_connection(self, connection_reference: str) -> Optional[Dict[str, Any]]:
        """Return {"id", "userConnection"} for a valid ClickUp connection, else None.

        Accepts either a "connection:..." reference (resolved via the chat
        service) or a raw connection id (resolved via interfaceDbApp). Also
        sets the access token on the ClickUp service as a side effect.
        """
        try:
            # UI may append a " [label]" suffix to the reference; strip it.
            ref = (connection_reference or "").split(" [")[0].strip()
            if not ref:
                return None
            user_connection = None
            if ref.startswith("connection:"):
                user_connection = self.services.chat.getUserConnectionFromConnectionReference(ref)
            else:
                app = getattr(self.services, "interfaceDbApp", None)
                if app and hasattr(app, "getUserConnectionById"):
                    user_connection = app.getUserConnectionById(ref)
            if not user_connection:
                logger.warning("No user connection for reference/id %s", connection_reference)
                return None
            # authority/status may be enums (.value) or plain strings.
            authority = getattr(user_connection.authority, "value", None) or str(
                user_connection.authority
            )
            if authority != "clickup":
                logger.warning("Connection %s is not ClickUp (authority=%s)", user_connection.id, authority)
                return None
            status = getattr(user_connection.status, "value", None) or str(user_connection.status)
            if status not in ("active", "pending"):
                # NOTE(review): non-active status only logs and does not bail out —
                # presumably to let a token refresh happen downstream; confirm intended.
                logger.warning("Connection %s status not active: %s", user_connection.id, status)

            cu = getattr(self.services, "clickup", None)
            if not cu:
                return None
            if not cu.setAccessTokenFromConnection(user_connection):
                logger.warning("Failed to set ClickUp token for connection %s", user_connection.id)
                return None
            return {"id": user_connection.id, "userConnection": user_connection}
        except Exception as e:
            # Broad catch: any resolution failure is treated as "no connection".
            logger.error("get_clickup_connection error: %s", e)
            return None
|
||||
26
modules/workflows/methods/methodClickup/helpers/pathparse.py
Normal file
26
modules/workflows/methods/methodClickup/helpers/pathparse.py
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""Parse virtual ClickUp paths used by the connector."""
|
||||
|
||||
import re
|
||||
from typing import Optional, Tuple
|
||||
|
||||
|
||||
def parse_team_and_list(path: str) -> Tuple[Optional[str], Optional[str]]:
    """Extract (teamId, listId) from '/team/{teamId}/list/{listId}'; (None, None) otherwise."""
    candidate = (path or "").strip()
    match = re.match(r"^/team/([^/]+)/list/([^/]+)$", candidate)
    if match is None:
        return None, None
    return match.group(1), match.group(2)
|
||||
|
||||
|
||||
def parse_task_id(path_or_id: str) -> Optional[str]:
    """Extract a task id from a path ending in '/task/{id}', or accept a bare id.

    A bare id must be longer than 4 characters of [A-Za-z0-9_-] to be accepted.
    """
    candidate = (path_or_id or "").strip()
    if not candidate:
        return None
    match = re.match(r"^.*/task/([^/]+)$", candidate)
    if match:
        return match.group(1)
    if len(candidate) > 4 and re.match(r"^[a-zA-Z0-9_-]+$", candidate):
        return candidate
    return None
|
||||
349
modules/workflows/methods/methodClickup/methodClickup.py
Normal file
349
modules/workflows/methods/methodClickup/methodClickup.py
Normal file
|
|
@ -0,0 +1,349 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""ClickUp workflow method — list/search/get/create/update tasks and upload attachments."""
|
||||
|
||||
import logging
|
||||
|
||||
from modules.datamodels.datamodelWorkflowActions import WorkflowActionDefinition, WorkflowActionParameter
|
||||
from modules.shared.frontendTypes import FrontendType
|
||||
from modules.workflows.methods.methodBase import MethodBase
|
||||
|
||||
from .helpers.connection import ClickupConnectionHelper
|
||||
from .actions.list_tasks import list_tasks
|
||||
from .actions.search_tasks import search_tasks
|
||||
from .actions.get_task import get_task
|
||||
from .actions.create_task import create_task
|
||||
from .actions.update_task import update_task
|
||||
from .actions.upload_attachment import upload_attachment
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MethodClickup(MethodBase):
    """ClickUp API actions for automation2 (lists as tables)."""

    def __init__(self, services):
        """Register the ClickUp action definitions on this method instance.

        Each action's ``execute`` is a module-level coroutine bound to this
        instance via ``__get__``, so actions live in their own modules but run
        with access to ``self.services`` and ``self.connection``.
        """
        super().__init__(services)
        self.name = "clickup"
        self.description = "ClickUp task and list operations"
        # Shared helper that resolves UserConnections and sets the API token.
        self.connection = ClickupConnectionHelper(self)

        self._actions = {
            # List the tasks of one ClickUp list, addressed by virtual path.
            "listTasks": WorkflowActionDefinition(
                actionId="clickup.listTasks",
                description="List tasks in a ClickUp list (virtual path /team/{id}/list/{id})",
                dynamicMode=True,
                parameters={
                    "connectionReference": WorkflowActionParameter(
                        name="connectionReference",
                        type="str",
                        frontendType=FrontendType.USER_CONNECTION,
                        required=True,
                        description="ClickUp connection",
                    ),
                    "pathQuery": WorkflowActionParameter(
                        name="pathQuery",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=True,
                        description="Virtual path to list: /team/{teamId}/list/{listId}",
                    ),
                    "page": WorkflowActionParameter(
                        name="page",
                        type="int",
                        frontendType=FrontendType.NUMBER,
                        required=False,
                        default=0,
                        description="Page index",
                    ),
                    "includeClosed": WorkflowActionParameter(
                        name="includeClosed",
                        type="bool",
                        frontendType=FrontendType.CHECKBOX,
                        required=False,
                        default=False,
                        description="Include closed tasks",
                    ),
                },
                execute=list_tasks.__get__(self, self.__class__),
            ),
            # Team-wide search, optionally scoped to one list via listId.
            "searchTasks": WorkflowActionDefinition(
                actionId="clickup.searchTasks",
                description="Search tasks in a ClickUp workspace (team)",
                dynamicMode=True,
                parameters={
                    "connectionReference": WorkflowActionParameter(
                        name="connectionReference",
                        type="str",
                        frontendType=FrontendType.USER_CONNECTION,
                        required=True,
                        description="ClickUp connection",
                    ),
                    "teamId": WorkflowActionParameter(
                        name="teamId",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=True,
                        description="Workspace (team) ID",
                    ),
                    "query": WorkflowActionParameter(
                        name="query",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=True,
                        description="Search query",
                    ),
                    "page": WorkflowActionParameter(
                        name="page",
                        type="int",
                        frontendType=FrontendType.NUMBER,
                        required=False,
                        default=0,
                        description="Page index",
                    ),
                    "listId": WorkflowActionParameter(
                        name="listId",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description=(
                            "If set, tasks are loaded from this list via the list API (not team search). "
                            "Use this to search the selected table."
                        ),
                    ),
                    "includeClosed": WorkflowActionParameter(
                        name="includeClosed",
                        type="bool",
                        frontendType=FrontendType.CHECKBOX,
                        required=False,
                        default=False,
                        description="When listId is set, include closed tasks in list pages.",
                    ),
                    "fullTaskData": WorkflowActionParameter(
                        name="fullTaskData",
                        type="bool",
                        frontendType=FrontendType.CHECKBOX,
                        required=False,
                        default=False,
                        description="If true, return raw ClickUp API task objects (large). Default is a slim payload.",
                    ),
                    "matchNameOnly": WorkflowActionParameter(
                        name="matchNameOnly",
                        type="bool",
                        frontendType=FrontendType.CHECKBOX,
                        required=False,
                        default=True,
                        description="If true, keep only tasks whose title contains the search query (default: true).",
                    ),
                },
                execute=search_tasks.__get__(self, self.__class__),
            ),
            # Fetch a single task by id or virtual path.
            "getTask": WorkflowActionDefinition(
                actionId="clickup.getTask",
                description="Get a single task by ID",
                dynamicMode=True,
                parameters={
                    "connectionReference": WorkflowActionParameter(
                        name="connectionReference",
                        type="str",
                        frontendType=FrontendType.USER_CONNECTION,
                        required=True,
                        description="ClickUp connection",
                    ),
                    "taskId": WorkflowActionParameter(
                        name="taskId",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Task ID",
                    ),
                    "pathQuery": WorkflowActionParameter(
                        name="pathQuery",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Optional virtual path ending in /task/{taskId}",
                    ),
                },
                execute=get_task.__get__(self, self.__class__),
            ),
            # Create a task; standard fields plus optional JSON merge-ins.
            "createTask": WorkflowActionDefinition(
                actionId="clickup.createTask",
                description="Create a task in a list",
                dynamicMode=True,
                parameters={
                    "connectionReference": WorkflowActionParameter(
                        name="connectionReference",
                        type="str",
                        frontendType=FrontendType.USER_CONNECTION,
                        required=True,
                        description="ClickUp connection",
                    ),
                    "listId": WorkflowActionParameter(
                        name="listId",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="List ID (if not using path)",
                    ),
                    "pathQuery": WorkflowActionParameter(
                        name="pathQuery",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Virtual path to list /team/{teamId}/list/{listId}",
                    ),
                    "name": WorkflowActionParameter(
                        name="name",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=True,
                        description="Task name",
                    ),
                    "description": WorkflowActionParameter(
                        name="description",
                        type="str",
                        frontendType=FrontendType.TEXTAREA,
                        required=False,
                        description="Task description",
                    ),
                    "customFieldValues": WorkflowActionParameter(
                        name="customFieldValues",
                        type="str",
                        frontendType=FrontendType.TEXTAREA,
                        required=False,
                        description="Map of ClickUp custom field id to value (merged into custom_fields).",
                    ),
                    "taskFields": WorkflowActionParameter(
                        name="taskFields",
                        type="str",
                        frontendType=FrontendType.TEXTAREA,
                        required=False,
                        description="Optional extra JSON object merged into create payload (overrides standard fields)",
                    ),
                    "taskStatus": WorkflowActionParameter(
                        name="taskStatus",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="ClickUp status name for this list",
                    ),
                    "taskPriority": WorkflowActionParameter(
                        name="taskPriority",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Priority 1 (urgent)–4 (low), empty to omit",
                    ),
                    "taskDueDateMs": WorkflowActionParameter(
                        name="taskDueDateMs",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Due date as Unix ms timestamp",
                    ),
                    "taskAssigneeIds": WorkflowActionParameter(
                        name="taskAssigneeIds",
                        type="str",
                        frontendType=FrontendType.TEXTAREA,
                        required=False,
                        description="JSON array of ClickUp user ids, e.g. [123,456]",
                    ),
                    "taskTimeEstimateMs": WorkflowActionParameter(
                        name="taskTimeEstimateMs",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Time estimate in milliseconds",
                    ),
                    "taskTimeEstimateHours": WorkflowActionParameter(
                        name="taskTimeEstimateHours",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Time estimate in hours (converted to ms; preferred over taskTimeEstimateMs)",
                    ),
                },
                execute=create_task.__get__(self, self.__class__),
            ),
            # Raw JSON update per the ClickUp PUT /task/{id} contract.
            "updateTask": WorkflowActionDefinition(
                actionId="clickup.updateTask",
                description="Update a task (JSON body per ClickUp API)",
                dynamicMode=True,
                parameters={
                    "connectionReference": WorkflowActionParameter(
                        name="connectionReference",
                        type="str",
                        frontendType=FrontendType.USER_CONNECTION,
                        required=True,
                        description="ClickUp connection",
                    ),
                    "taskId": WorkflowActionParameter(
                        name="taskId",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Task ID",
                    ),
                    "path": WorkflowActionParameter(
                        name="path",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Optional path ending in /task/{taskId}",
                    ),
                    "taskUpdate": WorkflowActionParameter(
                        name="taskUpdate",
                        type="str",
                        frontendType=FrontendType.TEXTAREA,
                        required=False,
                        description="JSON object for PUT /task/{id} (e.g. {\"name\":\"...\",\"status\":\"...\"}); built from editor rows if empty",
                    ),
                },
                execute=update_task.__get__(self, self.__class__),
            ),
            # Attach a file coming from an upstream document node.
            "uploadAttachment": WorkflowActionDefinition(
                actionId="clickup.uploadAttachment",
                description="Upload a file attachment to a task",
                dynamicMode=True,
                parameters={
                    "connectionReference": WorkflowActionParameter(
                        name="connectionReference",
                        type="str",
                        frontendType=FrontendType.USER_CONNECTION,
                        required=True,
                        description="ClickUp connection",
                    ),
                    "taskId": WorkflowActionParameter(
                        name="taskId",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Task ID",
                    ),
                    "path": WorkflowActionParameter(
                        name="path",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Optional path ending in /task/{taskId}",
                    ),
                    "fileName": WorkflowActionParameter(
                        name="fileName",
                        type="str",
                        frontendType=FrontendType.TEXT,
                        required=False,
                        description="Attachment file name",
                    ),
                    "content": WorkflowActionParameter(
                        name="content",
                        type="Any",
                        frontendType=FrontendType.DOCUMENT_REFERENCE,
                        required=True,
                        description="File from upstream node",
                    ),
                },
                execute=upload_attachment.__get__(self, self.__class__),
            ),
        }
        # Fails fast on malformed action definitions.
        self._validateActions()
|
||||
6
modules/workflows/methods/methodFile/__init__.py
Normal file
6
modules/workflows/methods/methodFile/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
from .methodFile import MethodFile
|
||||
|
||||
__all__ = ["MethodFile"]
|
||||
6
modules/workflows/methods/methodFile/actions/__init__.py
Normal file
6
modules/workflows/methods/methodFile/actions/__init__.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
from .create import create
|
||||
|
||||
__all__ = ["create"]
|
||||
147
modules/workflows/methods/methodFile/actions/create.py
Normal file
147
modules/workflows/methods/methodFile/actions/create.py
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionResult, ActionDocument
|
||||
from modules.serviceCenter.services.serviceGeneration.subDocumentUtility import markdownToDocumentJson
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _doc_field(doc: Any, name: str, default: Any = None) -> Any:
    """Read field *name* from an ActionDocument-like object or a plain dict.

    Returns *default* when the field is missing or None. This replaces the
    previous mixed ``getattr(...) or doc.get(...)`` pattern, which raised
    AttributeError on objects (no ``.get``) whenever the attribute was falsy.
    """
    if isinstance(doc, dict):
        value = doc.get(name)
    else:
        value = getattr(doc, name, None)
    return default if value is None else value


def _persistDocumentsToUserFiles(
    action_documents: list,
    services,
) -> None:
    """Persist file.create output documents to user's file storage (like upload).

    Adds fileId to each document's validationMetadata for download links in UI.
    Best-effort: failures for individual documents are logged and skipped."""
    mgmt = getattr(services, "interfaceDbComponent", None)
    if not mgmt:
        # Fall back to constructing a management interface from the services context.
        try:
            import modules.interfaces.interfaceDbManagement as iface
            user = getattr(services, "user", None)
            if not user:
                return
            mgmt = iface.getInterface(
                user,
                mandateId=getattr(services, "mandateId", None) or "",
                featureInstanceId=getattr(services, "featureInstanceId", None) or "",
            )
        except Exception as e:
            logger.warning("file.create: could not get management interface for persistence: %s", e)
            return
    if not mgmt:
        return
    for doc in action_documents:
        try:
            doc_data = _doc_field(doc, "documentData")
            if not doc_data:
                continue
            # Stored documentData is base64 text; raw bytes pass through unchanged.
            content = base64.b64decode(doc_data) if isinstance(doc_data, str) else doc_data
            doc_name = _doc_field(doc, "documentName", "output.pdf")
            mime = _doc_field(doc, "mimeType", "application/octet-stream")
            file_item = mgmt.createFile(doc_name, mime, content)
            mgmt.createFileData(file_item.id, content)
            # Record the storage id so the UI can render a download link.
            meta = _doc_field(doc, "validationMetadata") or {}
            if isinstance(meta, dict):
                meta["fileId"] = file_item.id
                if isinstance(doc, dict):
                    doc["validationMetadata"] = meta
                else:
                    doc.validationMetadata = meta
            logger.info("file.create: persisted %s to user files (id=%s)", doc_name, file_item.id)
        except Exception as e:
            # _doc_field is safe on both shapes, so this logging cannot itself raise
            # the AttributeError the old `doc.get(...)` fallback could.
            dname = _doc_field(doc, "documentName", "?")
            logger.warning("file.create: failed to persist document %s: %s", dname, e)
|
||||
|
||||
|
||||
async def create(self, parameters: Dict[str, Any]) -> ActionResult:
    """
    Create a file from context (text/markdown from an upstream AI node).

    Uses GenerationService.renderReport to produce docx, pdf, txt, md, html,
    xlsx, etc., then persists the rendered documents to the user's files.

    Parameters (keys of ``parameters``):
        context (str): Source text/markdown. Required (connect an AI node
            or provide text directly).
        outputFormat (str): Target format; default "docx". Leading dot and
            case are normalized.
        title (str): Document title; default "Document".
        templateName (str | None): Optional style preset, stored in the
            structured content's metadata.
        language (str): Language code, truncated to 2 chars; default "de".
        parentOperationId: Forwarded to renderReport for operation tracing.

    Returns:
        ActionResult: success carrying the rendered ActionDocuments, or a
        failure with an error message (never raises).
    """
    context = parameters.get("context", "") or ""
    if not isinstance(context, str):
        context = str(context) if context else ""
    context = context.strip()

    if not context:
        return ActionResult.isFailure(error="context is required (connect an AI node or provide text)")

    # Normalize user-facing parameters ("PDF", ".pdf" -> "pdf"; "de-DE" -> "de").
    outputFormat = (parameters.get("outputFormat") or "docx").strip().lower().lstrip(".")
    title = (parameters.get("title") or "Document").strip()
    templateName = parameters.get("templateName")
    language = (parameters.get("language") or "de").strip()[:2]

    try:
        structured_content = markdownToDocumentJson(context, title, language)
        if templateName:
            structured_content.setdefault("metadata", {})["templateName"] = templateName

        generation = getattr(self.services, "generation", None)
        if not generation:
            return ActionResult.isFailure(error="Generation service not available")

        # AI service is optional; renderReport tolerates aiService=None.
        ai_service = getattr(self.services, "ai", None)
        rendered_docs = await generation.renderReport(
            extractedContent=structured_content,
            outputFormat=outputFormat,
            language=language,
            title=title,
            userPrompt=None,
            aiService=ai_service,
            parentOperationId=parameters.get("parentOperationId"),
        )

        if not rendered_docs:
            return ActionResult.isFailure(error="Rendering produced no output")

        action_documents = []
        # Fallback MIME types keyed by output format (used when the renderer
        # does not report one itself).
        mime_map = {
            "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
            "pdf": "application/pdf",
            "txt": "text/plain",
            "md": "text/markdown",
            "html": "text/html",
            "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            "csv": "text/csv",
            "json": "application/json",
        }
        for rd in rendered_docs:
            # Rendered documents may use camelCase or snake_case attributes.
            doc_data = rd.documentData if hasattr(rd, "documentData") else getattr(rd, "document_data", None)
            # BUGFIX: the original chain used getattr(..., default) as the last
            # link, so an *existing but falsy* name attribute (None/"") leaked
            # through as the final value. Always end with an explicit fallback.
            doc_name = (
                getattr(rd, "filename", None)
                or getattr(rd, "documentName", None)
                or getattr(rd, "document_name", None)
                or f"output.{outputFormat}"
            )
            mime = (
                getattr(rd, "mimeType", None)
                or getattr(rd, "mime_type", None)
                or mime_map.get(outputFormat, "application/octet-stream")
            )

            # ActionDocument carries base64 text, not raw bytes.
            if isinstance(doc_data, bytes):
                doc_data = base64.b64encode(doc_data).decode("ascii")

            action_documents.append(ActionDocument(
                documentName=doc_name,
                documentData=doc_data,
                mimeType=mime,
                validationMetadata={
                    "actionType": "file.create",
                    "outputFormat": outputFormat,
                    "templateName": templateName,
                },
            ))

        # Best-effort persistence to the user's file store (logs on failure).
        _persistDocumentsToUserFiles(action_documents, self.services)
        return ActionResult.isSuccess(documents=action_documents)

    except Exception as e:
        logger.error(f"file.create failed: {e}", exc_info=True)
        return ActionResult.isFailure(error=str(e))
|
||||
81
modules/workflows/methods/methodFile/methodFile.py
Normal file
81
modules/workflows/methods/methodFile/methodFile.py
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
|
||||
import logging
|
||||
from modules.workflows.methods.methodBase import MethodBase
|
||||
from modules.datamodels.datamodelWorkflowActions import WorkflowActionDefinition, WorkflowActionParameter
|
||||
from modules.shared.frontendTypes import FrontendType
|
||||
|
||||
from .actions.create import create
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MethodFile(MethodBase):
    """File creation methods - create documents from context (e.g. from AI nodes)."""

    def __init__(self, services):
        super().__init__(services)
        self.name = "file"
        self.description = "File creation from context"

        # Declarative parameter specs for the single "create" action; each
        # entry is expanded into a WorkflowActionParameter below, with the
        # parameter name injected from the dict key.
        param_specs = {
            "contentSources": dict(
                type="list",
                frontendType=FrontendType.HIDDEN,
                required=False,
                description="Array of context refs. Resolved and concatenated. Empty = from connected node.",
            ),
            "context": dict(
                type="str",
                frontendType=FrontendType.HIDDEN,
                required=False,
                description="Injected from contentSource or upstream connection",
            ),
            "outputFormat": dict(
                type="str",
                frontendType=FrontendType.SELECT,
                frontendOptions=["docx", "pdf", "txt", "md", "html", "xlsx", "csv", "json"],
                required=True,
                default="docx",
                description="Output file format",
            ),
            "title": dict(
                type="str",
                frontendType=FrontendType.TEXT,
                required=False,
                description="Document title",
            ),
            "templateName": dict(
                type="str",
                frontendType=FrontendType.SELECT,
                frontendOptions=["default", "corporate", "minimal"],
                required=False,
                description="Style preset",
            ),
            "language": dict(
                type="str",
                frontendType=FrontendType.SELECT,
                frontendOptions=["de", "en", "fr", "it", "es"],
                required=False,
                default="de",
                description="Language code",
            ),
        }

        self._actions = {
            "create": WorkflowActionDefinition(
                actionId="file.create",
                description="Create a file from context (text/markdown from AI). Configurable format and style preset.",
                dynamicMode=True,
                parameters={
                    key: WorkflowActionParameter(name=key, **spec)
                    for key, spec in param_specs.items()
                },
                # Bind the module-level action function to this instance.
                execute=create.__get__(self, self.__class__),
            ),
        }

        self._validateActions()
        # Also expose the action as a regular bound method on the instance.
        self.create = create.__get__(self, self.__class__)
|
||||
|
|
@ -16,6 +16,7 @@ async def composeAndDraftEmailWithContext(self, parameters: Dict[str, Any]) -> A
|
|||
to = parameters.get("to") or [] # Optional for drafts - can save draft without recipients
|
||||
context = parameters.get("context")
|
||||
documentList = parameters.get("documentList") or []
|
||||
replySourceDocuments = parameters.get("replySourceDocuments") or [] # Original email(s) for reply attachment
|
||||
cc = parameters.get("cc") or []
|
||||
bcc = parameters.get("bcc") or []
|
||||
emailStyle = parameters.get("emailStyle") or "business"
|
||||
|
|
@ -34,6 +35,7 @@ async def composeAndDraftEmailWithContext(self, parameters: Dict[str, Any]) -> A
|
|||
if isinstance(to, str):
|
||||
to = [to]
|
||||
ai_attachments = []
|
||||
normalized_ai_attachments = []
|
||||
# Jump to create-email section (see below)
|
||||
else:
|
||||
direct_subject = parameters.get("subject")
|
||||
|
|
@ -44,6 +46,7 @@ async def composeAndDraftEmailWithContext(self, parameters: Dict[str, Any]) -> A
|
|||
if isinstance(to, str):
|
||||
to = [to]
|
||||
ai_attachments = []
|
||||
normalized_ai_attachments = []
|
||||
else:
|
||||
subject = None
|
||||
body = None
|
||||
|
|
@ -51,6 +54,12 @@ async def composeAndDraftEmailWithContext(self, parameters: Dict[str, Any]) -> A
|
|||
|
||||
use_direct_content = bool(subject and body)
|
||||
|
||||
# Ensure subject/body are strings (not bytes) for JSON serialization
|
||||
if subject and isinstance(subject, bytes):
|
||||
subject = subject.decode("utf-8", errors="replace")
|
||||
if body and isinstance(body, bytes):
|
||||
body = body.decode("utf-8", errors="replace")
|
||||
|
||||
if not use_direct_content:
|
||||
# Original path: require connectionReference and context
|
||||
if not connectionReference or not context:
|
||||
|
|
@ -263,9 +272,12 @@ Return JSON:
|
|||
# Add documents as attachments if provided
|
||||
# Supports: 1) inline ActionDocuments (dict with documentData from e.g. sharepoint.downloadFile)
|
||||
# 2) docItem:... references (chat workflow documents)
|
||||
if documentList:
|
||||
# 3) replySourceDocuments: original email(s) for reply – attach when use_direct_content
|
||||
# When use_direct_content: upstream AI doc IS the email body – do not attach it, BUT attach reply sources
|
||||
attachments_doc_list = (replySourceDocuments or []) if use_direct_content else (documentList or [])
|
||||
if attachments_doc_list:
|
||||
message["attachments"] = []
|
||||
for attachment_ref in documentList:
|
||||
for attachment_ref in attachments_doc_list:
|
||||
base64_content = None
|
||||
attach_name = "attachment"
|
||||
attach_mime = "application/octet-stream"
|
||||
|
|
@ -276,10 +288,57 @@ Return JSON:
|
|||
is_inline = bool(getattr(attachment_ref, "documentData", None))
|
||||
if is_inline:
|
||||
doc = attachment_ref
|
||||
base64_content = doc.get("documentData") if isinstance(doc, dict) else getattr(doc, "documentData", None)
|
||||
attach_name = (doc.get("documentName") or doc.get("fileName")) if isinstance(doc, dict) else (getattr(doc, "documentName", None) or getattr(doc, "fileName", "attachment"))
|
||||
attach_mime = (doc.get("mimeType") or attach_mime) if isinstance(doc, dict) else (getattr(doc, "mimeType", None) or attach_mime)
|
||||
raw_data = doc.get("documentData") if isinstance(doc, dict) else getattr(doc, "documentData", None)
|
||||
vm = doc.get("validationMetadata") or {} if isinstance(doc, dict) else (getattr(doc, "validationMetadata") or {})
|
||||
action_type = vm.get("actionType", "") if isinstance(vm, dict) else ""
|
||||
# Reply source: email search/read result – convert first email to .eml for proper reply attachment
|
||||
if "outlook" in action_type.lower() and "email" in action_type.lower() and raw_data:
|
||||
try:
|
||||
data = json.loads(raw_data) if isinstance(raw_data, str) else raw_data
|
||||
emails_list = []
|
||||
if isinstance(data, dict):
|
||||
sr = data.get("searchResults") or {}
|
||||
emails_list = sr.get("results", []) if isinstance(sr, dict) else []
|
||||
if not emails_list:
|
||||
ed = data.get("emails") or {}
|
||||
emails_list = ed.get("emails", []) if isinstance(ed, dict) else []
|
||||
if not emails_list and isinstance(data.get("emails"), list):
|
||||
emails_list = data["emails"]
|
||||
if emails_list and isinstance(emails_list[0], dict):
|
||||
em = emails_list[0]
|
||||
fr = em.get("from", em.get("sender", {}))
|
||||
addr = fr.get("emailAddress", {}) if isinstance(fr, dict) else {}
|
||||
from_addr = addr.get("address", "") or addr.get("name", "")
|
||||
subj = em.get("subject", "")
|
||||
body_obj = em.get("body") or {}
|
||||
body_content = body_obj.get("content", "") if isinstance(body_obj, dict) else str(body_obj)
|
||||
eml_lines = [
|
||||
f"From: {from_addr}",
|
||||
f"Subject: {subj}",
|
||||
"MIME-Version: 1.0",
|
||||
"Content-Type: text/html; charset=utf-8",
|
||||
"",
|
||||
body_content or "(no content)"
|
||||
]
|
||||
eml_bytes = "\n".join(eml_lines).encode("utf-8")
|
||||
base64_content = base64.b64encode(eml_bytes).decode("utf-8")
|
||||
attach_name = f"original_message_{subj[:30].replace(' ', '_') if subj else 'email'}.eml"
|
||||
attach_mime = "message/rfc822"
|
||||
except Exception as e:
|
||||
logger.debug("Could not convert email JSON to .eml: %s", e)
|
||||
base64_content = raw_data
|
||||
attach_name = (doc.get("documentName") or doc.get("fileName") or "attachment") if isinstance(doc, dict) else (getattr(doc, "documentName", None) or getattr(doc, "fileName", "attachment"))
|
||||
attach_mime = (doc.get("mimeType") or attach_mime) if isinstance(doc, dict) else (getattr(doc, "mimeType", None) or attach_mime)
|
||||
else:
|
||||
base64_content = raw_data
|
||||
attach_name = (doc.get("documentName") or doc.get("fileName")) if isinstance(doc, dict) else (getattr(doc, "documentName", None) or getattr(doc, "fileName", "attachment"))
|
||||
attach_mime = (doc.get("mimeType") or attach_mime) if isinstance(doc, dict) else (getattr(doc, "mimeType", None) or attach_mime)
|
||||
if base64_content and attach_name:
|
||||
# Microsoft Graph expects contentBytes as base64 string; documentData may be bytes (e.g. from ai.generateDocument)
|
||||
if isinstance(base64_content, bytes):
|
||||
base64_content = base64.b64encode(base64_content).decode("utf-8")
|
||||
elif not isinstance(base64_content, str):
|
||||
base64_content = base64.b64encode(str(base64_content).encode("utf-8")).decode("utf-8")
|
||||
message["attachments"].append({
|
||||
"@odata.type": "#microsoft.graph.fileAttachment",
|
||||
"name": attach_name,
|
||||
|
|
@ -361,7 +420,7 @@ Return JSON:
|
|||
"recipients": to,
|
||||
"cc": cc,
|
||||
"bcc": bcc,
|
||||
"attachments": len(documentList) if documentList else 0,
|
||||
"attachments": len(message.get("attachments", [])),
|
||||
"aiSelectedAttachments": normalized_ai_attachments if normalized_ai_attachments else "all documents",
|
||||
"aiGenerated": True,
|
||||
"context": context,
|
||||
|
|
@ -373,10 +432,10 @@ Return JSON:
|
|||
# Extract attachment filenames for validation metadata
|
||||
attachmentFilenames = []
|
||||
attachmentReferences = []
|
||||
if documentList:
|
||||
if attachments_doc_list:
|
||||
# Inline docs (dict with documentName): use directly
|
||||
string_refs = [r for r in documentList if isinstance(r, str)]
|
||||
inline_docs = [r for r in documentList if isinstance(r, dict)]
|
||||
string_refs = [r for r in attachments_doc_list if isinstance(r, str)]
|
||||
inline_docs = [r for r in attachments_doc_list if isinstance(r, dict)]
|
||||
for d in inline_docs:
|
||||
name = d.get("documentName") or d.get("fileName")
|
||||
if name:
|
||||
|
|
|
|||
|
|
@ -15,6 +15,11 @@ async def uploadFile(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
if not connectionReference:
|
||||
return ActionResult.isFailure(error="connectionReference parameter is required")
|
||||
|
||||
# Set SharePoint access token first – required before siteDiscovery/sharepoint calls
|
||||
connection = self.connection.getMicrosoftConnection(connectionReference)
|
||||
if not connection:
|
||||
return ActionResult.isFailure(error="No valid Microsoft connection found for the provided connection reference")
|
||||
|
||||
contentParam = parameters.get("content")
|
||||
if not contentParam:
|
||||
return ActionResult.isFailure(error="content parameter is required")
|
||||
|
|
@ -100,12 +105,7 @@ async def uploadFile(self, parameters: Dict[str, Any]) -> ActionResult:
|
|||
except Exception as e:
|
||||
return ActionResult.isFailure(error=f"Could not decode base64 file content: {str(e)}")
|
||||
|
||||
# Get Microsoft connection
|
||||
connection = self.connection.getMicrosoftConnection(connectionReference)
|
||||
if not connection:
|
||||
return ActionResult.isFailure(error="No valid Microsoft connection found for the provided connection reference")
|
||||
|
||||
# Upload file
|
||||
# Upload file (connection/token already set above)
|
||||
uploadResult = await self.services.sharepoint.uploadFile(
|
||||
siteId=siteId,
|
||||
folderPath=folderPath,
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ class WorkflowManager:
|
|||
if ctx is not None:
|
||||
ctx.workflow = workflow
|
||||
# Also update contexts on resolved services (they may be cached with different context refs)
|
||||
for attr in ('chat', 'ai', 'extraction', 'sharepoint', 'utils', 'billing', 'generation'):
|
||||
for attr in ('chat', 'ai', 'extraction', 'sharepoint', 'clickup', 'utils', 'billing', 'generation'):
|
||||
svc = getattr(self.services, attr, None)
|
||||
if svc is not None and hasattr(svc, '_context') and svc._context is not None:
|
||||
svc._context.workflow = workflow
|
||||
|
|
|
|||
62
tests/unit/workflows/test_automation2_graphUtils.py
Normal file
62
tests/unit/workflows/test_automation2_graphUtils.py
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Unit tests for automation2 graphUtils - resolveParameterReferences (ref/value format).
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from modules.workflows.automation2.graphUtils import resolveParameterReferences
|
||||
|
||||
|
||||
class TestResolveParameterReferences:
    """Exercise structured {type: ref|value} parameter resolution."""

    def test_ref_simple(self):
        outputs = {
            "n1": {"payload": {"country": "CH"}},
        }
        ref = {"type": "ref", "nodeId": "n1", "path": ["payload", "country"]}
        resolved = resolveParameterReferences(ref, outputs)
        assert resolved == "CH"

    def test_ref_root(self):
        # An empty path resolves to the node's whole output dict.
        outputs = {"n1": {"a": 1, "b": 2}}
        ref = {"type": "ref", "nodeId": "n1", "path": []}
        assert resolveParameterReferences(ref, outputs) == {"a": 1, "b": 2}

    def test_ref_nested(self):
        outputs = {"form_1": {"customer": {"country": "DE", "name": "Test"}}}
        ref = {"type": "ref", "nodeId": "form_1", "path": ["customer", "country"]}
        resolved = resolveParameterReferences(ref, outputs)
        assert resolved == "DE"

    def test_ref_array_index(self):
        # Integer path segments index into lists.
        outputs = {"n1": {"items": ["a", "b", "c"]}}
        ref = {"type": "ref", "nodeId": "n1", "path": ["items", 1]}
        assert resolveParameterReferences(ref, outputs) == "b"

    def test_ref_missing_node(self):
        # Unknown nodeId: the ref is returned unchanged rather than raising.
        ref = {"type": "ref", "nodeId": "missing", "path": ["x"]}
        assert resolveParameterReferences(ref, {}) == ref

    def test_value_wrapper(self):
        wrapped = {"type": "value", "value": "static text"}
        assert resolveParameterReferences(wrapped, {}) == "static text"

    def test_value_nested_ref(self):
        # Refs nested inside a value wrapper are resolved recursively.
        outputs = {"n1": {"x": 42}}
        wrapped = {"type": "value", "value": {"type": "ref", "nodeId": "n1", "path": ["x"]}}
        assert resolveParameterReferences(wrapped, outputs) == 42

    def test_dict_mixed_ref_value(self):
        outputs = {"n1": {"result": "hello"}}
        mixed = {
            "prompt": {"type": "ref", "nodeId": "n1", "path": ["result"]},
            "suffix": {"type": "value", "value": " world"},
        }
        resolved = resolveParameterReferences(mixed, outputs)
        assert resolved == {"prompt": "hello", "suffix": " world"}

    def test_legacy_string_template(self):
        # Plain strings still support the legacy {{node.path}} template form.
        outputs = {"n1": {"country": "CH"}}
        template = "Land: {{n1.country}}"
        assert resolveParameterReferences(template, outputs) == "Land: CH"
|
||||
Loading…
Reference in a new issue