Tested workflow engine 3.1

ValueOn AG 2025-09-24 23:18:10 +02:00
parent 472353fea0
commit 29c31e79bd
32 changed files with 1360 additions and 2063 deletions

6
app.py
View file

@ -208,9 +208,6 @@ async def lifespan(app: FastAPI):
logger.info("Application has been shut down")
# START APP
app = FastAPI(
title="PowerOn | Data Platform API",
@ -248,6 +245,9 @@ app.add_middleware(TokenRefreshMiddleware, enabled=True)
# Proactive token refresh middleware (refresh tokens before they expire)
app.add_middleware(ProactiveTokenRefreshMiddleware, enabled=True, check_interval_minutes=5)
# Run triggered features
import modules.features.init
# Include all routers
from modules.routes.routeAdmin import router as generalRouter
app.include_router(generalRouter)

View file

@ -5,6 +5,8 @@ APP_ENV_TYPE = dev
APP_ENV_LABEL = Development Instance Patrick
APP_API_URL = http://localhost:8000
APP_KEY_SYSVAR = D:/Athi/Local/Web/poweron/local/key.txt
APP_INIT_PASS_ADMIN_SECRET = DEV_ENC:Z0FBQUFBQm8xRjFXZXRwU0NnLTNhdFVUU3ZlcFU4emRMa2xLRno5c0xwSVhOcjgzNlBUWnZ2V2RmQ0RmRmE4a3BTQ3FRMHN2aWdsSTAxSDJrUGJ2UmQwME5Pa3RNTTgyVFh6NUl4YTJoZTVxdkExUVkyWnpac1k9
APP_INIT_PASS_EVENT_SECRET = DEV_ENC:Z0FBQUFBQm8xRjFXQWY5WWpQYXUzX2dTQllVNk1Vb1J1S2t2NG9PMEYzSWNLeVR6WlhvYjU4TDFmYjZva01oZll5QWI0MHU5cXJvT0lvZHdoNW01WWxqRG9pdEQyWTcxYWlJVE5SRXVIMkh2VTRlYk1kSGRVNnM9
# PostgreSQL Storage (new)
DB_APP_HOST=localhost
@ -76,3 +78,6 @@ Connector_WebTavily_API_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8wSFJNSEJ2YmVieFRaWk5yR1k
# Google Cloud Speech Services configuration
Connector_GoogleSpeech_API_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8wSFJNSFFITGlUVzF3NE5Ldk10d3o5MS10Q2o4aEJGM250WF9CeWxFQVNaNHBhMk1hS3E5YXRrakh5dmx0VDJuZ3BsWGVMTC0tbU9wWFRWZWM1N25ibWpkeF84enJ1Y2ViMVd1V0plUWdxN3VId1VRUzBhN3MzLVBkSXEwM1BHT2Z2c3JBalh6eXVKMUNFX2pfbGdGYUg1ZUFfcXhSRnJyT0tzdWVVdG1HSHBZOUgwLUVPMVQ4YkZUc3dMcFlLWjRxQUM1X05OWm5ndmJGcjFETV9UM1FoLWt2RVVEem92UGhvZlRFXzNxOVRzQkhyV0hqeTRWQXdMdDVDbEMwOWFkTnV3UXpsYWZwRENaRzd4QjlwTjJUWHhHLVZPTzd1eXNhSWh5ajNwelgxSDRlNUx0N05yTlI1N1RjSzdIZGhFLXBOMjEwMkxsT0daSVhiWVpQZUtfNVdwdGVrazVMM2NkUGZPOHBuNjM3YXdFcGFPdlVtY01ReGhsVENwNnRvNGhJejNHd3hFOFA0bWgxalFFNDVoQ2xYTG5VN1dDZGhndEdWRlFjYzBRMUgwbzRfS2N3VVgyaXJpYmJfZzNadmx5cTFxS2Vja1I5Qm1UT0hDM1FuNk5JRmYtT2p3RWp2SWxTWGZuU1psOUN4NEJTOHkweWIzY2NjbTJRZG5oRjVxNGh4LTUwZE1zZi1zLU43Ulk4UGtmR0N6dU5RcVVvRF9DQlE5Sk1FR1YtOE84WnVuTDlOUHhQR1JLT2g0VkNIT2ctWTBuMXIwNHhSSjcxNnNWRFhQc18zSm1UR1M0Mm54TGxsRG5uX2tDSWhBNDRGaHFObkhuVmtnVVlQU1FhVWhTdnpGUDRfcDQ1OWpERklHMmN5Y0RVWC1JYlItTUozaWY1dmxZUW12NXAtUEtsQWpqUFk4NzFwWVNfSUNqeDNkc25wMnJHN3c5NTB1dmxmUFZfU0NWS1hQMTc1NmdOTmEyREZRVXB0cmlyaldkT3B0Q3FQMFdpdWQ3WU1RZDZKYlFneDdnQ2NWWHFHSXl1c2xRN21LbDdyUGFUcWFxeVVTOWoxSkVJaFZiUHI2VFBHWEdvM2Q1cXdIVGYyc3Y2cVdRd00ydHdrME8tcDVqSmNLV193R291VElTNWFNa2pMQi1zX21VdnZ1R0tTbEJndndvbWRrVE52eW1aTFFzRURtdGItc3FJeXJDenVTWTlIZ0E1eG1yX2N1SHJSUWIxdm8wakdzaDIyaDQ0cE9UdDlhclp2MzVVamQ2em0zbmdLUzBJa1ZaRFpQaTBnZGpTWnRhRGZxUVNZWDg5VDFndWFmZlZnVG5SUEhlWkpfQnREWS0xbEZfNXd5OUpEUkZHa1NZNWtPbnBadFFialgzazlyM0dTb3ctR2x5LUozT3VDc3F1Tk5TbGN2MnRRS1hTb1gzWUNVSlJuUl85azhxaGxCMzVNQUQzVGg1cDZHalRaOUFrM1JPSGJKaGlKRTAwbnV4TmxIZnhkMF9FODVKUk1GZGlWZk1ScnhmQnJXWmRxMTk3SWhIdnBjSVJJOElkalRUWXFRTFNvQXZpdFpFOUdDWkhHOTRLVmN2cEh0X2JpYjNvRjhvUHFVQVNQdXY4OWxQSWNvcUNfZW5HYy10dEFicldhRHZLS1ktY2RGczQta2lGWXkxb2RhNUZMNExabWx0dXdhR3BSWGpSYVUxRXJZVTNBYmdNVFd5NW1vY2s0T0RlV3hqZjNSMHhJakY1TDBackV5bmM2V1o2SEJlT3RSbnpPR0VXbmhQTUtPMzYyU1RjbFRmQUlWTUZjVGRheXBuekZJN3NNZVFFZ3JHenNnOFdQVWxsbFBoYTVvQUd1NGx2SDdYcGhrdUpSWlRIRWVVUkpxdjJSZV9zb0J3N3o4QnRpYXpTRHdkZ1pqSWswSjdJMjVEZDZUNzZuWDVXWkNxUDRtQ1p1dnk2ZEx0S0NKT2ZUc3B5eEdRdEpnTlZQMkt5OHFjQ3FfcHpzUFZEY3Z5WDdEQkt4cEN2MFg2eXF4bDZFeHZFWk5tMFpUR0xDZi1JVjN4eUtRaXlNXzBJUFV2N19MVTRhMWtxWnd6d0Y2bVNFQUJSdEU5Z01FTjEtZDJmWkpEYUlsTVJnTEJYdU1iVFoySEttd3libURrSUNJelVic2Mzb0t5ZzNDX0hjZUtfOFQ1QkxRWmx2dmhnbDhNZllla1dNa0Y5akVpNDRKdHRSUU9fTE9sYVUzdzZtTkJEYTBWdkxkRURSa01TOGxWcVZkUmxkWTA1QjJjS1pOUjJEQTZxeDdSVXhNWldXbnE1V1J2STVCNkt2VHRuNEdtaHUweWdEbUZyMlhWd09FWWI0UUFyQVpUeDE3QXdfQkMtcjdpUU5GUTQzUEczNWg1Wm5rVEgwRW11RFowVnFxYnpGNUYwYks1Y3JPbTdUc2ZXS1ZfYzdhcno3U1ZXZUVkblRoOVl5XzZpTUgwRXFZeFd6NXdqTGlvNm1QeXgxS2ZFTVJSV1JVejliWFBVRGU1MWVudEZzRDFwSW94YlU1Y3JmallsVldXcHdvTmFQdnU5UE0tNHNHMXhPWE1JQUxCNC1WVVRJNmNJcTM3a1dUWWwzSVptTFg3OXlWLWxITkdiR0MyTmRzRWFOeHBMZEVzbms3RC1MTFo1TVhKeURhUW9peHk1bHhJbHphVzR4RmxiUkJwcmkzcWZ3S3dWV0Jkb2VaZ3pMTXdUNUJmZjZfVEVXeDFNMnBvemM0TUJNeUQ2SE1aeWczc0V6M0NUMHFGdURMbTRka3AzZ1d1TUh2V1c5RzBKQVVlTEstWEthOTdaWUZHTlRHaVNmbEFJRFU3M0l2TWlBNF9kaFpJUXlxMHJYa2lxOGFRbDNqMTA1RDFFclFTcGxmb0g2WVI3Z0NrLWN4cUNzNWVuR2VMaE41dWRqMnR5eWNuM0gwUmIwcTFEQ09qbmJCUFIwbjM4MGF6TlhxQWpKOFZXWGNKdnl2Wi1zU1BsZU5NYWpsbzVKMGxTLUJKckd6enJnZWhXemstenN3NGNqUk9HeGlGaFNhSl83TlUzLTVZWW9zYVZZTTZzSjNfd3JkVDNaZVp4dk1GQVMxblJBRW1BWUZLU1VKUFkyQ1dPbndUNjYwdll2U0JxN1FQNk5OaGVYR3U5TXdGNGFVZGVXcS1tS2dwbVc1V3hEeXhVNkJ2cjdGX2FpY1NvOTJhcWFyOUVGOFpOdmd0R29Rb2RIaU01R05LeWRxUE00WlhOQVlMbkZxZDNyUFRXdUFGZ0lOUmp2RzIyaDlzMGxNQk40VzFzYjAwMEhjRVlrNWJ5cFhpVWYxQkxYQ25rUDJ3RTY1VlVFLThiNG1nY1hkdnZTMGoyVlN6dkJleFhndDNCODhlOVl1ZHBkci1hd3l0NGNXeWZ6aUp4S3pHS1c4aDM3WElBTjBwYlNSbmJoMk5SNF81VVNqd0dXY1JUejVsZnpGS1Z5dHFPNUVVM1I5eGhjblZjMV9idFJ
kc3NZaUdHRlIzQWJQdHhzT01qVW8xUUwxNHZmY3Q1aHBnNHhXTGRjb1BmTmM2X0NmdkpxNS1JMHNQNVg1N0xsd0pmdE8wNktkUGpuX0F3LURyaGhyajg3eWNDdkozUFZIYmpJTTZ3WWVCVFZUd1AtRklFUUxTNXkzalpfdlc4VE1tOHU1Q0MtUWdLbEdYRzdVU1RkM3gyeEY3eXBWLUhXVVo4VkZoUHVkakJPNk0tNTJKTU1JZjVISlR3SmJBQkVhRW51UHg3UjBOMVRPRnF2dzIwRkgxczBBUWZpemFFMzFTeDJfWHZhSkhsTzBhcFIzVmZRODEzRUl1b1ZDUGFqYUxjN2JsbkhYdHVPT00yYlUwbmpVbkU0RkJXbWx5UVFJdHNvNUdxQzMyQnQycDJpMjlnd2xwb3huRUJiZUg5dkhaMjhMV2R5T0NsU0N4WjdBX2ZfODhOdTZOZ0x6WlRIUGI3MzR1ZkJicHN6NzUzRzlsUmVkNlR6MjZjTTA3c290Qzh4ejRiWERHbmFtV1BQV2ZKb2pGU0F1OGsySG9hNHdtSkkxTWpwV2gyaVpWcFpsRWs5a0hSY3UzMk4wQ0dkZWtMbG4xOFZ6TXdEOXBob3I0NjNkT28tZk5IcW5FUkg4YnBtUVFLY1Q5M1lzYzhrRGZOaDF6SnpnejRuM1Y3SW1xMUJmLXpJdEM0UjNHU0t5OEhoamxxLXRmWmtyOS1ud09XeGFzc3VFXzNPWWNGcXFwdHN2cVFEZ0dWdUNKbF9Lc3d6dVhPb3NLMlNEaW1xd3JPLUViYV9GTnNRPT0=
# Feature SyncDelta JIRA configuration
Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = DEV_ENC:Z0FBQUFBQm8xRnZIVUdfQTg5dXRfenlxanFvdXBCSmltQmNhRFNkUnNYai0zWlZqV1NmS212SlhsRUdwYXRRcjVvRVdqaWN0RWNrMjNYWG1VazJOaU5xWnZDWFJoWGxvTXpTbHNzbk9nTGlxa0ZZQk5WZHNHaHVDdmNQYTBRbHBQaFlvY0FYM3NzU05MUHNZU1AxaWNCM1ZTQllLdFpZX2pWektnUTF1WGRiRGtsOGE0bVdPYXp4aEhlWDVTWmYzWlRuVl84Zk5pREZJay1zMWRpTWxKYnFtYzBXM01vMzdiN0JlQl9kYXN2Sy1ZZUdnT2dYNzFuajA1QUJVbTFKS1dfUjNUMS1MSC0wOGc1ZDd2NTJESTR6M2Y2SDhsUG40OWM0b2djeDdZTS1qOVl2YlhhbGx2dG1KWkd3andQZXJVVC1zWDdBMzdHOVdRMHRJN3RabzZNWWlEY0xqcUg4Nk9RPT0=

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -6,19 +6,34 @@ import logging
from typing import List, Dict, Any, Optional, Union, get_origin, get_args
from datetime import datetime
import uuid
from pydantic import BaseModel
from pydantic import BaseModel, Field
import threading
import time
from modules.shared.attributeUtils import to_dict
from modules.shared.attributeUtils import to_dict, ModelMixin
from modules.shared.timezoneUtils import get_utc_timestamp
from modules.shared.configuration import APP_CONFIG
from modules.interfaces.interfaceAppModel import SystemTable
logger = logging.getLogger(__name__)
# No mapping needed - table name = Pydantic model name exactly
class SystemTable(BaseModel, ModelMixin):
"""Data model for system table entries"""
table_name: str = Field(
description="Name of the table",
frontend_type="text",
frontend_readonly=True,
frontend_required=True
)
initial_id: Optional[str] = Field(
default=None,
description="Initial ID for the table",
frontend_type="text",
frontend_readonly=True,
frontend_required=False
)
def _get_model_fields(model_class) -> Dict[str, str]:
"""Get all fields from Pydantic model and map to SQL types."""
if not hasattr(model_class, '__fields__'):
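The hunk is truncated here. For reference, a minimal instantiation sketch of the relocated SystemTable model; the value is illustrative and, per the comment above, the table name mirrors the Pydantic model it describes:
# illustrative only: table_name mirrors a Pydantic model name, initial_id starts empty
entry = SystemTable(table_name="User", initial_id=None)
print(entry.table_name)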

View file

@ -0,0 +1,145 @@
"""ClickUp connector for CRUD operations (compatible with TicketInterface)."""
from dataclasses import dataclass
from typing import Optional
import logging
import aiohttp
from modules.interfaces.interfaceTicketModel import (
TicketBase,
TicketFieldAttribute,
Task,
)
logger = logging.getLogger(__name__)
class ConnectorTicketClickup(TicketBase):
def __init__(
self,
*,
apiToken: str,
teamId: str,
listId: Optional[str] = None,
apiUrl: str = "https://api.clickup.com/api/v2",
) -> None:
self.apiToken = apiToken
self.teamId = teamId
self.listId = listId
self.apiUrl = apiUrl
def _headers(self) -> dict:
return {
"Authorization": self.apiToken,
"Content-Type": "application/json",
}
async def read_attributes(self) -> list[TicketFieldAttribute]:
"""Fetch field attributes. Uses list custom fields if listId provided; else basic fields."""
attributes: list[TicketFieldAttribute] = []
try:
async with aiohttp.ClientSession() as session:
if self.listId:
url = f"{self.apiUrl}/list/{self.listId}/field"
async with session.get(url, headers=self._headers()) as response:
if response.status != 200:
logger.warning(f"ClickUp fields fetch status: {response.status}")
else:
data = await response.json()
for field in data.get("fields", []):
fieldId = field.get("id")
fieldName = field.get("name", fieldId)
if fieldId:
attributes.append(TicketFieldAttribute(fieldName=fieldName, field=fieldId))
# Add common top-level fields
core_fields = [
("ID", "id"),
("Name", "name"),
("Status", "status.status"),
("Assignees", "assignees"),
("DateCreated", "date_created"),
("DueDate", "due_date"),
]
for name, fid in core_fields:
attributes.append(TicketFieldAttribute(fieldName=name, field=fid))
except Exception as e:
logger.error(f"ClickUp read_attributes error: {e}")
return attributes
async def read_tasks(self, *, limit: int = 0) -> list[Task]:
"""Read tasks from ClickUp, always returning full task records.
If listId is set, read from that list; otherwise read from the team.
"""
tasks: list[Task] = []
try:
async with aiohttp.ClientSession() as session:
page = 0
pageSize = 100
while True:
if self.listId:
url = f"{self.apiUrl}/list/{self.listId}/task?subtasks=true&page={page}&order_by=created&reverse=true"
else:
# Team-level search for open tasks
url = f"{self.apiUrl}/team/{self.teamId}/task?subtasks=true&page={page}&order_by=created&reverse=true"
# Request with parameters to include all fields where possible
async with session.get(url, headers=self._headers()) as response:
if response.status != 200:
errorText = await response.text()
logger.error(f"ClickUp read_tasks failed: {response.status} {errorText}")
break
data = await response.json()
items = data.get("tasks", [])
for item in items:
tasks.append(Task(data=item))
if limit and len(tasks) >= limit:
return tasks
if len(items) < pageSize:
break
page += 1
except Exception as e:
logger.error(f"ClickUp read_tasks error: {e}")
return tasks
async def write_tasks(self, tasklist: list[Task]) -> None:
"""Update tasks in ClickUp. Expects Task.data to contain {'ID' or 'id' or 'task_id', 'fields': {...}}"""
try:
async with aiohttp.ClientSession() as session:
for task in tasklist:
data = task.data
taskId = data.get("ID") or data.get("id") or data.get("task_id")
fields = data.get("fields", {})
if not taskId or not isinstance(fields, dict) or not fields:
continue
# Map generic fields to ClickUp payload
payload: dict = {}
for fieldId, value in fields.items():
# Heuristics: map common field ids
if fieldId in ("name", "summary"):
payload["name"] = value
elif fieldId in ("status",):
payload["status"] = value
elif fieldId.startswith("customfield_") or fieldId.startswith("cf_"):
# ClickUp custom fields need separate endpoint; attempt inline update if supported
if "custom_fields" not in payload:
payload["custom_fields"] = []
payload["custom_fields"].append({"id": fieldId, "value": value})
else:
# Best-effort assign to description for unknown text fields
if isinstance(value, str) and value:
payload.setdefault("description", value)
url = f"{self.apiUrl}/task/{taskId}"
async with session.put(url, headers=self._headers(), json=payload) as response:
if response.status not in (200, 204):
err = await response.text()
logger.error(f"ClickUp update failed for {taskId}: {response.status} {err}")
except Exception as e:
logger.error(f"ClickUp write_tasks error: {e}")

View file

@ -1,47 +1,32 @@
"""Jira connector for CRUD operations."""
"""Jira connector for CRUD operations (neutralized to generic ticket interface)."""
from dataclasses import dataclass
import os
import logging
import aiohttp
import asyncio
import json
from modules.interfaces.interfaceTicketModel import (
TicketBase,
TicketFieldAttribute,
Task,
)
from modules.interfaces.interfaceTicketModel import (TicketBase, TicketFieldAttribute, Task, )
logger = logging.getLogger(__name__)
@dataclass
class ConnectorTicketJira(TicketBase):
jira_username: str
jira_api_token: str
jira_url: str
project_code: str
issue_type: str
@classmethod
async def create(
cls,
def __init__(
self,
*,
jira_username: str,
jira_api_token: str,
jira_url: str,
project_code: str,
issue_type: str,
):
return ConnectorTicketJira(
jira_username=jira_username,
jira_api_token=jira_api_token,
jira_url=jira_url,
project_code=project_code,
issue_type=issue_type,
)
apiUsername: str,
apiToken: str,
apiUrl: str,
projectCode: str,
ticketType: str,
) -> None:
self.apiUsername = apiUsername
self.apiToken = apiToken
self.apiUrl = apiUrl
self.projectCode = projectCode
self.ticketType = ticketType
async def read_attributes(self) -> list[TicketFieldAttribute]:
"""
@ -52,22 +37,22 @@ class ConnectorTicketJira(TicketBase):
list[TicketFieldAttribute]: List of field attributes with names and IDs
"""
# Build JQL dynamically; allow empty or '*' issue_type to mean "all types"
if self.issue_type and self.issue_type != "*":
jql_query = f"project={self.project_code} AND issuetype={self.issue_type}"
if self.ticketType and self.ticketType != "*":
jql_query = f"project={self.projectCode} AND issuetype={self.ticketType}"
else:
jql_query = f"project={self.project_code}"
jql_query = f"project={self.projectCode}"
# Prepare the request URL (use JQL search endpoint)
url = f"{self.jira_url}/rest/api/3/search/jql"
url = f"{self.apiUrl}/rest/api/3/search/jql"
# Prepare authentication
auth = aiohttp.BasicAuth(self.jira_username, self.jira_api_token)
auth = aiohttp.BasicAuth(self.apiUsername, self.apiToken)
try:
async with aiohttp.ClientSession() as session:
headers = {"Content-Type": "application/json"}
payload = {
"jql": jql_query,
"jql": jql_query,
"maxResults": 1
# Don't specify fields to get all available fields
}
@ -100,9 +85,9 @@ class ConnectorTicketJira(TicketBase):
fields = issue.get("fields", {})
for field_id, value in fields.items():
field_name = field_names.get(field_id, field_id)
fieldName = field_names.get(field_id, field_id)
attributes.append(
TicketFieldAttribute(field_name=field_name, field=field_id)
TicketFieldAttribute(fieldName=fieldName, field=field_id)
)
logger.info(
@ -122,8 +107,8 @@ class ConnectorTicketJira(TicketBase):
async def _read_all_fields_via_fields_api(self) -> list[TicketFieldAttribute]:
"""Fallback: use Jira fields API to list all fields with id->name mapping."""
auth = aiohttp.BasicAuth(self.jira_username, self.jira_api_token)
url = f"{self.jira_url}/rest/api/3/field"
auth = aiohttp.BasicAuth(self.apiUsername, self.apiToken)
url = f"{self.apiUrl}/rest/api/3/field"
try:
async with aiohttp.ClientSession() as session:
async with session.get(url, auth=auth) as response:
@ -138,10 +123,10 @@ class ConnectorTicketJira(TicketBase):
attributes: list[TicketFieldAttribute] = []
for field in data:
field_id = field.get("id")
field_name = field.get("name", field_id)
fieldName = field.get("name", field_id)
if field_id:
attributes.append(
TicketFieldAttribute(field_name=field_name, field=field_id)
TicketFieldAttribute(fieldName=fieldName, field=field_id)
)
logger.info(
f"Successfully retrieved {len(attributes)} field attributes via fields API"
@ -162,10 +147,10 @@ class ConnectorTicketJira(TicketBase):
list[Task]: List of tasks with their data
"""
# Build JQL dynamically; allow empty or '*' issue_type to mean "all types"
if self.issue_type and self.issue_type != "*":
jql_query = f"project={self.project_code} AND issuetype={self.issue_type}"
if self.ticketType and self.ticketType != "*":
jql_query = f"project={self.projectCode} AND issuetype={self.ticketType}"
else:
jql_query = f"project={self.project_code}"
jql_query = f"project={self.projectCode}"
# Initialize variables for pagination (cursor-based /search/jql)
max_results = 100
@ -176,8 +161,8 @@ class ConnectorTicketJira(TicketBase):
seen_issue_ids: set[str] = set()
# Prepare authentication
auth = aiohttp.BasicAuth(self.jira_username, self.jira_api_token)
url = f"{self.jira_url}/rest/api/3/search/jql"
auth = aiohttp.BasicAuth(self.apiUsername, self.apiToken)
url = f"{self.apiUrl}/rest/api/3/search/jql"
try:
async with aiohttp.ClientSession() as session:
@ -286,7 +271,7 @@ class ConnectorTicketJira(TicketBase):
tasklist: List of Task objects containing task data to update
"""
headers = {"Accept": "application/json", "Content-Type": "application/json"}
auth = aiohttp.BasicAuth(self.jira_username, self.jira_api_token)
auth = aiohttp.BasicAuth(self.apiUsername, self.apiToken)
try:
async with aiohttp.ClientSession() as session:
@ -346,7 +331,7 @@ class ConnectorTicketJira(TicketBase):
update_data = {"fields": processed_fields}
# Make the update request
url = f"{self.jira_url}/rest/api/3/issue/{task_id}"
url = f"{self.apiUrl}/rest/api/3/issue/{task_id}"
async with session.put(
url, json=update_data, headers=headers, auth=auth
@ -365,3 +350,5 @@ class ConnectorTicketJira(TicketBase):
except Exception as e:
logger.error(f"Unexpected error while updating Jira tasks: {str(e)}")
raise
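A matching usage sketch for the renamed Jira connector parameters; credentials, URL and project key are placeholders:
import asyncio

async def demo() -> None:
    # placeholder credentials; '*' (or empty) ticketType means all issue types
    connector = ConnectorTicketJira(
        apiUsername="user@example.com",
        apiToken="placeholder-token",
        apiUrl="https://example.atlassian.net",
        projectCode="PROJ",
        ticketType="*",
    )
    attributes = await connector.read_attributes()
    print(f"{len(attributes)} field attributes")

# asyncio.run(demo())  # would hit the Jira API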

View file

@ -35,20 +35,7 @@ from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
# Configuration loading functions
def get_web_crawl_timeout() -> int:
"""Get web crawl timeout from configuration"""
return int(APP_CONFIG.get("Web_Crawl_TIMEOUT", "30"))
def get_web_crawl_max_retries() -> int:
"""Get web crawl max retries from configuration"""
return int(APP_CONFIG.get("Web_Crawl_MAX_RETRIES", "3"))
def get_web_crawl_retry_delay() -> int:
"""Get web crawl retry delay from configuration"""
return int(APP_CONFIG.get("Web_Crawl_RETRY_DELAY", "2"))
# Cached configuration values are loaded into the connector instance on creation
@dataclass
@ -66,13 +53,26 @@ class TavilyCrawlResult:
@dataclass
class ConnectorTavily(WebSearchBase, WebCrawlBase, WebScrapeBase):
client: AsyncTavilyClient = None
# Cached settings loaded at initialization time
crawl_timeout: int = 30
crawl_max_retries: int = 3
crawl_retry_delay: int = 2
@classmethod
async def create(cls):
api_key = APP_CONFIG.get("Connector_WebTavily_API_KEY_SECRET")
if not api_key:
raise ValueError("Tavily API key not configured. Please set Connector_WebTavily_API_KEY_SECRET in config.ini")
return cls(client=AsyncTavilyClient(api_key=api_key))
# Load and cache web crawl related configuration
crawl_timeout = int(APP_CONFIG.get("Web_Crawl_TIMEOUT", "30"))
crawl_max_retries = int(APP_CONFIG.get("Web_Crawl_MAX_RETRIES", "3"))
crawl_retry_delay = int(APP_CONFIG.get("Web_Crawl_RETRY_DELAY", "2"))
return cls(
client=AsyncTavilyClient(api_key=api_key),
crawl_timeout=crawl_timeout,
crawl_max_retries=crawl_max_retries,
crawl_retry_delay=crawl_retry_delay,
)
async def search_urls(self, request: WebSearchRequest) -> WebSearchActionResult:
"""Handles the web search request.
@ -240,9 +240,9 @@ class ConnectorTavily(WebSearchBase, WebCrawlBase, WebScrapeBase):
"""Calls the Tavily API to extract text content from URLs with retry logic."""
import asyncio
max_retries = get_web_crawl_max_retries()
retry_delay = get_web_crawl_retry_delay()
timeout = get_web_crawl_timeout()
max_retries = self.crawl_max_retries
retry_delay = self.crawl_retry_delay
timeout = self.crawl_timeout
for attempt in range(max_retries + 1):
try:
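(The retry loop body continues unchanged.) A small sketch of the new pattern: the crawl settings are read once in create() and reused on every call, assuming Connector_WebTavily_API_KEY_SECRET is configured:
import asyncio

async def demo() -> None:
    # settings are cached once at creation instead of re-read per call
    connector = await ConnectorTavily.create()
    print(connector.crawl_timeout, connector.crawl_max_retries, connector.crawl_retry_delay)

# asyncio.run(demo())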

18
modules/features/init.py Normal file
View file

@ -0,0 +1,18 @@
# Launch features as events
import asyncio
import logging
from modules.interfaces.interfaceAppObjects import getRootInterface
# GET EVENT USER
logger = logging.getLogger(__name__)
eventUser = getRootInterface().getUserByUsername("event")
if not eventUser:
logger.error("Event user not found")
# LAUNCH FEATURES
from modules.features.syncDelta.mainSyncDelta import ManagerSyncDelta, performSync
managerSyncDelta = ManagerSyncDelta(eventUser)
asyncio.create_task(performSync(eventUser))
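A minimal sketch of invoking the same sync manually (for example from a test), assuming eventUser was resolved as above; performSync is awaited directly instead of being scheduled with create_task:
import asyncio
from modules.features.syncDelta.mainSyncDelta import performSync

async def runOnce(eventUser) -> None:
    # one-shot run instead of the background task created at import time
    await performSync(eventUser)

# asyncio.run(runOnce(eventUser))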

View file

@ -1,8 +1,8 @@
import logging
from typing import Any, Dict, List, Optional
from modules.interfaces.interfaceAppModel import User
from modules.services.serviceNeutralization.mainNeutralization import NeutralizationService
from modules.interfaces.interfaceAppModel import User, DataNeutralizerAttributes, DataNeutraliserConfig
from modules.services.serviceNeutralization.mainServiceNeutralization import NeutralizationService
logger = logging.getLogger(__name__)
@ -75,6 +75,53 @@ class NeutralizationPlayground:
'error': str(e),
}
# Additional methods needed by the route
def get_config(self) -> Optional[DataNeutraliserConfig]:
"""Get neutralization configuration"""
return self.service.getConfig()
def save_config(self, config_data: Dict[str, Any]) -> DataNeutraliserConfig:
"""Save neutralization configuration"""
return self.service.saveConfig(config_data)
def neutralize_text(self, text: str, file_id: str = None) -> Dict[str, Any]:
"""Neutralize text content"""
return self.service.processText(text)
def resolve_text(self, text: str) -> str:
"""Resolve UIDs in neutralized text back to original text"""
return self.service.resolveText(text)
def get_attributes(self, file_id: str = None) -> List[DataNeutralizerAttributes]:
"""Get neutralization attributes, optionally filtered by file ID"""
if not self.service.app_interface:
return []
try:
all_attributes = self.service._getAttributes()
if file_id:
return [attr for attr in all_attributes if attr.fileId == file_id]
return all_attributes
except Exception as e:
logger.error(f"Error getting attributes: {str(e)}")
return []
async def process_sharepoint_files(self, source_path: str, target_path: str) -> Dict[str, Any]:
"""Process files from SharePoint source path and store neutralized files in target path"""
return await self.processSharepointFiles(source_path, target_path)
def batch_neutralize_files(self, files_data: List[Dict[str, Any]]) -> Dict[str, Any]:
"""Process multiple files for neutralization"""
file_ids = [file_data.get('fileId') for file_data in files_data if file_data.get('fileId')]
return self.processFiles(file_ids)
def get_processing_stats(self) -> Dict[str, Any]:
"""Get neutralization processing statistics"""
return self.getStats()
def cleanup_file_attributes(self, file_id: str) -> bool:
"""Clean up neutralization attributes for a specific file"""
return self.cleanAttributes(file_id)
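A usage sketch of the wrapper API as the routes consume it; the sample text is illustrative:
# currentUser is whatever User the route dependency resolves
playground = NeutralizationPlayground(currentUser)
config = playground.get_config()  # may be None if nothing is configured
result = playground.neutralize_text("Contact Jane Doe at jane.doe@example.com")
stats = playground.get_processing_stats()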
# Internal SharePoint helper module separated to keep feature logic tidy
class SharepointProcessor:

File diff suppressed because it is too large Load diff

View file

@ -564,20 +564,3 @@ register_model_labels(
"patternType": {"en": "Pattern Type", "fr": "Type de modèle"}
}
)
class SystemTable(BaseModel, ModelMixin):
"""Data model for system table entries"""
table_name: str = Field(
description="Name of the table",
frontend_type="text",
frontend_readonly=True,
frontend_required=True
)
initial_id: Optional[str] = Field(
default=None,
description="Initial ID for the table",
frontend_type="text",
frontend_readonly=True,
frontend_required=False
)

View file

@ -121,6 +121,7 @@ class AppObjects:
"""Initialize standard records if they don't exist."""
self._initRootMandate()
self._initAdminUser()
self._initEventUser()
def _initRootMandate(self):
"""Creates the Root mandate if it doesn't exist."""
@ -154,7 +155,7 @@ class AppObjects:
language="en",
privilege=UserPrivilege.SYSADMIN,
authenticationAuthority="local", # Using lowercase value directly
hashedPassword=self._getPasswordHash("The 1st Poweron Admin"), # Use a secure password in production!
hashedPassword=self._getPasswordHash(APP_CONFIG.get("APP_INIT_PASS_ADMIN_SECRET")),
connections=[]
)
createdUser = self.db.recordCreate(UserInDB, adminUser)
@ -164,6 +165,27 @@ class AppObjects:
self.currentUser = createdUser
self.userId = createdUser.get("id")
def _initEventUser(self):
"""Creates the Event user if it doesn't exist."""
# Check if event user already exists
existingUsers = self.db.getRecordset(UserInDB, recordFilter={"username": "event"})
if not existingUsers:
logger.info("Creating Event user")
eventUser = UserInDB(
mandateId=self.getInitialId(Mandate),
username="event",
email="event@example.com",
fullName="Event",
enabled=True,
language="en",
privilege=UserPrivilege.SYSADMIN,
authenticationAuthority="local", # Using lowercase value directly
hashedPassword=self._getPasswordHash(APP_CONFIG.get("APP_INIT_PASS_EVENT_SECRET")),
connections=[]
)
createdUser = self.db.recordCreate(UserInDB, eventUser)
logger.info(f"Event user created with ID {createdUser['id']}")
def _uam(self, model_class: type, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""
Unified user access management function that filters data based on user privileges
@ -1075,34 +1097,6 @@ def getInterface(currentUser: User) -> AppObjects:
return _gatewayInterfaces[contextKey]
def getRootUser() -> User:
"""
Returns the root user from the database.
This is the user with the initial ID in the users table.
"""
try:
# Create a temporary interface without user context
tempInterface = AppObjects()
# Get the initial user directly
initialUserId = tempInterface.getInitialId(UserInDB)
if not initialUserId:
raise ValueError("No initial user ID found in database")
users = tempInterface.db.getRecordset(UserInDB, recordFilter={"id": initialUserId})
if not users:
raise ValueError("Initial user not found in database")
# Convert to User model and return the model instance
user_data = users[0]
return User.parse_obj(user_data)
except Exception as e:
logger.error(f"Error getting root user: {str(e)}")
raise ValueError(f"Failed to get root user: {str(e)}")
def getRootInterface() -> AppObjects:
"""
Returns an AppObjects instance with root privileges.
@ -1111,7 +1105,28 @@ def getRootInterface() -> AppObjects:
global _rootAppObjects
if _rootAppObjects is None:
rootUser = getRootUser()
_rootAppObjects = AppObjects(rootUser)
try:
# Create a temporary interface without user context to get root user
tempInterface = AppObjects()
# Get the initial user directly
initialUserId = tempInterface.getInitialId(UserInDB)
if not initialUserId:
raise ValueError("No initial user ID found in database")
users = tempInterface.db.getRecordset(UserInDB, recordFilter={"id": initialUserId})
if not users:
raise ValueError("Initial user not found in database")
# Convert to User model
user_data = users[0]
rootUser = User.parse_obj(user_data)
# Create root interface with the root user
_rootAppObjects = AppObjects(rootUser)
except Exception as e:
logger.error(f"Error getting root user: {str(e)}")
raise ValueError(f"Failed to get root user: {str(e)}")
return _rootAppObjects
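A minimal usage sketch of the consolidated root lookup, mirroring how modules/features/init.py consumes it:
# root-privileged interface, built lazily on first call and cached afterwards
rootInterface = getRootInterface()
eventUser = rootInterface.getUserByUsername("event")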

View file

@ -6,8 +6,8 @@ from abc import ABC, abstractmethod
class TicketFieldAttribute(BaseModel):
field_name: str = Field(description="Human-readable field name")
field: str = Field(description="JIRA field ID/key")
fieldName: str = Field(description="Human-readable field name")
field: str = Field(description="Ticket field ID/key")
class Task(BaseModel):
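The hunk ends here. An instantiation sketch for the renamed attribute model; the values are placeholders:
# fieldName replaces the old field_name keyword
attr = TicketFieldAttribute(fieldName="Summary", field="summary")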

File diff suppressed because it is too large Load diff

View file

@ -33,12 +33,9 @@ class WebInterface:
@classmethod
async def create(cls) -> "WebInterface":
connectorWebTavily = await ConnectorTavily.create()
return WebInterface(connectorWebTavily=connectorWebTavily)
async def search(
self, web_search_request: WebSearchRequest
) -> WebSearchActionResult:
async def search(self, web_search_request: WebSearchRequest) -> WebSearchActionResult:
# NOTE: Add connectors here
return await self.connectorWebTavily.search_urls(web_search_request)
@ -46,9 +43,7 @@ class WebInterface:
# NOTE: Add connectors here
return await self.connectorWebTavily.crawl_urls(web_crawl_request)
async def scrape(
self, web_scrape_request: WebScrapeRequest
) -> WebScrapeActionResult:
async def scrape(self, web_scrape_request: WebScrapeRequest) -> WebScrapeActionResult:
# NOTE: Add connectors here
return await self.connectorWebTavily.scrape(web_scrape_request)

View file

@ -7,7 +7,7 @@ from modules.security.auth import limiter, getCurrentUser
# Import interfaces
from modules.interfaces.interfaceAppModel import User, DataNeutraliserConfig, DataNeutralizerAttributes
from modules.features.neutralization.mainNeutralizationPlayground import NeutralizationService
from modules.features.neutralizePlayground.mainNeutralizePlayground import NeutralizationPlayground
# Configure logger
logger = logging.getLogger(__name__)
@ -33,7 +33,7 @@ async def get_neutralization_config(
) -> DataNeutraliserConfig:
"""Get data neutralization configuration"""
try:
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
config = service.get_config()
if not config:
@ -67,7 +67,7 @@ async def save_neutralization_config(
) -> DataNeutraliserConfig:
"""Save or update data neutralization configuration"""
try:
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
config = service.save_config(config_data)
return config
@ -97,7 +97,7 @@ async def neutralize_text(
detail="Text content is required"
)
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
result = service.neutralize_text(text, file_id)
return result
@ -128,7 +128,7 @@ async def resolve_text(
detail="Text content is required"
)
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
resolved_text = service.resolve_text(text)
return {"resolved_text": resolved_text}
@ -151,7 +151,7 @@ async def get_neutralization_attributes(
) -> List[DataNeutralizerAttributes]:
"""Get neutralization attributes, optionally filtered by file ID"""
try:
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
attributes = service.get_attributes(fileId)
return attributes
@ -181,7 +181,7 @@ async def process_sharepoint_files(
detail="Both source and target paths are required"
)
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
result = await service.process_sharepoint_files(source_path, target_path)
return result
@ -210,7 +210,7 @@ async def batch_process_files(
detail="Files data is required"
)
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
result = service.batch_neutralize_files(files_data)
return result
@ -232,7 +232,7 @@ async def get_neutralization_stats(
) -> Dict[str, Any]:
"""Get neutralization processing statistics"""
try:
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
stats = service.get_processing_stats()
return stats
@ -253,7 +253,7 @@ async def cleanup_file_attributes(
) -> Dict[str, str]:
"""Clean up neutralization attributes for a specific file"""
try:
service = NeutralizationService(currentUser)
service = NeutralizationPlayground(currentUser)
success = service.cleanup_file_attributes(fileId)
if success:

View file

@ -51,17 +51,19 @@ class CSRFMiddleware(BaseHTTPMiddleware):
csrf_token = request.headers.get("X-CSRF-Token")
if not csrf_token:
logger.warning(f"CSRF token missing for {request.method} {request.url.path}")
raise HTTPException(
from fastapi.responses import JSONResponse
return JSONResponse(
status_code=status.HTTP_403_FORBIDDEN,
detail="CSRF token missing"
content={"detail": "CSRF token missing"}
)
# Validate CSRF token format (basic validation)
if not self._is_valid_csrf_token(csrf_token):
logger.warning(f"Invalid CSRF token format for {request.method} {request.url.path}")
raise HTTPException(
from fastapi.responses import JSONResponse
return JSONResponse(
status_code=status.HTTP_403_FORBIDDEN,
detail="Invalid CSRF token format"
content={"detail": "Invalid CSRF token format"}
)
# Additional CSRF validation could be added here:

View file

@ -39,60 +39,44 @@ class PublicService:
class Services:
def __init__(self, user: User, workflow: ChatWorkflow):
def __init__(self, user: User, workflow: ChatWorkflow = None):
self.user: User = user
self.workflow: ChatWorkflow = workflow
# Directly expose existing service modules
# Initialize interfaces
from modules.interfaces.interfaceChatObjects import getInterface as getChatInterface
self.interfaceChat = getChatInterface(user)
from modules.interfaces.interfaceAppObjects import getInterface as getAppInterface
self.interfaceApp = getAppInterface(user)
from modules.interfaces.interfaceComponentObjects import getInterface as getComponentInterface
self.interfaceComponent = getComponentInterface(user)
# Initialize service packages
from .serviceDocument.mainServiceDocumentExtraction import DocumentExtractionService
self.document = PublicService(DocumentExtractionService(self))
self.documentExtraction = PublicService(DocumentExtractionService(self))
from .serviceDocument.mainServiceDocumentGeneration import DocumentGenerationService
self.document = PublicService(DocumentGenerationService(self))
self.documentGeneration = PublicService(DocumentGenerationService(self))
from .serviceNeutralization.mainNeutralization import NeutralizationService
self.neutralization = PublicService(NeutralizationService())
from .serviceNeutralization.mainServiceNeutralization import NeutralizationService
self.neutralization = PublicService(NeutralizationService(self))
from .serviceSharepoint.mainSharepoint import SharePointService
self.sharepoint = PublicService(SharePointService(self))
from .serviceSharepoint.mainServiceSharepoint import SharepointService
self.sharepoint = PublicService(SharepointService(self))
from .serviceAi.mainServiceAi import AiService
self.ai = PublicService(AiService(self))
from .serviceWorkflows.mainServiceWorkflows import WorkflowService
from .serviceTicket.mainServiceTicket import TicketService
self.ticket = PublicService(TicketService(self))
from .serviceWorkflow.mainServiceWorkflow import WorkflowService
self.workflow = PublicService(WorkflowService(self))
# Initialize chat interface for workflow operations
from modules.interfaces.interfaceChatObjects import getInterface as getChatInterface
self.chatInterface = getChatInterface(user)
# Chat interface wrapper methods
def getWorkflow(self, workflowId: str):
return self.chatInterface.getWorkflow(workflowId)
def createWorkflow(self, workflowData: dict):
return self.chatInterface.createWorkflow(workflowData)
def updateWorkflow(self, workflowId: str, workflowData: dict):
return self.chatInterface.updateWorkflow(workflowId, workflowData)
def createMessage(self, messageData: dict):
return self.chatInterface.createMessage(messageData)
def updateMessage(self, messageId: str, messageData: dict):
return self.chatInterface.updateMessage(messageId, messageData)
def createLog(self, logData: dict):
return self.chatInterface.createLog(logData)
def updateWorkflowStats(self, workflowId: str, bytesSent: int = 0, bytesReceived: int = 0, tokenCount: int = 0):
return self.chatInterface.updateWorkflowStats(workflowId, bytesSent, bytesReceived, tokenCount)
@property
def mandateId(self):
return self.chatInterface.mandateId
def getInterface(user: User, workflow: ChatWorkflow) -> Services:
return Services(user, workflow)
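A usage sketch of the reworked service center; the prompt and options below are illustrative and only a subset of what callAi accepts:
async def demo(user, workflow):
    services = getInterface(user, workflow)
    # AI calls now go through the centralized service center
    return await services.ai.callAi(
        prompt="Summarize the last conversation",
        documents=None,
        options={"process_type": "text", "priority": "speed"},
    )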

View file

@ -2,7 +2,7 @@ import logging
from typing import Dict, Any, List, Optional, Tuple
from modules.interfaces.interfaceChatModel import ChatDocument
from modules.services.serviceDocument.documentExtraction import DocumentExtractionService
from modules.services.serviceDocument.mainServiceDocumentExtraction import DocumentExtractionService
from modules.interfaces.interfaceAiModel import AiCallRequest, AiCallOptions
from modules.interfaces.interfaceAiObjects import AiObjects
@ -19,9 +19,15 @@ class AiService:
The concrete connector instances (OpenAI/Anthropic) are injected by the interface layer.
"""
def __init__(self, aiObjects: AiObjects | None = None) -> None:
def __init__(self, serviceCenter=None) -> None:
"""Initialize AI service with service center access.
Args:
serviceCenter: Service center instance for accessing other services
"""
self.serviceCenter = serviceCenter
# Only depend on interfaces
self.aiObjects = aiObjects or AiObjects()
self.aiObjects = AiObjects()
self.documentExtractor = DocumentExtractionService()
async def callAi(

View file

@ -22,7 +22,7 @@ from modules.interfaces.interfaceChatModel import (
ContentItem,
ContentMetadata
)
from modules.services.serviceNeutralization.mainNeutralization import NeutralizationService
from modules.services.serviceNeutralization.mainServiceNeutralization import NeutralizationService
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
@ -43,9 +43,8 @@ class DocumentExtractionService:
"""Initialize the document processor."""
self._neutralizer = NeutralizationService() if APP_CONFIG.get("ENABLE_CONTENT_NEUTRALIZATION", False) else None
self._serviceCenter = serviceCenter
# Centralized services interface (for AI)
from modules.services import getInterface as getServices
self.services = getServices(serviceCenter.user, serviceCenter.workflow)
# Store service center for access to user/workflow context when needed
self.services = None  # Kept as None to avoid a circular dependency with the service center
self.supportedTypes: Dict[str, Callable[[bytes, str, str], Awaitable[List[ContentItem]]]] = {
# Text and data files
@ -1427,17 +1426,21 @@ class DocumentExtractionService:
"""
from modules.interfaces.interfaceChatModel import ChatDocument
image_doc = ChatDocument(fileData=chunk, fileName="image", mimeType=mimeType)
processedContent = await self.services.ai.callAi(
prompt=imagePrompt,
documents=[image_doc],
options={
"process_type": "image",
"operation_type": "analyse_content",
"priority": "balanced",
"compress_documents": True,
"max_cost": 0.03
}
)
# Use direct import to avoid circular dependency
from modules.services.serviceAi.mainServiceAi import AiService
from modules.interfaces.interfaceAiObjects import AiObjects
aiService = AiService(AiObjects())
processedContent = await aiService.callAi(
prompt=imagePrompt,
documents=[image_doc],
options={
"process_type": "image",
"operation_type": "analyse_content",
"priority": "balanced",
"compress_documents": True,
"max_cost": 0.03
}
)
else:
# For text content (including SVG), use text AI service
# Neutralize content if neutralizer is enabled (only for text)
@ -1462,36 +1465,23 @@ class DocumentExtractionService:
# For code files, preserve the complete content without AI processing
processedContent = contentToProcess
else:
if self.services and hasattr(self.services, 'ai'):
processedContent = await self.services.ai.callAi(
prompt=aiPrompt,
documents=None,
options={
"process_type": "text",
"operation_type": "analyse_content",
"priority": "balanced",
"compress_prompt": True,
"compress_documents": False,
"processing_mode": "advanced",
"max_cost": 0.05,
"max_processing_time": 30
}
)
else:
# Fallback to basic AI processing with centralized service
processedContent = await self.services.ai.callAi(
prompt=aiPrompt,
documents=None,
options={
"process_type": "text",
"operation_type": "analyse_content",
"priority": "speed",
"compress_prompt": True,
"compress_documents": False,
"max_cost": 0.01,
"max_processing_time": 15
}
)
# Use direct import to avoid circular dependency
from modules.services.serviceAi.mainServiceAi import AiService
from modules.interfaces.interfaceAiObjects import AiObjects
aiService = AiService(AiObjects())
processedContent = await aiService.callAi(
prompt=aiPrompt,
documents=None,
options={
"process_type": "text",
"operation_type": "analyse_content",
"priority": "speed",
"compress_prompt": True,
"compress_documents": False,
"max_cost": 0.01,
"max_processing_time": 15
}
)
chunkResults.append(processedContent)
except Exception as aiError:

View file

@ -8,59 +8,51 @@ Multilingual: DE, EN, FR, IT
import logging
import re
import os
import uuid
import json
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime
from pathlib import Path
import mimetypes
from typing import Dict, List, Any, Optional
from modules.interfaces.interfaceAppObjects import getInterface
from modules.interfaces.interfaceAppModel import User, DataNeutraliserConfig, DataNeutralizerAttributes
from modules.shared.timezoneUtils import get_utc_timestamp
from modules.interfaces.interfaceAppModel import DataNeutraliserConfig, DataNeutralizerAttributes
# Import all necessary classes and functions for neutralization
from modules.services.serviceNeutralization.subProcessCommon import ProcessResult, CommonUtils, NeutralizationResult, NeutralizationAttribute
from modules.services.serviceNeutralization.subProcessCommon import CommonUtils, NeutralizationResult, NeutralizationAttribute
from modules.services.serviceNeutralization.subProcessText import TextProcessor, PlainText
from modules.services.serviceNeutralization.subProcessList import ListProcessor, TableData
from modules.services.serviceNeutralization.subProcessBinary import BinaryProcessor, BinaryData
from modules.services.serviceNeutralization.subParseString import StringParser
from modules.services.serviceNeutralization.subPatterns import Pattern, HeaderPatterns, DataPatterns, TextTablePatterns
from modules.services.serviceNeutralization.subProcessBinary import BinaryProcessor
from modules.services.serviceNeutralization.subPatterns import HeaderPatterns, DataPatterns, TextTablePatterns
logger = logging.getLogger(__name__)
class NeutralizationService:
"""Service for handling data neutralization operations"""
def __init__(self, current_user: User = None, names_to_parse: List[str] = None):
def __init__(self, serviceCenter=None, NamesToParse: List[str] = None):
"""Initialize the service with user context and anonymization processors
Args:
current_user: User object for context (optional for basic neutralization)
names_to_parse: List of names to parse and replace (case-insensitive)
serviceCenter: Service center instance for accessing other services
NamesToParse: List of names to parse and replace (case-insensitive)
"""
self.current_user = current_user
self.app_interface = getInterface(current_user) if current_user else None
self.serviceCenter = serviceCenter
self.interfaceApp = serviceCenter.interfaceApp
# Initialize anonymization processors
self.names_to_parse = names_to_parse or []
self.textProcessor = TextProcessor(names_to_parse)
self.listProcessor = ListProcessor(names_to_parse)
self.NamesToParse = NamesToParse or []
self.textProcessor = TextProcessor(NamesToParse)
self.listProcessor = ListProcessor(NamesToParse)
self.binaryProcessor = BinaryProcessor()
self.commonUtils = CommonUtils()
def getConfig(self) -> Optional[DataNeutraliserConfig]:
"""Get the neutralization configuration for the current user's mandate"""
if not self.app_interface:
if not self.interfaceApp:
return None
return self.app_interface.getNeutralizationConfig()
return self.interfaceApp.getNeutralizationConfig()
def saveConfig(self, config_data: Dict[str, Any]) -> DataNeutraliserConfig:
"""Save or update the neutralization configuration"""
if not self.app_interface:
if not self.interfaceApp:
raise ValueError("User context required for saving configuration")
return self.app_interface.createOrUpdateNeutralizationConfig(config_data)
return self.interfaceApp.createOrUpdateNeutralizationConfig(config_data)
# Public API: process text or file
@ -70,18 +62,18 @@ class NeutralizationService:
def processFile(self, fileId: str) -> Dict[str, Any]:
"""Neutralize a file referenced by its fileId using app interface."""
if not self.app_interface:
if not self.interfaceApp:
raise ValueError("User context is required to process a file by fileId")
# Fetch file data and metadata
fileInfo = None
try:
# getFile returns an object; fallback to dict-like
fileInfo = self.app_interface.getFile(fileId)
fileInfo = self.interfaceApp.getFile(fileId)
except Exception:
fileInfo = None
fileName = getattr(fileInfo, 'fileName', None) if fileInfo else None
mimeType = getattr(fileInfo, 'mimeType', None) if fileInfo else None
fileData = self.app_interface.getFileData(fileId)
fileData = self.interfaceApp.getFileData(fileId)
if not fileData:
raise ValueError(f"No file data found for fileId: {fileId}")
@ -111,17 +103,17 @@ class NeutralizationService:
return result
def resolveText(self, text: str) -> str:
if not self.app_interface:
if not self.interfaceApp:
return text
try:
placeholder_pattern = r'\[([a-z]+)\.([a-f0-9-]{36})\]'
matches = re.findall(placeholder_pattern, text)
resolved_text = text
for placeholder_type, uid in matches:
attributes = self.app_interface.db.getRecordset(
attributes = self.interfaceApp.db.getRecordset(
DataNeutralizerAttributes,
recordFilter={
"mandateId": self.app_interface.mandateId,
"mandateId": self.interfaceApp.mandateId,
"id": uid
}
)
@ -194,6 +186,19 @@ class NeutralizationService:
processed_info={'type': 'error', 'error': str(e)}
).model_dump()
def _getAttributes(self) -> List[DataNeutralizerAttributes]:
"""Get all neutralization attributes for the current user's mandate"""
if not self.interfaceApp:
return []
try:
return self.interfaceApp.db.getRecordset(
DataNeutralizerAttributes,
recordFilter={"mandateId": self.interfaceApp.mandateId}
)
except Exception as e:
logger.error(f"Error getting neutralization attributes: {str(e)}")
return []
def _getContentTypeFromMime(self, mime_type: str) -> str:
"""Determine content type from MIME type for neutralization processing"""
if mime_type.startswith('text/'):

View file

@ -11,15 +11,15 @@ from modules.services.serviceNeutralization.subPatterns import DataPatterns, fin
class StringParser:
"""Handles string parsing and replacement operations"""
def __init__(self, names_to_parse: List[str] = None):
def __init__(self, NamesToParse: List[str] = None):
"""
Initialize the string parser
Args:
names_to_parse: List of names to parse and replace (case-insensitive)
NamesToParse: List of names to parse and replace (case-insensitive)
"""
self.data_patterns = DataPatterns.patterns
self.names_to_parse = names_to_parse or []
self.NamesToParse = NamesToParse or []
self.mapping = {}
def is_placeholder(self, text: str) -> bool:
@ -84,7 +84,7 @@ class StringParser:
Returns:
str: Text with custom names replaced
"""
for name in self.names_to_parse:
for name in self.NamesToParse:
if not name.strip():
continue

View file

@ -22,14 +22,14 @@ class TableData:
class ListProcessor:
"""Handles structured data processing with headers for anonymization"""
def __init__(self, names_to_parse: List[str] = None):
def __init__(self, NamesToParse: List[str] = None):
"""
Initialize the list processor
Args:
names_to_parse: List of names to parse and replace
NamesToParse: List of names to parse and replace
"""
self.string_parser = StringParser(names_to_parse)
self.string_parser = StringParser(NamesToParse)
self.header_patterns = HeaderPatterns.patterns
def anonymize_table(self, table: TableData) -> TableData:
@ -215,7 +215,7 @@ class ListProcessor:
text = self.string_parser.mapping[text]
else:
# Check if text matches any custom names from the user list
for name in self.string_parser.names_to_parse:
for name in self.string_parser.NamesToParse:
if not name.strip():
continue
if text.lower().strip() == name.lower().strip():

View file

@ -16,14 +16,14 @@ class PlainText:
class TextProcessor:
"""Handles plain text processing for anonymization"""
def __init__(self, names_to_parse: List[str] = None):
def __init__(self, NamesToParse: List[str] = None):
"""
Initialize the text processor
Args:
names_to_parse: List of names to parse and replace
NamesToParse: List of names to parse and replace
"""
self.string_parser = StringParser(names_to_parse)
self.string_parser = StringParser(NamesToParse)
def extract_tables_from_text(self, content: str) -> tuple:
"""

View file

@ -13,18 +13,55 @@ logger = logging.getLogger(__name__)
class SharepointService:
"""SharePoint connector using Microsoft Graph API for reliable authentication."""
def __init__(self, access_token: str):
"""Initialize with access token.
def __init__(self, serviceCenter=None):
"""Initialize SharePoint service without access token.
Args:
access_token: Microsoft Graph access token
serviceCenter: Service center instance for accessing other services
Use setAccessToken() method to configure the access token before making API calls.
"""
self.access_token = access_token
self.serviceCenter = serviceCenter
self.access_token = None
self.base_url = "https://graph.microsoft.com/v1.0"
def setAccessToken(self, userConnection, interfaceApp) -> bool:
"""Set access token from UserConnection.
Args:
userConnection: UserConnection object containing token information
interfaceApp: InterfaceApp instance used by TokenManager to resolve the token
Returns:
bool: True if token was set successfully, False otherwise
"""
try:
if not userConnection:
logger.error("UserConnection is required to set access token")
return False
# Get a fresh token for this specific connection
from modules.security.tokenManager import TokenManager
token = TokenManager().getFreshToken(interfaceApp, userConnection.id)
if not token:
logger.error(f"No token found for connection {userConnection.id}")
return False
self.access_token = token.tokenAccess
logger.info(f"Access token set for connection {userConnection.id}")
return True
except Exception as e:
logger.error(f"Error setting access token: {str(e)}")
return False
async def _make_graph_api_call(self, endpoint: str, method: str = "GET", data: bytes = None) -> Dict[str, Any]:
"""Make a Microsoft Graph API call with proper error handling."""
try:
if self.access_token is None:
logger.error("Access token is not set. Please call setAccessToken() before using the SharePoint service.")
return {"error": "Access token is not set. Please call setAccessToken() before using the SharePoint service."}
headers = {
"Authorization": f"Bearer {self.access_token}",
"Content-Type": "application/json" if data and method != "PUT" else "application/octet-stream" if data else "application/json"
@ -280,6 +317,10 @@ class SharepointService:
async def download_file(self, site_id: str, file_id: str) -> Optional[bytes]:
"""Download a file from SharePoint."""
try:
if self.access_token is None:
logger.error("Access token is not set. Please call setAccessToken() before using the SharePoint service.")
return None
endpoint = f"sites/{site_id}/drive/items/{file_id}/content"
headers = {"Authorization": f"Bearer {self.access_token}"}
@ -416,6 +457,10 @@ class SharepointService:
async def download_file_by_path(self, site_id: str, file_path: str) -> Optional[bytes]:
"""Download a file by its path within a site."""
try:
if self.access_token is None:
logger.error("Access token is not set. Please call setAccessToken() before using the SharePoint service.")
return None
# Clean the path
clean_path = file_path.strip('/')
endpoint = f"sites/{site_id}/drive/root:/{clean_path}:/content"

View file

@ -0,0 +1,41 @@
"""Ticket service for creating ticket interfaces."""
import logging
from typing import Dict, Any, Optional
from modules.interfaces.interfaceTicketObjects import createTicketInterfaceByType
logger = logging.getLogger(__name__)
class TicketService:
"""Service class for ticket interface operations."""
def __init__(self, serviceCenter=None):
"""Initialize ticket service with service center access.
Args:
serviceCenter: Service center instance for accessing other services
"""
self.serviceCenter = serviceCenter
async def createTicketInterfaceByType(
self,
taskSyncDefinition: Dict[str, Any],
connectorType: str,
connectorParams: Optional[Dict[str, Any]] = None
):
"""Create a ticket interface by type with the given parameters.
Args:
taskSyncDefinition: Field mapping definition for ticket synchronization
connectorType: Type of connector (e.g., "Jira", "ServiceNow")
connectorParams: Optional parameters for the connector
Returns:
Ticket interface instance
"""
return await createTicketInterfaceByType(
taskSyncDefinition=taskSyncDefinition,
connectorType=connectorType,
connectorParams=connectorParams
)
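A usage sketch of the new service; the field mapping and connector parameters are placeholders, and the exact keys expected by createTicketInterfaceByType depend on the connector type:
async def demo(serviceCenter):
    ticketService = TicketService(serviceCenter)
    return await ticketService.createTicketInterfaceByType(
        taskSyncDefinition={"Summary": "summary"},  # placeholder mapping
        connectorType="Jira",
        connectorParams={"apiUrl": "https://example.atlassian.net"},  # placeholder
    )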

View file

@ -3,7 +3,7 @@ import uuid
from typing import Dict, Any, List, Optional
from modules.interfaces.interfaceAppModel import User, UserConnection
from modules.interfaces.interfaceChatModel import ChatDocument, ChatMessage, ExtractedContent
from modules.services.serviceDocument.documentExtraction import DocumentExtractionService
from modules.services.serviceDocument.mainServiceDocumentExtraction import DocumentExtractionService
from modules.services.serviceDocument.documentUtility import getFileExtension, getMimeTypeFromExtension, detectContentTypeFromData
from modules.shared.timezoneUtils import get_utc_timestamp
@ -12,17 +12,13 @@ logger = logging.getLogger(__name__)
class WorkflowService:
"""Service class containing methods for document processing, chat operations, and workflow management"""
def __init__(self, service_center):
self.service_center = service_center
self.user = service_center.user
self.workflow = service_center.workflow
self.interfaceChat = service_center.interfaceChat
self.interfaceComponent = service_center.interfaceComponent
self.interfaceApp = service_center.interfaceApp
self.documentProcessor = service_center.documentProcessor
# Centralized services interface (for AI)
from modules.services import getInterface as getServices
self.services = getServices(self.user, self.workflow)
def __init__(self, serviceCenter):
self.serviceCenter = serviceCenter
self.user = serviceCenter.user
self.workflow = serviceCenter.workflow
self.interfaceChat = serviceCenter.interfaceChat
self.interfaceComponent = serviceCenter.interfaceComponent
self.interfaceApp = serviceCenter.interfaceApp
async def summarizeChat(self, messages: List[ChatMessage]) -> str:
"""
@ -57,8 +53,10 @@ class WorkflowService:
Please provide a comprehensive summary of this conversation."""
# Get summary using centralized AI (speed priority)
return await self.services.ai.callAi(
# Get summary using AI service directly (avoiding circular dependency)
from modules.services.serviceAi.mainServiceAi import AiService
ai_service = AiService(self)
return await ai_service.callAi(
prompt=prompt,
documents=None,
options={
@ -251,6 +249,22 @@ class WorkflowService:
logger.debug(f"getConnectionReferenceFromUserConnection: Built reference: {base_ref + state_info}")
return base_ref + state_info
def getUserConnectionByExternalUsername(self, authority: str, externalUsername: str) -> Optional[UserConnection]:
"""Fetch the user's connection by authority and external username."""
try:
if not authority or not externalUsername:
return None
user_connections = self.interfaceApp.getUserConnections(self.user.id)
for connection in user_connections:
# Normalize authority for comparison (enum vs string)
connection_authority = connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority)
if connection_authority.lower() == authority.lower() and connection.externalUsername == externalUsername:
return connection
return None
except Exception as e:
logger.error(f"Error getting connection by external username: {str(e)}")
return None
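A usage sketch of the new lookup; the authority label and external username are placeholders:
# workflowService is an initialized WorkflowService instance
connection = workflowService.getUserConnectionByExternalUsername("microsoft", "user@example.com")
if connection:
    reference = workflowService.getConnectionReferenceFromUserConnection(connection)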
def getUserConnectionFromConnectionReference(self, connectionReference: str) -> Optional[UserConnection]:
"""Get UserConnection from reference string (handles both old and enhanced formats)"""
@ -334,14 +348,34 @@ class WorkflowService:
# Recovery failed - don't continue with invalid data
raise RuntimeError(f"Document {document.id} properties are inaccessible and recovery failed. Diagnosis: {diagnosis}")
# Process with document processor directly
extractedContent = await self.documentProcessor.processFileData(
# Process with DocumentExtractionService directly (no circular dependency)
from modules.services.serviceDocument.mainServiceDocumentExtraction import DocumentExtractionService
docService = DocumentExtractionService(None) # Pass None to avoid circular dependency
content_items = await docService.processFileData(
fileData=fileData,
fileName=fileName,
mimeType=mimeType,
base64Encoded=False,
prompt=prompt,
documentId=document.id
enableAI=True
)
# Convert ContentItem list to ExtractedContent
contents = []
for item in content_items:
contents.append({
'label': item.label,
'data': item.data,
'metadata': {
'mimeType': item.metadata.mimeType if hasattr(item.metadata, 'mimeType') else mimeType,
'size': item.metadata.size if hasattr(item.metadata, 'size') else len(fileData),
'base64Encoded': item.metadata.base64Encoded if hasattr(item.metadata, 'base64Encoded') else False
}
})
extractedContent = ExtractedContent(
id=document.id,
contents=contents
)
# Note: ExtractedContent model only has 'id' and 'contents' fields

View file

@ -32,7 +32,7 @@ class MethodSharepoint(MethodBase):
return datetime.now(UTC).strftime("%Y%m%d-%H%M%S")
def _getMicrosoftConnection(self, connectionReference: str) -> Optional[Dict[str, Any]]:
"""Get Microsoft connection from connection reference"""
"""Get Microsoft connection from connection reference and configure SharePoint service"""
try:
userConnection = self.service.getUserConnectionFromConnectionReference(connectionReference)
if not userConnection:
@ -48,47 +48,33 @@ class MethodSharepoint(MethodBase):
logger.warning(f"Connection {userConnection.id} status is not active/pending: {userConnection.status.value}")
return None
# Get a fresh token for this specific connection
from modules.security.tokenManager import TokenManager
token = TokenManager().getFreshToken(self.service.interfaceApp, userConnection.id)
if not token:
logger.warning(f"No token found for connection {userConnection.id}")
# Configure SharePoint service with the UserConnection
if not self.service.sharepoint.setAccessToken(userConnection, self.service.interfaceApp):
logger.warning(f"Failed to configure SharePoint service with connection {userConnection.id}")
return None
# Check if token is expired
if hasattr(token, 'expiresAt') and token.expiresAt:
current_time = get_utc_timestamp()
if current_time > token.expiresAt:
logger.warning(f"Token for connection {userConnection.id} is expired (expiresAt: {token.expiresAt}, current: {current_time})")
return None
logger.info(f"Successfully retrieved Microsoft connection: {userConnection.id}, status: {userConnection.status.value}, externalId: {userConnection.externalId}")
logger.info(f"Successfully configured SharePoint service with Microsoft connection: {userConnection.id}, status: {userConnection.status.value}, externalId: {userConnection.externalId}")
return {
"id": userConnection.id,
"userConnection": userConnection,
"accessToken": token.tokenAccess,
"refreshToken": token.tokenRefresh,
"scopes": ["Sites.ReadWrite.All", "Files.ReadWrite.All", "User.Read"] # SharePoint scopes
}
except Exception as e:
logger.error(f"Error getting Microsoft connection: {str(e)}")
return None
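The net effect of this hunk is a configure-once pattern: setAccessToken() stores the token on the SharePoint service, and later Graph helpers read it from there instead of accepting a token argument (the TokenManager lookup and the expiry check drop out). A toy sketch of that pattern; the service is reduced to a stub because only setAccessToken and the stored token are visible in the diff, and the real service keeps the token on an internal _target object rather than directly on itself:

from typing import Optional

class _SharePointServiceStub:
    """Assumed minimal shape: setAccessToken() resolves and stores a token so
    that later Graph calls need no token parameter."""
    def __init__(self) -> None:
        self.access_token: Optional[str] = None

    def setAccessToken(self, user_connection, interface_app) -> bool:
        # The real implementation resolves a fresh token via the app interface;
        # here we only simulate success or failure.
        token = "fake-token" if user_connection else None
        self.access_token = token
        return token is not None

# Configure once per connection reference ...
sharepoint = _SharePointServiceStub()
configured = sharepoint.setAccessToken(object(), None)
# ... then every Graph helper reads sharepoint.access_token instead of an argument.
print("configured:", configured)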
async def _discoverSharePointSites(self, access_token: str) -> List[Dict[str, Any]]:
async def _discoverSharePointSites(self) -> List[Dict[str, Any]]:
"""
Discover all SharePoint sites accessible to the user via Microsoft Graph API
Uses the access token already configured on the SharePoint service (no token parameter).
Returns:
List[Dict[str, Any]]: List of SharePoint site information
"""
try:
# Query Microsoft Graph to get all sites the user has access to
endpoint = "sites?search=*"
result = await self._makeGraphApiCall(access_token, endpoint)
result = await self._makeGraphApiCall(endpoint)
if "error" in result:
logger.error(f"Error discovering SharePoint sites: {result['error']}")
@ -375,11 +361,14 @@ class MethodSharepoint(MethodBase):
logger.error(f"Error parsing site URL {siteUrl}: {str(e)}")
return {"hostname": "", "sitePath": ""}
async def _makeGraphApiCall(self, access_token: str, endpoint: str, method: str = "GET", data: bytes = None) -> Dict[str, Any]:
async def _makeGraphApiCall(self, endpoint: str, method: str = "GET", data: bytes = None) -> Dict[str, Any]:
"""Make a Microsoft Graph API call with timeout and detailed logging"""
try:
if not hasattr(self.service, 'sharepoint') or not self.service.sharepoint._target.access_token:
return {"error": "SharePoint service not configured with access token"}
headers = {
"Authorization": f"Bearer {access_token}",
"Authorization": f"Bearer {self.service.sharepoint._target.access_token}",
"Content-Type": "application/json" if data and method != "PUT" else "application/octet-stream" if data else "application/json"
}
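For orientation, the Graph call boils down to a bearer-authenticated HTTP request against https://graph.microsoft.com/v1.0. A minimal aiohttp sketch of the GET case follows; timeouts, retries and the PUT/binary branches of the real helper are omitted, and the token is passed explicitly here purely so the sketch stays standalone:

import asyncio
import aiohttp

GRAPH_BASE = "https://graph.microsoft.com/v1.0"

async def graph_get(access_token: str, endpoint: str) -> dict:
    """Minimal Graph GET mirroring the header logic above (JSON requests only)."""
    headers = {
        "Authorization": f"Bearer {access_token}",
        "Content-Type": "application/json",
    }
    async with aiohttp.ClientSession() as session:
        async with session.get(f"{GRAPH_BASE}/{endpoint}", headers=headers) as response:
            if response.status != 200:
                return {"error": f"Graph call failed with HTTP {response.status}"}
            return await response.json()

# Example (requires a real token): asyncio.run(graph_get(token, "sites?search=*"))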
@ -436,11 +425,11 @@ class MethodSharepoint(MethodBase):
logger.error(f"Error making Graph API call: {str(e)}")
return {"error": f"Error making Graph API call: {str(e)}"}
async def _getSiteId(self, access_token: str, hostname: str, site_path: str) -> str:
async def _getSiteId(self, hostname: str, site_path: str) -> str:
"""Get SharePoint site ID from hostname and site path"""
try:
endpoint = f"sites/{hostname}:/{site_path}"
result = await self._makeGraphApiCall(access_token, endpoint)
result = await self._makeGraphApiCall(endpoint)
if "error" in result:
logger.error(f"Error getting site ID: {result['error']}")
@ -482,7 +471,7 @@ class MethodSharepoint(MethodBase):
# Discover SharePoint sites - use targeted approach when site parameter is provided
if site:
# When site parameter is provided, discover all sites first, then filter
all_sites = await self._discoverSharePointSites(connection["accessToken"])
all_sites = await self._discoverSharePointSites()
if not all_sites:
return ActionResult.isFailure(error="No SharePoint sites found or accessible")
@ -492,7 +481,7 @@ class MethodSharepoint(MethodBase):
return ActionResult.isFailure(error=f"No SharePoint sites found matching '{site}'")
else:
# No site parameter - discover all sites
sites = await self._discoverSharePointSites(connection["accessToken"])
sites = await self._discoverSharePointSites()
if not sites:
return ActionResult.isFailure(error="No SharePoint sites found or accessible")
@ -535,7 +524,6 @@ class MethodSharepoint(MethodBase):
# Use global search endpoint (site-specific search not available)
unified_result = await self._makeGraphApiCall(
connection["accessToken"],
"search/query",
method="POST",
data=json.dumps(payload).encode("utf-8")
@ -707,7 +695,7 @@ class MethodSharepoint(MethodBase):
logger.info(f"Using search API for files with query: '{search_query}'")
# Make the search API call (files)
search_result = await self._makeGraphApiCall(connection["accessToken"], endpoint)
search_result = await self._makeGraphApiCall(endpoint)
if "error" in search_result:
logger.warning(f"Search failed for site {site_name}: {search_result['error']}")
continue
@ -942,7 +930,7 @@ class MethodSharepoint(MethodBase):
return ActionResult.isFailure(error=f"Invalid pathQuery '{pathQuery}'. This appears to be search terms, not a valid SharePoint path. Use findDocumentPath action first to search for folders, then use the returned folder path as pathQuery.")
# For pathQuery, we need to discover sites to find the specific one
sites = await self._discoverSharePointSites(connection["accessToken"])
sites = await self._discoverSharePointSites()
if not sites:
return ActionResult.isFailure(error="No SharePoint sites found or accessible")
else:
@ -975,7 +963,7 @@ class MethodSharepoint(MethodBase):
search_query = fileName.replace("'", "''") # Escape single quotes for OData
endpoint = f"sites/{site_id}/drive/root/search(q='{search_query}')"
search_result = await self._makeGraphApiCall(connection["accessToken"], endpoint)
search_result = await self._makeGraphApiCall(endpoint)
if "error" in search_result:
continue
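The file lookup builds a per-site drive search endpoint and doubles single quotes for OData escaping. As a standalone sketch (the example site ID is made up):

def build_drive_search_endpoint(site_id: str, file_name: str) -> str:
    """Mirror the escaping above: single quotes in the query are doubled for OData."""
    search_query = file_name.replace("'", "''")
    return f"sites/{site_id}/drive/root/search(q='{search_query}')"

# build_drive_search_endpoint("contoso.sharepoint.com,abc,def", "Q3 report's draft.docx")
# -> "sites/contoso.sharepoint.com,abc,def/drive/root/search(q='Q3 report''s draft.docx')"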
@ -988,7 +976,7 @@ class MethodSharepoint(MethodBase):
file_endpoint = f"sites/{site_id}/drive/items/{file_id}"
# Get file metadata
file_info_result = await self._makeGraphApiCall(connection["accessToken"], file_endpoint)
file_info_result = await self._makeGraphApiCall(file_endpoint)
if "error" in file_info_result:
continue
@ -1027,7 +1015,7 @@ class MethodSharepoint(MethodBase):
# For content download, we need to handle binary data
try:
async with aiohttp.ClientSession() as session:
headers = {"Authorization": f"Bearer {connection['accessToken']}"}
headers = {"Authorization": f"Bearer {self.service.sharepoint._target.access_token}"}
async with session.get(f"https://graph.microsoft.com/v1.0/{content_endpoint}", headers=headers) as response:
if response.status == 200:
content = await response.text()
@ -1280,7 +1268,7 @@ class MethodSharepoint(MethodBase):
return ActionResult.isFailure(error=f"Invalid pathQuery '{upload_path}'. This appears to be search terms, not a valid SharePoint path. Use findDocumentPath action first to search for folders, then use the returned folder path as pathQuery.")
# For pathQuery, we need to discover sites to find the specific one
sites = await self._discoverSharePointSites(connection["accessToken"])
sites = await self._discoverSharePointSites()
if not sites:
return ActionResult.isFailure(error="No SharePoint sites found or accessible")
@ -1368,7 +1356,6 @@ class MethodSharepoint(MethodBase):
# Upload the file
upload_result = await self._makeGraphApiCall(
connection["accessToken"],
upload_endpoint,
method="PUT",
data=file_data
@ -1633,7 +1620,7 @@ class MethodSharepoint(MethodBase):
return ActionResult.isFailure(error=f"Invalid pathQuery '{pathQuery}'. This appears to be search terms, not a valid SharePoint path. Use findDocumentPath action first to search for folders, then use the returned folder path as pathQuery.")
# For pathQuery, we need to discover sites to find the specific one
sites = await self._discoverSharePointSites(connection["accessToken"])
sites = await self._discoverSharePointSites()
if not sites:
return ActionResult.isFailure(error="No SharePoint sites found or accessible")
else:
@ -1680,7 +1667,7 @@ class MethodSharepoint(MethodBase):
endpoint = f"sites/{site_id}/drive/root:/{folder_path_clean}:/children"
# Make the API call to list folder contents
api_result = await self._makeGraphApiCall(connection["accessToken"], endpoint)
api_result = await self._makeGraphApiCall(endpoint)
if "error" in api_result:
logger.warning(f"Failed to list folder {folderPath} in site {site_name}: {api_result['error']}")
@ -1745,7 +1732,7 @@ class MethodSharepoint(MethodBase):
subfolder_endpoint = f"sites/{site_id}/drive/items/{item['id']}/children"
logger.debug(f"Getting contents of subfolder: {item['name']}")
subfolder_result = await self._makeGraphApiCall(connection["accessToken"], subfolder_endpoint)
subfolder_result = await self._makeGraphApiCall(subfolder_endpoint)
if "error" not in subfolder_result:
subfolder_items = subfolder_result.get("value", [])
logger.debug(f"Found {len(subfolder_items)} items in subfolder {item['name']}")


@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)
# Global methods catalog - moved from serviceCenter
methods = {}
def _discoverMethods(service_center):
def _discoverMethods(serviceCenter):
"""Dynamically discover all method classes and their actions in modules methods package"""
try:
# Import the methods package
@ -36,7 +36,7 @@ def _discoverMethods(service_center):
issubclass(item, MethodBase) and
item != MethodBase):
# Instantiate the method
methodInstance = item(service_center)
methodInstance = item(serviceCenter)
# Discover actions from public methods
actions = {}
@ -83,11 +83,11 @@ def _discoverMethods(service_center):
except Exception as e:
logger.error(f"Error discovering methods: {str(e)}")
def getMethodsList(service_center) -> List[str]:
def getMethodsList(serviceCenter) -> List[str]:
"""Get list of available methods with their signatures in the required format"""
# Initialize methods if not already done
if not methods:
_discoverMethods(service_center)
_discoverMethods(serviceCenter)
methodList = []
for methodName, method in methods.items():
@ -99,10 +99,10 @@ def getMethodsList(service_center) -> List[str]:
methodList.append(signature)
return methodList
def getEnhancedDocumentContext(service_center) -> str:
def getEnhancedDocumentContext(serviceCenter) -> str:
"""Get enhanced document context formatted for action planning prompts with proper docList and docItem references"""
try:
document_list = service_center.getDocumentReferenceList()
document_list = serviceCenter.getDocumentReferenceList()
# Build technical context string for AI action planning
context = "AVAILABLE DOCUMENTS:\n\n"
@ -114,7 +114,7 @@ def getEnhancedDocumentContext(service_center) -> str:
# Generate docList reference for the exchange (using message ID and label)
# Find the message that corresponds to this exchange
message_id = None
for message in service_center.workflow.messages:
for message in serviceCenter.workflow.messages:
if hasattr(message, 'documentsLabel') and message.documentsLabel == exchange.documentsLabel:
message_id = message.id
break
@ -143,7 +143,7 @@ def getEnhancedDocumentContext(service_center) -> str:
# Generate docList reference for the exchange (using message ID and label)
# Find the message that corresponds to this exchange
message_id = None
for message in service_center.workflow.messages:
for message in serviceCenter.workflow.messages:
if hasattr(message, 'documentsLabel') and message.documentsLabel == exchange.documentsLabel:
message_id = message.id
break


@ -32,7 +32,7 @@ class WorkflowManager:
currentTime = get_utc_timestamp()
if workflowId:
workflow = self.services.getWorkflow(workflowId)
workflow = self.services.workflow.getWorkflow(workflowId)
if not workflow:
raise ValueError(f"Workflow {workflowId} not found")
@ -43,11 +43,11 @@ class WorkflowManager:
logger.info(f"Stopping running workflow {workflowId} before processing new prompt")
workflow.status = "stopped"
workflow.lastActivity = currentTime
self.services.updateWorkflow(workflowId, {
self.services.workflow.updateWorkflow(workflowId, {
"status": "stopped",
"lastActivity": currentTime
})
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflowId,
"message": "Workflow stopped for new prompt",
"type": "info",
@ -57,17 +57,17 @@ class WorkflowManager:
await asyncio.sleep(0.1)
newRound = workflow.currentRound + 1
self.services.updateWorkflow(workflowId, {
self.services.workflow.updateWorkflow(workflowId, {
"status": "running",
"lastActivity": currentTime,
"currentRound": newRound
})
workflow = self.services.getWorkflow(workflowId)
workflow = self.services.workflow.getWorkflow(workflowId)
if not workflow:
raise ValueError(f"Failed to reload workflow {workflowId} after update")
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflowId,
"message": f"Workflow resumed (round {workflow.currentRound})",
"type": "info",
@ -85,7 +85,7 @@ class WorkflowManager:
"currentAction": 0,
"totalTasks": 0,
"totalActions": 0,
"mandateId": self.services.mandateId,
"mandateId": self.services.user.mandateId,
"messageIds": [],
"workflowMode": workflowMode,
"maxSteps": 5 if workflowMode == "React" else 1, # Set maxSteps for React mode
@ -99,12 +99,12 @@ class WorkflowManager:
}
}
workflow = self.services.createWorkflow(workflowData)
workflow = self.services.workflow.createWorkflow(workflowData)
logger.info(f"Created workflow with mode: {getattr(workflow, 'workflowMode', 'NOT_SET')}")
logger.info(f"Workflow data passed: {workflowData.get('workflowMode', 'NOT_IN_DATA')}")
workflow.currentRound = 1
self.services.updateWorkflow(workflow.id, {"currentRound": 1})
self.services.updateWorkflowStats(workflow.id, bytesSent=0, bytesReceived=0)
self.services.workflow.updateWorkflow(workflow.id, {"currentRound": 1})
self.services.workflow.updateWorkflowStats(workflow.id, bytesSent=0, bytesReceived=0)
# Add workflow to services
self.services.workflow = workflow
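Across this file the change is mechanical: persistence calls move from flat methods on services to a services.workflow sub-interface, and the mandate ID now comes from services.user. A stub sketch of the facade shape that implies; method names are copied from the hunks, everything else is assumed, and how this coexists with the `self.services.workflow = workflow` assignment just above is not visible here:

from typing import Any, Dict, Optional

class _WorkflowInterfaceStub:
    """Assumed sub-interface reached via services.workflow.* (only the calls
    visible in this diff are stubbed)."""
    def getWorkflow(self, workflow_id: str) -> Optional[Any]: ...
    def createWorkflow(self, data: Dict[str, Any]) -> Any: ...
    def updateWorkflow(self, workflow_id: str, changes: Dict[str, Any]) -> None: ...
    def updateWorkflowStats(self, workflow_id: str, bytesSent: int = 0, bytesReceived: int = 0) -> None: ...
    def createMessage(self, data: Dict[str, Any]) -> Any: ...
    def updateMessage(self, message_id: str, changes: Dict[str, Any]) -> None: ...
    def createLog(self, data: Dict[str, Any]) -> None: ...

class _ServicesStub:
    """The manager now goes through services.workflow for persistence and
    services.user for identity (e.g. user.mandateId)."""
    def __init__(self) -> None:
        self.workflow = _WorkflowInterfaceStub()
        self.user = type("User", (), {"id": "user-0", "mandateId": "mandate-0"})()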
@ -120,17 +120,17 @@ class WorkflowManager:
async def workflowStop(self, workflowId: str) -> ChatWorkflow:
"""Stops a running workflow."""
try:
workflow = self.services.getWorkflow(workflowId)
workflow = self.services.workflow.getWorkflow(workflowId)
if not workflow:
raise ValueError(f"Workflow {workflowId} not found")
workflow.status = "stopped"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflowId, {
self.services.workflow.updateWorkflow(workflowId, {
"status": "stopped",
"lastActivity": workflow.lastActivity
})
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflowId,
"message": "Workflow stopped",
"type": "warning",
@ -192,7 +192,7 @@ class WorkflowManager:
}
# Create message first to get messageId
message = self.services.createMessage(messageData)
message = self.services.workflow.createMessage(messageData)
if message:
workflow.messages.append(message)
@ -205,7 +205,7 @@ class WorkflowManager:
documents = await self._processFileIds(userInput.listFileId, message.id)
message.documents = documents
# Update the message with documents in database
self.services.updateMessage(message.id, {"documents": [doc.to_dict() for doc in documents]})
self.services.workflow.updateMessage(message.id, {"documents": [doc.to_dict() for doc in documents]})
return message
else:
@ -307,14 +307,14 @@ class WorkflowManager:
"taskProgress": "stopped",
"actionProgress": "stopped"
}
message = self.services.createMessage(stopped_message)
message = self.services.workflow.createMessage(stopped_message)
if message:
workflow.messages.append(message)
# Update workflow status to stopped
workflow.status = "stopped"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "stopped",
"lastActivity": workflow.lastActivity
})
@ -339,14 +339,14 @@ class WorkflowManager:
"taskProgress": "stopped",
"actionProgress": "stopped"
}
message = self.services.createMessage(stopped_message)
message = self.services.workflow.createMessage(stopped_message)
if message:
workflow.messages.append(message)
# Update workflow status to stopped
workflow.status = "stopped"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "stopped",
"lastActivity": workflow.lastActivity,
"totalTasks": workflow.totalTasks,
@ -354,7 +354,7 @@ class WorkflowManager:
})
# Add stopped log entry
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflow.id,
"message": "Workflow stopped by user",
"type": "warning",
@ -381,14 +381,14 @@ class WorkflowManager:
"taskProgress": "fail",
"actionProgress": "fail"
}
message = self.services.createMessage(error_message)
message = self.services.workflow.createMessage(error_message)
if message:
workflow.messages.append(message)
# Update workflow status to failed
workflow.status = "failed"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "failed",
"lastActivity": workflow.lastActivity,
"totalTasks": workflow.totalTasks,
@ -396,7 +396,7 @@ class WorkflowManager:
})
# Add failed log entry
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflow.id,
"message": f"Workflow failed: {workflow_result.error or 'Unknown error'}",
"type": "error",
@ -428,14 +428,14 @@ class WorkflowManager:
"taskProgress": "fail",
"actionProgress": "fail"
}
message = self.services.createMessage(error_message)
message = self.services.workflow.createMessage(error_message)
if message:
workflow.messages.append(message)
# Update workflow status to failed
workflow.status = "failed"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "failed",
"lastActivity": workflow.lastActivity,
"totalTasks": workflow.totalTasks,
@ -473,7 +473,7 @@ class WorkflowManager:
}
# Create message using interface
message = self.services.createMessage(messageData)
message = self.services.workflow.createMessage(messageData)
if message:
workflow.messages.append(message)
@ -482,13 +482,13 @@ class WorkflowManager:
workflow.lastActivity = get_utc_timestamp()
# Update workflow in database
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "completed",
"lastActivity": workflow.lastActivity
})
# Add completion log entry
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflow.id,
"message": "Workflow completed",
"type": "success",
@ -534,7 +534,7 @@ class WorkflowManager:
# Update workflow status to stopped
workflow.status = "stopped"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "stopped",
"lastActivity": workflow.lastActivity,
"totalTasks": workflow.totalTasks,
@ -559,12 +559,12 @@ class WorkflowManager:
"taskProgress": "pending",
"actionProgress": "pending"
}
message = self.services.createMessage(stopped_message)
message = self.services.workflow.createMessage(stopped_message)
if message:
workflow.messages.append(message)
# Add log entry
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflow.id,
"message": "Workflow stopped by user",
"type": "warning",
@ -579,7 +579,7 @@ class WorkflowManager:
# Update workflow status to failed
workflow.status = "failed"
workflow.lastActivity = get_utc_timestamp()
self.services.updateWorkflow(workflow.id, {
self.services.workflow.updateWorkflow(workflow.id, {
"status": "failed",
"lastActivity": workflow.lastActivity,
"totalTasks": workflow.totalTasks,
@ -604,12 +604,12 @@ class WorkflowManager:
"taskProgress": "fail",
"actionProgress": "fail"
}
message = self.services.createMessage(error_message)
message = self.services.workflow.createMessage(error_message)
if message:
workflow.messages.append(message)
# Add error log entry
self.services.createLog({
self.services.workflow.createLog({
"workflowId": workflow.id,
"message": f"Workflow failed: {str(error)}",
"type": "error",