Rev 3.2 running backend

This commit is contained in:
ValueOn AG 2025-09-25 00:18:58 +02:00
parent 29c31e79bd
commit 02db2751c0
4 changed files with 77 additions and 34 deletions

View file

@ -89,10 +89,6 @@ class ConnectorTicketJira(TicketBase):
attributes.append(
TicketFieldAttribute(fieldName=fieldName, field=field_id)
)
logger.info(
f"Successfully retrieved {len(attributes)} field attributes from Jira"
)
return attributes
except aiohttp.ClientError as e:
@ -128,9 +124,6 @@ class ConnectorTicketJira(TicketBase):
attributes.append(
TicketFieldAttribute(fieldName=fieldName, field=field_id)
)
logger.info(
f"Successfully retrieved {len(attributes)} field attributes via fields API"
)
return attributes
except Exception as e:
logger.error(f"Error while calling fields API: {str(e)}")
@ -180,9 +173,6 @@ class ConnectorTicketJira(TicketBase):
headers = {"Content-Type": "application/json"}
# Debug: log the payload being sent
logger.debug(f"JIRA request payload: {json.dumps(payload, indent=2)}")
async with session.post(
url, json=payload, auth=auth, headers=headers
) as response:
@ -318,7 +308,6 @@ class ConnectorTicketJira(TicketBase):
}
else:
# Skip empty ADF fields
logger.debug(f"Skipping empty ADF field {field_id} for task {task_id}")
continue
else:
processed_fields[field_id] = field_value
@ -337,7 +326,7 @@ class ConnectorTicketJira(TicketBase):
url, json=update_data, headers=headers, auth=auth
) as response:
if response.status == 204:
logger.info(f"JIRA task {task_id} updated successfully.")
pass
else:
error_text = await response.text()
logger.error(

View file

@ -2,17 +2,11 @@
import asyncio
import logging
from modules.interfaces.interfaceAppObjects import getRootInterface
# GET EVENT USER
logger = logging.getLogger(__name__)
eventUser = getRootInterface().getUserByUsername("event")
if not eventUser:
logger.error("Event user not found")
# LAUNCH FEATURES
# Import the syncDelta module to initialize it (this will register the scheduler)
from modules.features.syncDelta import mainSyncDelta
from modules.features.syncDelta.mainSyncDelta import ManagerSyncDelta, performSync
managerSyncDelta = ManagerSyncDelta(eventUser)
asyncio.create_task(performSync(eventUser))
# Run initial sync
asyncio.create_task(mainSyncDelta.scheduled_sync())

View file

@ -5,6 +5,7 @@ This module handles the synchronization of tickets to SharePoint using the new
Graph API-based connector architecture.
"""
import asyncio
import logging
import os
import io
@ -84,9 +85,7 @@ class ManagerSyncDelta:
else:
self.services = getServices(eventUser, None)
# Resolve SharePoint connection for the configured user id
self.sharepointConnection = self.services.workflowService.getUserConnectionByExternalUsername(
"msft", self.SHAREPOINT_USER_ID
)
self.sharepointConnection = self.services.workflow.getUserConnectionByExternalUsername("msft", self.SHAREPOINT_USER_ID)
if not self.sharepointConnection:
logger.error(
f"No SharePoint connection found for user: {self.SHAREPOINT_USER_ID}"
@ -126,9 +125,16 @@ class ManagerSyncDelta:
summary = f"Sync {sync_mode} - Updated: {merge_details['updated']}, Added: {merge_details['added']}, Unchanged: {merge_details['unchanged']}"
self._log_audit_event("SYNC_CHANGES_SUMMARY", "INFO", summary)
# Log individual field changes
for change in merge_details['changes']:
# Log individual field changes (limit to first 10 to avoid spam)
for change in merge_details['changes'][:10]:
# Truncate very long changes to avoid logging issues
if len(change) > 500:
change = change[:500] + "... [truncated]"
self._log_audit_event("SYNC_FIELD_CHANGE", "INFO", f"{sync_mode}: {change}")
# Log count if there were more changes
if len(merge_details['changes']) > 10:
self._log_audit_event("SYNC_FIELD_CHANGE", "INFO", f"{sync_mode}: ... and {len(merge_details['changes']) - 10} more changes")
except Exception as e:
logger.warning(f"Failed to log sync changes: {str(e)}")
@ -417,8 +423,18 @@ class ManagerSyncDelta:
if field_config[0] == 'get':
old_value = "" if existing_row.get(field_name) is None else str(existing_row.get(field_name))
new_value = "" if jira_row.get(field_name) is None else str(jira_row.get(field_name))
if old_value != new_value:
row_changes.append(f"{field_name}: '{old_value}''{new_value}'")
# Convert ADF data to readable text for logging
if isinstance(new_value, dict) and new_value.get("type") == "doc":
new_value_readable = self.convertAdfToText(new_value)
if old_value != new_value_readable:
row_changes.append(f"{field_name}: '{old_value[:100]}...' -> '{new_value_readable[:100]}...'")
elif old_value != new_value:
# Truncate long values for logging
old_truncated = old_value[:100] + "..." if len(old_value) > 100 else old_value
new_truncated = new_value[:100] + "..." if len(new_value) > 100 else new_value
row_changes.append(f"{field_name}: '{old_truncated}' -> '{new_truncated}'")
existing_row[field_name] = jira_row.get(field_name)
merged_data.append(existing_row)
if row_changes:
@ -748,13 +764,13 @@ async def dumpTicketDataToFile(
return False
# Main part of the module
async def performSync(eventUser=None) -> bool:
async def performSync(eventUser) -> bool:
"""Perform tickets to SharePoint synchronization
This function is called by the scheduler and can be used independently.
Args:
eventUser: Optional event user to use for synchronization
eventUser: Event user to use for synchronization
Returns:
bool: True if synchronization was successful, False otherwise
@ -762,8 +778,11 @@ async def performSync(eventUser=None) -> bool:
try:
logger.info("Starting DG tickets sync...")
# Sync audit logging is handled by ManagerSyncDelta instance
if not eventUser:
logger.error("Event user not provided - cannot perform sync")
return False
# Sync audit logging is handled by ManagerSyncDelta instance
syncManager = ManagerSyncDelta(eventUser)
success = await syncManager.syncTicketsOverSharepoint()
@ -778,6 +797,42 @@ async def performSync(eventUser=None) -> bool:
logger.error(f"Error in performing DG tickets sync: {str(e)}")
return False
# Create a global instance of ManagerSyncDelta to use for scheduled runs
_sync_manager = None
def initialize_sync_manager(eventUser):
    """Create the module-wide ManagerSyncDelta on first use and return it.

    Subsequent calls return the already-built instance; the eventUser
    argument only matters on the first call.
    """
    global _sync_manager
    if _sync_manager is not None:
        return _sync_manager
    _sync_manager = ManagerSyncDelta(eventUser)
    logger.info("Global sync manager initialized with eventUser")
    return _sync_manager
async def scheduled_sync():
    """Run one synchronization pass using the module-level sync manager.

    Never raises: returns False when the manager is missing, lacks an
    eventUser, or the underlying sync fails.
    """
    global _sync_manager
    try:
        manager = _sync_manager
        if not (manager and manager.eventUser):
            logger.error("Sync manager not properly initialized - no eventUser")
            return False
        return await performSync(manager.eventUser)
    except Exception as e:
        logger.error(f"Error in scheduled sync: {str(e)}")
        return False
# Initialize sync manager with eventUser
# Best-effort at import time: a failure is logged, never propagated.
try:
    from modules.interfaces.interfaceAppObjects import getRootInterface

    eventUser = getRootInterface().getUserByUsername("event")
    if not eventUser:
        logger.error("Event user not found - cannot initialize sync manager")
    else:
        initialize_sync_manager(eventUser)
        logger.info("Sync manager initialized with eventUser")
except Exception as e:
    logger.error(f"Failed to initialize sync manager: {str(e)}")
# Register scheduled job on import using the shared event manager
try:
from modules.shared.eventManagement import eventManager
@ -787,7 +842,7 @@ try:
# Schedule sync every 20 minutes (at minutes 00, 20, 40)
eventManager.register_cron(
job_id="dgsync",
func=performSync,
func=scheduled_sync,
cron_kwargs={"minute": "0,20,40"},
replace_existing=True,
coalesce=True,
@ -795,7 +850,13 @@ try:
misfire_grace_time=1800,
)
logger.info("Registered DG ticket sync via EventManagement (every 20 minutes)")
# Run initial sync
import asyncio
asyncio.create_task(scheduled_sync())
logger.info("Initial sync scheduled")
else:
logger.info(f"Skipping DG scheduler registration for ticket sync in env: {APP_ENV_TYPE}")
except Exception as e:
logger.error(f"Failed to register DG ticket sync: {str(e)}")

View file

@ -2,7 +2,6 @@ from typing import Any
from modules.interfaces.interfaceAppModel import User
from modules.interfaces.interfaceChatModel import ChatWorkflow
from modules.services.serviceWorkflows.mainServiceWorkflows import WorkflowService
class PublicService:
"""Lightweight proxy exposing only public callable attributes of a target.