From 7317bce6565e717a4820471c039b3eaa7b225305 Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Tue, 16 Sep 2025 09:50:25 +0200
Subject: [PATCH] delta sync
---
delta_sync_fields.txt | 63 ++++++++++++++++++++
modules/connectors/connectorTicketJira.py | 42 +++++++++++--
modules/interfaces/interfaceTicketObjects.py | 37 ++++++++++++
modules/services/serviceDeltaSync.py | 58 +++++++++++++++++-
4 files changed, 193 insertions(+), 7 deletions(-)
create mode 100644 delta_sync_fields.txt
diff --git a/delta_sync_fields.txt b/delta_sync_fields.txt
new file mode 100644
index 00000000..2c6bae46
--- /dev/null
+++ b/delta_sync_fields.txt
@@ -0,0 +1,63 @@
+'Status Category Changed': ['get', ['fields', 'statuscategorychangedate']]
+'Issue Type': ['get', ['fields', 'issuetype']]
+'Time Spent': ['get', ['fields', 'timespent']]
+'Project': ['get', ['fields', 'project']]
+'Fix versions': ['get', ['fields', 'fixVersions']]
+'Σ Time Spent': ['get', ['fields', 'aggregatetimespent']]
+'Status Category': ['get', ['fields', 'statusCategory']]
+'Parent': ['get', ['fields', 'parent']]
+'Resolution': ['get', ['fields', 'resolution']]
+'Design': ['get', ['fields', 'customfield_10037']]
+'Resolved': ['get', ['fields', 'resolutiondate']]
+'Work Ratio': ['get', ['fields', 'workratio']]
+'Last Viewed': ['get', ['fields', 'lastViewed']]
+'Watchers': ['get', ['fields', 'watches']]
+'Restrict to': ['get', ['fields', 'issuerestriction']]
+'Images': ['get', ['fields', 'thumbnail']]
+'DELTA Comments (i)': ['get', ['fields', 'customfield_10060']]
+'Created': ['get', ['fields', 'created']]
+'Issue Status': ['get', ['fields', 'customfield_10062']]
+'Initial_Import_ID': ['get', ['fields', 'customfield_10063']]
+'Selise Comments (i)': ['get', ['fields', 'customfield_10064']]
+'Flagged': ['get', ['fields', 'customfield_10021']]
+'Selise Status Values': ['get', ['fields', 'customfield_10065']]
+'References': ['get', ['fields', 'customfield_10066']]
+'Priority': ['get', ['fields', 'priority']]
+'Selise Ticket References': ['get', ['fields', 'customfield_10067']]
+'Gemeldet von': ['get', ['fields', 'customfield_10101']]
+'Labels': ['get', ['fields', 'labels']]
+'Rank': ['get', ['fields', 'customfield_10019']]
+'Remaining Estimate': ['get', ['fields', 'timeestimate']]
+'Σ Original Estimate': ['get', ['fields', 'aggregatetimeoriginalestimate']]
+'Affects versions': ['get', ['fields', 'versions']]
+'Linked Issues': ['get', ['fields', 'issuelinks']]
+'Assignee': ['get', ['fields', 'assignee']]
+'Updated': ['get', ['fields', 'updated']]
+'Status': ['get', ['fields', 'status']]
+'Components': ['get', ['fields', 'components']]
+'Key': ['get', ['fields', 'issuekey']]
+'Original estimate': ['get', ['fields', 'timeoriginalestimate']]
+'Description': ['get', ['fields', 'description']]
+'Category': ['get', ['fields', 'customfield_10056']]
+'Topic Group': ['get', ['fields', 'customfield_10057']]
+'Module Category': ['get', ['fields', 'customfield_10058']]
+'Time tracking': ['get', ['fields', 'timetracking']]
+'Start date': ['get', ['fields', 'customfield_10015']]
+'Security Level': ['get', ['fields', 'security']]
+'Attachment': ['get', ['fields', 'attachment']]
+'Σ Remaining Estimate': ['get', ['fields', 'aggregatetimeestimate']]
+'Summary': ['get', ['fields', 'summary']]
+'Creator': ['get', ['fields', 'creator']]
+'Sub-tasks': ['get', ['fields', 'subtasks']]
+'Reporter': ['get', ['fields', 'reporter']]
+'Σ Progress': ['get', ['fields', 'aggregateprogress']]
+'Development': ['get', ['fields', 'customfield_10000']]
+'Team': ['get', ['fields', 'customfield_10001']]
+'DELTA Comments': ['get', ['fields', 'customfield_10167']]
+'SELISE Comments': ['get', ['fields', 'customfield_10168']]
+'Environment': ['get', ['fields', 'environment']]
+'Due date': ['get', ['fields', 'duedate']]
+'Progress': ['get', ['fields', 'progress']]
+'Votes': ['get', ['fields', 'votes']]
+'Comment': ['get', ['fields', 'comment']]
+'Log Work': ['get', ['fields', 'worklog']]
diff --git a/modules/connectors/connectorTicketJira.py b/modules/connectors/connectorTicketJira.py
index 91681ba3..29faa32a 100644
--- a/modules/connectors/connectorTicketJira.py
+++ b/modules/connectors/connectorTicketJira.py
@@ -51,7 +51,7 @@ class ConnectorTicketJira(TicketBase):
"""
jql_query = f"project={self.project_code} AND issuetype={self.issue_type}"
- # Prepare the request URL and parameters
+ # Prepare the request URL and parameters (use new search endpoint)
url = f"{self.jira_url}/rest/api/3/search/jql"
params = {"jql": jql_query, "maxResults": 1, "expand": "names"}
@@ -76,9 +76,12 @@ class ConnectorTicketJira(TicketBase):
issues = data.get("issues", [])
field_names = data.get("names", {})
- if not issues:
- logger.warning(f"No issues found for query: {jql_query}")
- return []
+ # If no issues or fields are present, fall back to the fields API
+ if not issues or not issues[0].get("fields"):
+ logger.warning(
+ "No issue fields returned by search; falling back to /rest/api/3/field"
+ )
+ return await self._read_all_fields_via_fields_api()
# Extract field attributes from the first issue
attributes = []
@@ -106,6 +109,37 @@ class ConnectorTicketJira(TicketBase):
logger.error(f"Unexpected error while fetching Jira attributes: {str(e)}")
raise
+ async def _read_all_fields_via_fields_api(self) -> list[TicketFieldAttribute]:
+        """Fallback: list every field (display name and field id) via the Jira fields API."""
+ auth = aiohttp.BasicAuth(self.jira_username, self.jira_api_token)
+ url = f"{self.jira_url}/rest/api/3/field"
+ try:
+ async with aiohttp.ClientSession() as session:
+ async with session.get(url, auth=auth) as response:
+ if response.status != 200:
+ error_text = await response.text()
+ logger.error(
+ f"Jira fields API failed with status {response.status}: {error_text}"
+ )
+ return []
+
+ data = await response.json()
+ attributes: list[TicketFieldAttribute] = []
+ for field in data:
+ field_id = field.get("id")
+ field_name = field.get("name", field_id)
+ if field_id:
+ attributes.append(
+ TicketFieldAttribute(field_name=field_name, field=field_id)
+ )
+ logger.info(
+ f"Successfully retrieved {len(attributes)} field attributes via fields API"
+ )
+ return attributes
+ except Exception as e:
+ logger.error(f"Error while calling fields API: {str(e)}")
+ return []
+
async def read_tasks(self, *, limit: int = 0) -> list[Task]:
"""
Read tasks from Jira with pagination support.
diff --git a/modules/interfaces/interfaceTicketObjects.py b/modules/interfaces/interfaceTicketObjects.py
index d1c9389a..cc093636 100644
--- a/modules/interfaces/interfaceTicketObjects.py
+++ b/modules/interfaces/interfaceTicketObjects.py
@@ -87,6 +87,8 @@ class TicketSharepointSyncInterface:
audit_log.append("Step 2: Transforming JIRA data...")
transformed_tasks = self._transform_tasks(tickets, include_put=True)
jira_data = [task.data for task in transformed_tasks]
+ # Remove empty records and those without an ID to avoid blank rows
+ jira_data = self._filter_empty_records(jira_data)
audit_log.append(f"JIRA issues transformed: {len(jira_data)}")
audit_log.append("")
@@ -478,6 +480,8 @@ class TicketSharepointSyncInterface:
# 7. Create Excel with 4-row structure and write to SharePoint
audit_log.append("Step 7: Writing updated Excel to SharePoint...")
+ # Ensure no empty records are written
+ merged_data = self._filter_empty_records(merged_data)
excel_content = self._create_excel_content(merged_data, existing_headers)
await self.connector_sharepoint.upload_file(
site_id=self.site_id,
@@ -721,6 +725,39 @@ class TicketSharepointSyncInterface:
except (KeyError, TypeError):
return None
+ def _filter_empty_records(self, records: list[dict]) -> list[dict]:
+ """Remove records that are effectively empty or missing an ID.
+
+ - Drop rows with no 'ID'
+ - Drop rows where all mapped fields are empty/None/''
+ """
+ filtered: list[dict] = []
+ field_names = set(self.task_sync_definition.keys())
+ for row in records:
+ if not isinstance(row, dict):
+ continue
+ # Require ID
+ task_id = row.get("ID")
+ if not task_id:
+ continue
+ # Check if all mapped fields are empty
+ non_empty = False
+ for name in field_names:
+ val = row.get(name)
+ if val is None:
+ continue
+ if isinstance(val, str) and val.strip() == "":
+ continue
+ # Consider dict/list values as non-empty if they have content
+ if isinstance(val, (list, dict)):
+ if len(val) == 0:
+ continue
+ non_empty = True
+ break
+ if non_empty:
+ filtered.append(row)
+ return filtered
+
def _merge_jira_with_existing(
self, jira_data: list[dict], existing_data: list[dict]
) -> list[dict]:
diff --git a/modules/services/serviceDeltaSync.py b/modules/services/serviceDeltaSync.py
index 709bc3e7..14697c60 100644
--- a/modules/services/serviceDeltaSync.py
+++ b/modules/services/serviceDeltaSync.py
@@ -6,6 +6,7 @@ Graph API-based connector architecture.
"""
import logging
+import os
import csv
import io
from datetime import datetime, UTC
@@ -70,10 +71,10 @@ class ManagerSyncDelta:
'Assignee': ['get', ['fields', 'assignee', 'displayName']],
'Issue Created': ['get', ['fields', 'created']],
'Due Date': ['get', ['fields', 'duedate']],
- 'DELTA Comments': ['get', ['fields', 'customfield_10060']],
+ 'DELTA Comments': ['get', ['fields', 'customfield_10167']],
'SELISE Ticket References': ['put', ['fields', 'customfield_10067']],
'SELISE Status Values': ['put', ['fields', 'customfield_10065']],
- 'SELISE Comments': ['put', ['fields', 'customfield_10064']],
+ 'SELISE Comments': ['put', ['fields', 'customfield_10168']],
}
def __init__(self):
@@ -207,6 +208,12 @@ class ManagerSyncDelta:
logger.error("Failed to initialize connectors")
return False
+        # NOTE: field dump is intentionally disabled; uncomment to refresh delta_sync_fields.txt
+ # try:
+ # await dump_jira_fields_to_file()
+ # except Exception as e:
+ # logger.warning(f"Failed to dump JIRA fields (non-blocking): {str(e)}")
+
# Get the appropriate sync file name based on mode
sync_file_name = self.get_sync_file_name()
logger.info(f"Using sync file: {sync_file_name}")
@@ -244,6 +251,51 @@ class ManagerSyncDelta:
+# Utility: dump all Jira fields (name -> field id) to a text file
+async def dump_jira_fields_to_file(filepath: str = "delta_sync_fields.txt") -> bool:
+ """Write all available JIRA fields for the configured project/issue type to a text file.
+
+ The output format matches the legacy fields.txt, e.g.:
+ 'Summary': ['get', ['fields', 'summary']]
+
+ Args:
+ filepath: Target text file path to write.
+
+ Returns:
+ True on success, False otherwise.
+ """
+ try:
+ # Initialize Jira connector with the hardcoded credentials/constants
+ jira = await ConnectorTicketJira.create(
+ jira_username=ManagerSyncDelta.JIRA_USERNAME,
+ jira_api_token=ManagerSyncDelta.JIRA_API_TOKEN,
+ jira_url=ManagerSyncDelta.JIRA_URL,
+ project_code=ManagerSyncDelta.JIRA_PROJECT_CODE,
+ issue_type=ManagerSyncDelta.JIRA_ISSUE_TYPE,
+ )
+
+ attributes = await jira.read_attributes()
+ if not attributes:
+ logger.warning("No JIRA attributes returned; nothing to write.")
+ return False
+
+ # Ensure directory exists if a directory part is provided
+ dir_name = os.path.dirname(filepath)
+ if dir_name:
+ os.makedirs(dir_name, exist_ok=True)
+
+ # Write in the expected mapping format
+ with open(filepath, "w", encoding="utf-8") as f:
+ for attr in attributes:
+ # attr.field_name (human name), attr.field (Jira field id)
+ f.write(f"'{attr.field_name}': ['get', ['fields', '{attr.field}']]\n")
+
+ logger.info(f"Wrote {len(attributes)} JIRA fields to {filepath}")
+ return True
+ except Exception as e:
+ logger.error(f"Failed to dump JIRA fields: {str(e)}")
+ return False
+
# Global sync function for use in app.py
async def perform_sync_jira_delta_group() -> bool:
"""Perform JIRA to SharePoint synchronization for Delta Group.
@@ -254,7 +306,7 @@ async def perform_sync_jira_delta_group() -> bool:
bool: True if synchronization was successful, False otherwise
"""
try:
- if APP_ENV_TYPE != "prod":
+ if APP_ENV_TYPE != "prod" and APP_ENV_TYPE != "dev":
logger.info("JIRA to SharePoint synchronization: TASK to run only in PROD")
return True