From dc0346904f6361a9c44254f2371e797cb89f03cd Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Tue, 21 Apr 2026 18:14:21 +0200
Subject: [PATCH] redmine integration
---
app.py | 8 +-
modules/connectors/connectorTicketsRedmine.py | 410 ++++++++++++
modules/features/redmine/__init__.py | 3 +
modules/features/redmine/datamodelRedmine.py | 530 +++++++++++++++
.../redmine/interfaceFeatureRedmine.py | 449 +++++++++++++
modules/features/redmine/mainRedmine.py | 329 ++++++++++
.../features/redmine/routeFeatureRedmine.py | 478 ++++++++++++++
modules/features/redmine/serviceRedmine.py | 609 ++++++++++++++++++
.../features/redmine/serviceRedmineStats.py | 403 ++++++++++++
.../redmine/serviceRedmineStatsCache.py | 105 +++
.../features/redmine/serviceRedmineSync.py | 315 +++++++++
modules/routes/routeSystem.py | 3 +
.../serviceAgent/conversationManager.py | 27 +-
.../services/serviceAgent/featureDataAgent.py | 13 +
modules/shared/timeUtils.py | 73 ++-
pytest.ini | 8 +-
tests/fixtures/__init__.py | 0
tests/fixtures/loadRedmineSnapshot.py | 73 +++
tests/fixtures/redmineSnapshot.json | 98 +++
tests/test_service_redmine_orphans.py | 48 ++
tests/test_service_redmine_stats.py | 122 ++++
tests/test_service_redmine_stats_cache.py | 57 ++
22 files changed, 4154 insertions(+), 7 deletions(-)
create mode 100644 modules/connectors/connectorTicketsRedmine.py
create mode 100644 modules/features/redmine/__init__.py
create mode 100644 modules/features/redmine/datamodelRedmine.py
create mode 100644 modules/features/redmine/interfaceFeatureRedmine.py
create mode 100644 modules/features/redmine/mainRedmine.py
create mode 100644 modules/features/redmine/routeFeatureRedmine.py
create mode 100644 modules/features/redmine/serviceRedmine.py
create mode 100644 modules/features/redmine/serviceRedmineStats.py
create mode 100644 modules/features/redmine/serviceRedmineStatsCache.py
create mode 100644 modules/features/redmine/serviceRedmineSync.py
create mode 100644 tests/fixtures/__init__.py
create mode 100644 tests/fixtures/loadRedmineSnapshot.py
create mode 100644 tests/fixtures/redmineSnapshot.json
create mode 100644 tests/test_service_redmine_orphans.py
create mode 100644 tests/test_service_redmine_stats.py
create mode 100644 tests/test_service_redmine_stats_cache.py
diff --git a/app.py b/app.py
index 482b3376..8e3552b5 100644
--- a/app.py
+++ b/app.py
@@ -519,14 +519,18 @@ from modules.auth import (
ProactiveTokenRefreshMiddleware,
)
-# i18n language detection middleware (sets per-request language from Accept-Language header)
+# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
+# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
+# without having to thread them through every call site.
from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
+from modules.shared.timeUtils import _setRequestTimezone
@app.middleware("http")
-async def _i18nMiddleware(request: Request, call_next):
+async def _requestContextMiddleware(request: Request, call_next):
acceptLang = request.headers.get("Accept-Language", "")
lang = normalizePrimaryLanguageTag(acceptLang, "de")
_setLanguage(lang)
+ _setRequestTimezone(request.headers.get("X-User-Timezone", ""))
return await call_next(request)
app.add_middleware(CSRFMiddleware)
diff --git a/modules/connectors/connectorTicketsRedmine.py b/modules/connectors/connectorTicketsRedmine.py
new file mode 100644
index 00000000..dfdf3dff
--- /dev/null
+++ b/modules/connectors/connectorTicketsRedmine.py
@@ -0,0 +1,410 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine REST connector.
+
+Async / aiohttp port of the SSS pilot client
+(``pamocreate/projects/valueon/sss/project_mars/redmine-sync/code/_redmineClient.py``)
+plus the read-side helpers required by ``serviceRedmine`` and
+``serviceRedmineStats``.
+
+Auth: ``X-Redmine-API-Key`` header. The key is *never* logged.
+
+Idempotency / safety:
+- ``DELETE /issues/{id}`` is often forbidden in Redmine (HTTP 403/401).
+ ``deleteIssue`` returns ``False`` instead of raising in that case so
+ the higher layer can fall back to status-based archival.
+- A small ``_throttleSeconds`` delay (default 150 ms) is awaited after
+  every write call so bursts of writes do not overload the SSS Redmine server.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+from typing import Any, Dict, List, Optional, Tuple
+from urllib.parse import urlencode
+
+import aiohttp
+
+from modules.datamodels.datamodelTickets import TicketBase, TicketFieldAttribute
+
+logger = logging.getLogger(__name__)
+
+
+class RedmineApiError(RuntimeError):
+ """Raised when the Redmine API returns a non-success status."""
+
+ def __init__(self, status: int, body: str, method: str, path: str):
+ self.status = status
+ self.body = body
+ self.method = method
+ self.path = path
+ super().__init__(f"Redmine {method} {path} failed: HTTP {status} {body[:300]}")
+
+
+class ConnectorTicketsRedmine(TicketBase):
+ """Async Redmine connector. One instance per (baseUrl, apiKey, projectId)."""
+
+ def __init__(
+ self,
+ *,
+ baseUrl: str,
+ apiKey: str,
+ projectId: str,
+ throttleSeconds: float = 0.15,
+ timeoutSeconds: float = 30.0,
+ ) -> None:
+ if not baseUrl:
+ raise ValueError("Redmine baseUrl is required")
+ if not apiKey:
+ raise ValueError("Redmine apiKey is required")
+ self._baseUrl = baseUrl.rstrip("/")
+ self._apiKey = apiKey
+ self._projectId = str(projectId) if projectId is not None else ""
+ self._throttleSeconds = max(0.0, float(throttleSeconds))
+ self._timeoutSeconds = float(timeoutSeconds)
+
+ # ------------------------------------------------------------------
+ # Low-level
+ # ------------------------------------------------------------------
+
+ def _headers(self) -> Dict[str, str]:
+ return {
+ "X-Redmine-API-Key": self._apiKey,
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ }
+
+ async def _call(
+ self,
+ method: str,
+ path: str,
+ *,
+ payload: Optional[Dict[str, Any]] = None,
+ params: Optional[Dict[str, Any]] = None,
+ ) -> Tuple[int, Optional[Dict[str, Any]], str]:
+ """Single REST call. Returns ``(status, json_or_none, raw_body)``.
+
+ Does *not* raise -- the caller decides whether a non-2xx is fatal
+ (e.g. 403 on DELETE is expected and handled).
+ """
+ url = f"{self._baseUrl}{path}"
+ if params:
+ url = f"{url}?{urlencode(params)}"
+ timeout = aiohttp.ClientTimeout(total=self._timeoutSeconds)
+ try:
+ async with aiohttp.ClientSession(timeout=timeout) as session:
+ async with session.request(method, url, headers=self._headers(), json=payload) as resp:
+ raw = await resp.text()
+ parsed: Optional[Dict[str, Any]] = None
+ if raw:
+ try:
+ parsed = await resp.json(content_type=None)
+ except Exception:
+ parsed = None
+ return resp.status, parsed, raw
+ except aiohttp.ClientError as e:
+ logger.warning(f"Redmine {method} {path} client error: {e}")
+ return -1, None, f"ClientError: {e}"
+ except asyncio.TimeoutError:
+ logger.warning(f"Redmine {method} {path} timeout after {self._timeoutSeconds}s")
+ return -1, None, "Timeout"
+
+ @staticmethod
+ def _isOk(status: int) -> bool:
+ return 200 <= status < 300
+
+ async def _gentle(self) -> None:
+ if self._throttleSeconds > 0:
+ await asyncio.sleep(self._throttleSeconds)
+
+ # ------------------------------------------------------------------
+ # Identity / health
+ # ------------------------------------------------------------------
+
+ async def whoAmI(self) -> Dict[str, Any]:
+ status, body, raw = await self._call("GET", "/users/current.json")
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", "/users/current.json")
+ return body.get("user", {})
+
+ # ------------------------------------------------------------------
+ # Project meta -- trackers, statuses, priorities, custom fields, users
+ # ------------------------------------------------------------------
+
+ async def getTrackers(self) -> List[Dict[str, Any]]:
+ status, body, raw = await self._call("GET", "/trackers.json")
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", "/trackers.json")
+ return body.get("trackers", []) or []
+
+ async def getStatuses(self) -> List[Dict[str, Any]]:
+ status, body, raw = await self._call("GET", "/issue_statuses.json")
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", "/issue_statuses.json")
+ return body.get("issue_statuses", []) or []
+
+ async def getPriorities(self) -> List[Dict[str, Any]]:
+ status, body, raw = await self._call(
+ "GET", "/enumerations/issue_priorities.json"
+ )
+ if not self._isOk(status) or not body:
+ return []
+ return body.get("issue_priorities", []) or []
+
+ async def getCustomFields(self) -> List[Dict[str, Any]]:
+ """Requires admin privileges in Redmine. Returns ``[]`` if forbidden."""
+ status, body, raw = await self._call("GET", "/custom_fields.json")
+ if status == 403 or status == 401:
+ logger.info("Redmine /custom_fields.json forbidden -- using per-issue field discovery")
+ return []
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", "/custom_fields.json")
+ return body.get("custom_fields", []) or []
+
+ async def getProjectUsers(self) -> List[Dict[str, Any]]:
+ status, body, raw = await self._call(
+ "GET", f"/projects/{self._projectId}/memberships.json", params={"limit": 100}
+ )
+ if not self._isOk(status) or not body:
+ return []
+ members = body.get("memberships", []) or []
+ users: List[Dict[str, Any]] = []
+ seen: set[int] = set()
+ for m in members:
+ user = m.get("user")
+ if not user:
+ continue
+ uid = user.get("id")
+ if uid in seen:
+ continue
+ seen.add(uid)
+ users.append(user)
+ return users
+
+ async def getProjectInfo(self) -> Dict[str, Any]:
+ status, body, raw = await self._call("GET", f"/projects/{self._projectId}.json")
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", f"/projects/{self._projectId}.json")
+ return body.get("project", {})
+
+ # ------------------------------------------------------------------
+ # Issues -- read
+ # ------------------------------------------------------------------
+
+ async def getIssue(
+ self, issueId: int, *, includeRelations: bool = True, includeChildren: bool = False
+ ) -> Dict[str, Any]:
+ includes = ["custom_fields", "journals"]
+ if includeRelations:
+ includes.append("relations")
+ if includeChildren:
+ includes.append("children")
+ params = {"include": ",".join(includes)}
+ status, body, raw = await self._call("GET", f"/issues/{issueId}.json", params=params)
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", f"/issues/{issueId}.json")
+ return body.get("issue", {})
+
+ async def listIssues(
+ self,
+ *,
+ trackerId: Optional[int] = None,
+ statusId: Optional[str] = "*",
+ updatedOnFrom: Optional[str] = None,
+ updatedOnTo: Optional[str] = None,
+ createdOnFrom: Optional[str] = None,
+ createdOnTo: Optional[str] = None,
+ assignedToId: Optional[int] = None,
+ subjectContains: Optional[str] = None,
+ limit: int = 100,
+ offset: int = 0,
+ include: Optional[List[str]] = None,
+ ) -> Dict[str, Any]:
+ """Single-page list. Returns the raw envelope ``{issues, total_count, offset, limit}``."""
+ params: Dict[str, Any] = {
+ "project_id": self._projectId,
+ "limit": str(limit),
+ "offset": str(offset),
+ }
+ if statusId is not None:
+ params["status_id"] = str(statusId)
+ if trackerId is not None:
+ params["tracker_id"] = str(trackerId)
+ if assignedToId is not None:
+ params["assigned_to_id"] = str(assignedToId)
+ if subjectContains:
+ params["subject"] = f"~{subjectContains}"
+ if updatedOnFrom and updatedOnTo:
+ params["updated_on"] = f"><{updatedOnFrom}|{updatedOnTo}"
+ elif updatedOnFrom:
+ params["updated_on"] = f">={updatedOnFrom}"
+ elif updatedOnTo:
+ params["updated_on"] = f"<={updatedOnTo}"
+ if createdOnFrom and createdOnTo:
+ params["created_on"] = f"><{createdOnFrom}|{createdOnTo}"
+ elif createdOnFrom:
+ params["created_on"] = f">={createdOnFrom}"
+ elif createdOnTo:
+ params["created_on"] = f"<={createdOnTo}"
+ if include:
+ params["include"] = ",".join(include)
+
+ status, body, raw = await self._call("GET", "/issues.json", params=params)
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "GET", "/issues.json")
+ return body
+
+ async def listAllIssues(
+ self,
+ *,
+ trackerId: Optional[int] = None,
+ statusId: Optional[str] = "*",
+ updatedOnFrom: Optional[str] = None,
+ updatedOnTo: Optional[str] = None,
+ createdOnFrom: Optional[str] = None,
+ createdOnTo: Optional[str] = None,
+ assignedToId: Optional[int] = None,
+ pageSize: int = 100,
+ maxPages: int = 50,
+ include: Optional[List[str]] = None,
+ ) -> List[Dict[str, Any]]:
+ """Paginate ``listIssues`` and return all matching raw issues."""
+ all_issues: List[Dict[str, Any]] = []
+ offset = 0
+ for _page in range(maxPages):
+ envelope = await self.listIssues(
+ trackerId=trackerId,
+ statusId=statusId,
+ updatedOnFrom=updatedOnFrom,
+ updatedOnTo=updatedOnTo,
+ createdOnFrom=createdOnFrom,
+ createdOnTo=createdOnTo,
+ assignedToId=assignedToId,
+ limit=pageSize,
+ offset=offset,
+ include=include,
+ )
+ page_issues = envelope.get("issues", []) or []
+ all_issues.extend(page_issues)
+ total = int(envelope.get("total_count") or 0)
+ offset += len(page_issues)
+ if not page_issues or offset >= total:
+ break
+ return all_issues
+
+ async def listRelations(self, issueId: int) -> List[Dict[str, Any]]:
+ issue = await self.getIssue(issueId, includeRelations=True)
+ return issue.get("relations", []) or []
+
+ # ------------------------------------------------------------------
+ # Issues -- write
+ # ------------------------------------------------------------------
+
+ async def createIssue(self, fields: Dict[str, Any]) -> Dict[str, Any]:
+ body_in = {"issue": dict(fields)}
+ body_in["issue"].setdefault("project_id", self._projectId)
+ status, body, raw = await self._call("POST", "/issues.json", payload=body_in)
+ await self._gentle()
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "POST", "/issues.json")
+ return body.get("issue", {})
+
+ async def updateIssue(
+ self, issueId: int, fields: Dict[str, Any], *, notes: Optional[str] = None
+ ) -> bool:
+ body_in: Dict[str, Any] = {"issue": dict(fields)}
+ if notes:
+ body_in["issue"]["notes"] = notes
+ status, body, raw = await self._call("PUT", f"/issues/{issueId}.json", payload=body_in)
+ await self._gentle()
+ if status == 204:
+ return True
+ if not self._isOk(status):
+ raise RedmineApiError(status, raw, "PUT", f"/issues/{issueId}.json")
+ return True
+
+ async def deleteIssue(self, issueId: int) -> bool:
+ """Returns ``False`` if Redmine forbids deletion (HTTP 403/401)."""
+ status, body, raw = await self._call("DELETE", f"/issues/{issueId}.json")
+ await self._gentle()
+ if status in (200, 204):
+ return True
+ if status in (401, 403):
+ logger.info(f"Redmine DELETE issue {issueId} forbidden ({status}) -- caller should fall back")
+ return False
+ raise RedmineApiError(status, raw, "DELETE", f"/issues/{issueId}.json")
+
+ # ------------------------------------------------------------------
+ # Relations -- write
+ # ------------------------------------------------------------------
+
+ async def addRelation(
+ self, fromId: int, toId: int, *, relationType: str = "relates", delay: Optional[int] = None
+ ) -> Dict[str, Any]:
+ rel: Dict[str, Any] = {"issue_to_id": toId, "relation_type": relationType}
+ if delay is not None:
+ rel["delay"] = int(delay)
+ status, body, raw = await self._call(
+ "POST", f"/issues/{fromId}/relations.json", payload={"relation": rel}
+ )
+ await self._gentle()
+ if not self._isOk(status) or not body:
+ raise RedmineApiError(status, raw, "POST", f"/issues/{fromId}/relations.json")
+ return body.get("relation", {})
+
+ async def deleteRelation(self, relationId: int) -> bool:
+ status, body, raw = await self._call("DELETE", f"/relations/{relationId}.json")
+ await self._gentle()
+ if status in (200, 204):
+ return True
+ if status in (401, 403):
+ return False
+ raise RedmineApiError(status, raw, "DELETE", f"/relations/{relationId}.json")
+
+ # ------------------------------------------------------------------
+ # TicketBase compliance (used by AI-tool path)
+ # ------------------------------------------------------------------
+
+ async def readAttributes(self) -> List[TicketFieldAttribute]:
+ """Static base attributes + project custom fields (best-effort)."""
+ attrs: List[TicketFieldAttribute] = [
+ TicketFieldAttribute(fieldName="Subject", field="subject"),
+ TicketFieldAttribute(fieldName="Description", field="description"),
+ TicketFieldAttribute(fieldName="Tracker", field="tracker_id"),
+ TicketFieldAttribute(fieldName="Status", field="status_id"),
+ TicketFieldAttribute(fieldName="Priority", field="priority_id"),
+ TicketFieldAttribute(fieldName="Assignee", field="assigned_to_id"),
+ TicketFieldAttribute(fieldName="Parent", field="parent_issue_id"),
+ TicketFieldAttribute(fieldName="Target Version", field="fixed_version_id"),
+ ]
+ try:
+ cfs = await self.getCustomFields()
+ except Exception:
+ cfs = []
+ for cf in cfs:
+ try:
+ attrs.append(
+ TicketFieldAttribute(
+ fieldName=str(cf.get("name", f"cf_{cf.get('id')}")),
+ field=f"cf_{cf.get('id')}",
+ )
+ )
+ except Exception:
+ continue
+ return attrs
+
+ async def readTasks(self, *, limit: int = 0) -> List[Dict[str, Any]]:
+ if limit and limit > 0:
+ envelope = await self.listIssues(limit=limit)
+ return envelope.get("issues", []) or []
+ return await self.listAllIssues()
+
+ async def writeTasks(self, tasklist: List[Dict[str, Any]]) -> None:
+ for task in tasklist:
+ issue_id = task.get("id")
+ fields = {k: v for k, v in task.items() if k != "id"}
+ if issue_id:
+ await self.updateIssue(int(issue_id), fields)
+ else:
+ await self.createIssue(fields)
diff --git a/modules/features/redmine/__init__.py b/modules/features/redmine/__init__.py
new file mode 100644
index 00000000..964637d5
--- /dev/null
+++ b/modules/features/redmine/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine feature container -- ticket browser, statistics, AI tools."""
diff --git a/modules/features/redmine/datamodelRedmine.py b/modules/features/redmine/datamodelRedmine.py
new file mode 100644
index 00000000..0e8a3302
--- /dev/null
+++ b/modules/features/redmine/datamodelRedmine.py
@@ -0,0 +1,530 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine feature data models.
+
+Two layers:
+
+1. **Persisted** (``PowerOnModel``, auto-DDL into ``poweron_redmine``):
+ - ``RedmineInstanceConfig``: per-feature-instance connection + sync state.
+ - ``RedmineTicketMirror``: local mirror of a Redmine issue.
+ - ``RedmineRelationMirror``: local mirror of an issue relation.
+
+2. **Transport** (plain Pydantic): ``Redmine*Dto`` returned over the
+ REST API and shared with the AI tools. The frontend (``RedmineStatsPage``)
+ maps the raw ``RedmineStatsDto`` buckets onto ``ReportSection`` for
+ ``FormGeneratorReport``.
+
+Scale: the mirror tables let us aggregate stats and render the ticket tree
+for projects with 20k+ tickets without round-tripping the Redmine REST API
+on every request.
+"""
+
+import uuid
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field, model_validator
+
+from modules.datamodels.datamodelBase import PowerOnModel
+from modules.shared.i18nRegistry import i18nModel
+
+
+def _coerceNoneToDefaults(cls, values):
+ """Replace None values with each field's declared default.
+
+ Reason: Postgres rows written before we added a column return NULL for
+ that column, which Pydantic v2 rejects for non-Optional fields even if
+    a default is declared. The default is applied when the incoming value
+    is explicitly None. NOTE(review): fields declared with a default_factory
+    expose default=PydanticUndefined (not None), so the check below does NOT
+    skip them as intended -- confirm and guard before relying on this. """
+ if not isinstance(values, dict):
+ return values
+ for name, field in cls.model_fields.items():
+ if name in values and values[name] is None and field.default is not None:
+ values[name] = field.default
+ return values
+
+
+# ---------------------------------------------------------------------------
+# Persisted: per feature-instance Redmine connection config + sync state
+# ---------------------------------------------------------------------------
+
+@i18nModel("Redmine-Verbindung")
+class RedmineInstanceConfig(PowerOnModel):
+ """Per feature-instance Redmine connection config.
+
+ The API key is stored encrypted (``encryptValue`` keyed
+ ``"redmineApiKey"``). It is never returned to the frontend in plain
+ text -- the route returns a boolean ``hasApiKey`` flag instead.
+ """
+
+ @model_validator(mode="before")
+ @classmethod
+ def _applyDefaults(cls, values):
+ return _coerceNoneToDefaults(cls, values)
+
+ id: str = Field(
+ default_factory=lambda: str(uuid.uuid4()),
+ description="Primary key",
+ json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+ )
+ featureInstanceId: str = Field(
+ description="FK -> FeatureInstance.id (1:1 per instance)",
+ json_schema_extra={
+ "label": "Feature-Instanz",
+ "frontend_type": "text",
+ "frontend_readonly": True,
+ "frontend_required": True,
+ "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+ },
+ )
+ mandateId: Optional[str] = Field(
+ default=None,
+ description="Mandate ID (auto-set from feature instance)",
+ json_schema_extra={
+ "label": "Mandant",
+ "frontend_type": "text",
+ "frontend_readonly": True,
+ "frontend_required": False,
+ "fk_target": {"db": "poweron_app", "table": "Mandate"},
+ },
+ )
+ baseUrl: str = Field(
+ default="",
+ description="Redmine base URL, e.g. https://redmine.logobject.ch",
+ json_schema_extra={"label": "Basis-URL", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
+ )
+ projectId: str = Field(
+ default="",
+ description="Redmine numeric project id or identifier (slug)",
+ json_schema_extra={"label": "Projekt-ID", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
+ )
+ encryptedApiKey: str = Field(
+ default="",
+ description="Encrypted Redmine API key (X-Redmine-API-Key)",
+ json_schema_extra={"label": "API-Key (verschluesselt)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
+ )
+ rootTrackerName: str = Field(
+ default="Userstory",
+ description="Name of the tracker used as the tree root in the browser. Set explicitly in config; resolved against the live tracker list at runtime.",
+ json_schema_extra={"label": "Wurzel-Tracker (Name)", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
+ )
+ defaultPeriodValue: Optional[Dict[str, Any]] = Field(
+ default=None,
+ description="Optional snapshot of a frontend ``PeriodValue`` ({preset, fromDate, toDate}) used as default period when the user opens the feature.",
+ json_schema_extra={"label": "Standard-Zeitraum", "frontend_type": "json", "frontend_readonly": False, "frontend_required": False},
+ )
+ schemaCache: Optional[Dict[str, Any]] = Field(
+ default=None,
+ description="Cached project meta: {trackers:[{id,name}], statuses:[{id,name,isClosed}], customFields:[{id,name,fieldFormat,possibleValues}], priorities:[...], users:[{id,name}]}",
+ json_schema_extra={"label": "Schema-Cache", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
+ )
+ schemaCachedAt: Optional[float] = Field(
+ default=None,
+ description="UTC timestamp when schemaCache was last refreshed",
+ json_schema_extra={"label": "Schema-Cache-Zeit", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+ )
+ schemaCacheTtlSeconds: Optional[int] = Field(
+ default=24 * 60 * 60,
+ description="Schema cache TTL in seconds (default 24h). Optional to tolerate NULL rows from auto-DDL upgrades.",
+ json_schema_extra={"label": "Schema-Cache-TTL (s)", "frontend_type": "number", "frontend_readonly": False, "frontend_required": False},
+ )
+ isActive: Optional[bool] = Field(
+ default=True,
+ description="Whether this connection is active",
+ json_schema_extra={"label": "Aktiv", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
+ )
+ lastConnectedAt: Optional[float] = Field(
+ default=None,
+ description="Timestamp of the last successful whoAmI() call",
+ json_schema_extra={"label": "Letzte Verbindung", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+ )
+
+ # ---- Sync state (incremental ticket mirror) ---------------------------
+ lastSyncAt: Optional[float] = Field(
+ default=None,
+ description="UTC timestamp of the last successful (incremental) mirror sync",
+ json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+ )
+ lastFullSyncAt: Optional[float] = Field(
+ default=None,
+ description="UTC timestamp of the last full mirror sync (force=true)",
+ json_schema_extra={"label": "Letzter Full-Sync", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+ )
+ lastSyncDurationMs: Optional[int] = Field(
+ default=None,
+ description="Duration of the last sync in milliseconds",
+ json_schema_extra={"label": "Letzte Sync-Dauer (ms)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
+ )
+ lastSyncTicketCount: Optional[int] = Field(
+ default=None,
+ description="Number of tickets upserted in the last sync",
+ json_schema_extra={"label": "Tickets im letzten Sync", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
+ )
+ lastSyncErrorAt: Optional[float] = Field(
+ default=None,
+ description="UTC timestamp of the last failed sync",
+ json_schema_extra={"label": "Letzter Sync-Fehler", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+ )
+ lastSyncErrorMessage: Optional[str] = Field(
+ default=None,
+ description="Error message of the last failed sync",
+ json_schema_extra={"label": "Letzter Fehler", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+ )
+
+
+@i18nModel("Redmine-Ticket (Mirror)")
+class RedmineTicketMirror(PowerOnModel):
+ """Local mirror of a Redmine issue.
+
+ Composite uniqueness: ``(featureInstanceId, redmineId)``. We do not
+ enforce it via a DB constraint -- the sync logic looks up by these
+ two columns and does an upsert.
+ """
+
+ @model_validator(mode="before")
+ @classmethod
+ def _applyDefaults(cls, values):
+ return _coerceNoneToDefaults(cls, values)
+
+ id: str = Field(
+ default_factory=lambda: str(uuid.uuid4()),
+ description="Primary key",
+ json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+ )
+ featureInstanceId: str = Field(
+ description="FK -> FeatureInstance.id",
+ json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
+ "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+ )
+ mandateId: Optional[str] = Field(
+ default=None,
+ json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+ )
+ redmineId: int = Field(
+ description="Redmine issue id (unique per feature instance)",
+ json_schema_extra={"label": "Redmine-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
+ )
+ subject: str = Field(default="", json_schema_extra={"label": "Titel", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
+ description: str = Field(default="", json_schema_extra={"label": "Beschreibung", "frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False})
+ trackerId: Optional[int] = Field(default=None, json_schema_extra={"label": "Tracker-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ trackerName: Optional[str] = Field(default=None, json_schema_extra={"label": "Tracker", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ statusId: Optional[int] = Field(default=None, json_schema_extra={"label": "Status-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ statusName: Optional[str] = Field(default=None, json_schema_extra={"label": "Status", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ isClosed: bool = Field(default=False, json_schema_extra={"label": "Geschlossen", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False})
+ priorityId: Optional[int] = Field(default=None, json_schema_extra={"label": "Prio-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ priorityName: Optional[str] = Field(default=None, json_schema_extra={"label": "Prioritaet", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ assignedToId: Optional[int] = Field(default=None, json_schema_extra={"label": "Zuweisung-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ assignedToName: Optional[str] = Field(default=None, json_schema_extra={"label": "Zuweisung", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ authorId: Optional[int] = Field(default=None, json_schema_extra={"label": "Autor-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ authorName: Optional[str] = Field(default=None, json_schema_extra={"label": "Autor", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ parentId: Optional[int] = Field(default=None, json_schema_extra={"label": "Parent-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ fixedVersionId: Optional[int] = Field(default=None, json_schema_extra={"label": "Zielversion-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
+ fixedVersionName: Optional[str] = Field(default=None, json_schema_extra={"label": "Zielversion", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
+ createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
+ json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
+ updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
+ json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
+ customFields: Optional[List[Dict[str, Any]]] = Field(
+ default=None,
+ description="List of {id,name,value} as returned by Redmine; stored as JSON",
+ json_schema_extra={"label": "Custom Fields", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False},
+ )
+ raw: Optional[Dict[str, Any]] = Field(
+ default=None,
+ description="Original Redmine issue payload (full)",
+ json_schema_extra={"label": "Roh-Payload", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
+ )
+ syncedAt: Optional[float] = Field(
+ default=None,
+ description="UTC epoch when this row was last upserted from Redmine",
+ json_schema_extra={"label": "Synced At", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+ )
+
+
+@i18nModel("Redmine-Beziehung (Mirror)")
+class RedmineRelationMirror(PowerOnModel):
+    """Local mirror of a Redmine issue relation.
+
+    One row per relation as reported by Redmine; refreshed by the sync
+    service.  All fields are read-only in the frontend.
+
+    Composite uniqueness: ``(featureInstanceId, redmineRelationId)``.
+    """
+
+    # Normalise explicit ``None`` inputs back to the declared field
+    # defaults before validation (shared helper for the mirror models).
+    @model_validator(mode="before")
+    @classmethod
+    def _applyDefaults(cls, values):
+        return _coerceNoneToDefaults(cls, values)
+
+    # Surrogate primary key generated locally (UUID4) -- not a Redmine id.
+    id: str = Field(
+        default_factory=lambda: str(uuid.uuid4()),
+        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+    )
+    featureInstanceId: str = Field(
+        description="FK -> FeatureInstance.id",
+        json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
+                           "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+    )
+    redmineRelationId: int = Field(
+        description="Redmine relation id (unique per feature instance)",
+        json_schema_extra={"label": "Relation-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
+    )
+    issueId: int = Field(
+        description="Source issue id (issue.id from Redmine)",
+        json_schema_extra={"label": "Source-Issue-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
+    )
+    issueToId: int = Field(
+        description="Target issue id (issue_to_id from Redmine)",
+        json_schema_extra={"label": "Target-Issue-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
+    )
+    relationType: str = Field(
+        default="relates",
+        json_schema_extra={"label": "Beziehungstyp", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+    )
+    # Only meaningful for precedes/follows relations (days of delay).
+    delay: Optional[int] = Field(
+        default=None,
+        json_schema_extra={"label": "Verzoegerung (Tage)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
+    )
+    # UTC epoch of the last upsert from Redmine for this row.
+    syncedAt: Optional[float] = Field(
+        default=None,
+        json_schema_extra={"label": "Synced At", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
+    )
+
+
+# ---------------------------------------------------------------------------
+# Transport DTOs (not persisted)
+# ---------------------------------------------------------------------------
+
+class RedmineRelationDto(BaseModel):
+    """Transport view of a Redmine issue relation (not persisted)."""
+
+    id: int = Field(description="Relation id")
+    issueId: int = Field(description="Source issue id (issue.id from Redmine)")
+    issueToId: int = Field(description="Target issue id (issue_to_id from Redmine)")
+    relationType: str = Field(description="relates | precedes | follows | blocks | blocked | duplicates | duplicated | copied_to | copied_from | parent")
+    delay: Optional[int] = Field(default=None, description="Delay in days (precedes/follows only)")
+
+
+class RedmineCustomFieldValueDto(BaseModel):
+    """One custom-field value on an issue, as delivered by Redmine."""
+
+    id: int
+    name: str
+    # Value shape depends on the field format (string, list, bool, ...).
+    value: Any = None
+
+
+class RedmineTicketDto(BaseModel):
+    """Normalised Redmine issue used by the UI and the AI tools.
+
+    Id/name pairs are flattened from Redmine's nested objects; ``raw``
+    carries the original payload for anything not modelled here.
+    """
+
+    id: int = Field(description="Redmine issue id")
+    subject: str = Field(default="")
+    description: str = Field(default="")
+    trackerId: Optional[int] = None
+    trackerName: Optional[str] = None
+    statusId: Optional[int] = None
+    statusName: Optional[str] = None
+    isClosed: bool = False
+    priorityId: Optional[int] = None
+    priorityName: Optional[str] = None
+    assignedToId: Optional[int] = None
+    assignedToName: Optional[str] = None
+    authorId: Optional[int] = None
+    authorName: Optional[str] = None
+    parentId: Optional[int] = None
+    fixedVersionId: Optional[int] = None
+    fixedVersionName: Optional[str] = None
+    # Timestamp strings as delivered by Redmine (not parsed here).
+    createdOn: Optional[str] = None
+    updatedOn: Optional[str] = None
+    customFields: List[RedmineCustomFieldValueDto] = Field(default_factory=list)
+    relations: List[RedmineRelationDto] = Field(default_factory=list)
+    # Full original issue payload, when available.
+    raw: Optional[Dict[str, Any]] = None
+
+
+class RedmineFieldChoiceDto(BaseModel):
+    """Generic id/name choice (tracker, status, priority, user)."""
+
+    id: int
+    name: str
+    isClosed: Optional[bool] = Field(default=None, description="Status only: closed-state flag")
+
+
+class RedmineCustomFieldSchemaDto(BaseModel):
+    """Definition of a Redmine custom field (schema, not a value)."""
+
+    id: int
+    name: str
+    # Redmine field_format, e.g. "string", "list", "bool".
+    fieldFormat: str = Field(default="string")
+    isRequired: bool = False
+    possibleValues: List[str] = Field(default_factory=list)
+    multiple: bool = False
+    defaultValue: Optional[str] = None
+
+
+class RedmineFieldSchemaDto(BaseModel):
+    """Project meta returned by ``getProjectMeta``.
+
+    Aggregates every choice list a ticket form needs in one payload.
+    """
+
+    projectId: str
+    projectName: str = ""
+    trackers: List[RedmineFieldChoiceDto] = Field(default_factory=list)
+    statuses: List[RedmineFieldChoiceDto] = Field(default_factory=list)
+    priorities: List[RedmineFieldChoiceDto] = Field(default_factory=list)
+    users: List[RedmineFieldChoiceDto] = Field(default_factory=list)
+    customFields: List[RedmineCustomFieldSchemaDto] = Field(default_factory=list)
+    # Configured root tracker; id resolved against ``trackers`` if present.
+    rootTrackerName: str = "Userstory"
+    rootTrackerId: Optional[int] = Field(default=None, description="Resolved id of the configured rootTrackerName, or None if no matching tracker exists")
+
+
+# ---------------------------------------------------------------------------
+# Stats DTO -- raw buckets, mapped to ReportSection in the frontend
+# ---------------------------------------------------------------------------
+
+class RedmineStatsKpis(BaseModel):
+    """Headline counters for the statistics page."""
+
+    total: int = 0
+    open: int = 0
+    closed: int = 0
+    closedInPeriod: int = 0
+    createdInPeriod: int = 0
+    orphans: int = 0
+
+
+class RedmineStatusByTrackerEntry(BaseModel):
+    """Per-tracker status breakdown (status name -> count)."""
+
+    trackerId: Optional[int] = None
+    trackerName: str = ""
+    countsByStatus: Dict[str, int] = Field(default_factory=dict)
+    total: int = 0
+
+
+class RedmineThroughputBucket(BaseModel):
+    """Created/closed counts for one time bucket of the throughput chart."""
+
+    bucketKey: str
+    label: str
+    created: int = 0
+    closed: int = 0
+
+
+class RedmineAssigneeBucket(BaseModel):
+    """Open-ticket count per assignee; None id means unassigned."""
+
+    assignedToId: Optional[int] = None
+    name: str = "(nicht zugewiesen)"
+    open: int = 0
+
+
+class RedmineRelationDistributionEntry(BaseModel):
+    """Count of relations per relation type."""
+
+    relationType: str
+    count: int = 0
+
+
+class RedmineAgingBucket(BaseModel):
+    """Backlog-aging bucket; ``maxDays`` is None for the open-ended tail."""
+
+    bucketKey: str
+    label: str
+    minDays: int
+    maxDays: Optional[int] = None
+    count: int = 0
+
+
+class RedmineStatsDto(BaseModel):
+    """All sections needed by the Statistics page in one round-trip.
+
+    The first group of fields echoes the request parameters so the client
+    can tell which period/filters the buckets were computed for.
+    """
+
+    instanceId: str
+    dateFrom: Optional[str] = None
+    dateTo: Optional[str] = None
+    bucket: str = "week"
+    trackerIds: List[int] = Field(default_factory=list)
+
+    # Computed sections, one per chart/panel on the page.
+    kpis: RedmineStatsKpis = Field(default_factory=RedmineStatsKpis)
+    statusByTracker: List[RedmineStatusByTrackerEntry] = Field(default_factory=list)
+    throughput: List[RedmineThroughputBucket] = Field(default_factory=list)
+    topAssignees: List[RedmineAssigneeBucket] = Field(default_factory=list)
+    relationDistribution: List[RedmineRelationDistributionEntry] = Field(default_factory=list)
+    backlogAging: List[RedmineAgingBucket] = Field(default_factory=list)
+
+
+# ---------------------------------------------------------------------------
+# Sync DTO
+# ---------------------------------------------------------------------------
+
+class RedmineSyncResultDto(BaseModel):
+    """Outcome of one sync run (full or incremental)."""
+
+    instanceId: str
+    full: bool = Field(description="True if a full sync was performed (no incremental cursor)")
+    ticketsUpserted: int = 0
+    relationsUpserted: int = 0
+    durationMs: int = 0
+    # UTC epoch of this run's completion.
+    lastSyncAt: float
+    error: Optional[str] = None
+
+
+class RedmineSyncStatusDto(BaseModel):
+    """Current sync state of an instance (timestamps are UTC epochs)."""
+
+    instanceId: str
+    lastSyncAt: Optional[float] = None
+    lastFullSyncAt: Optional[float] = None
+    lastSyncDurationMs: Optional[int] = None
+    lastSyncTicketCount: Optional[int] = None
+    lastSyncErrorAt: Optional[float] = None
+    lastSyncErrorMessage: Optional[str] = None
+    mirroredTicketCount: int = 0
+    mirroredRelationCount: int = 0
+
+
+# ---------------------------------------------------------------------------
+# Request bodies
+# ---------------------------------------------------------------------------
+
+class RedmineConfigUpdateRequest(BaseModel):
+    """PUT body for the config endpoint. Fields are all optional -- only
+    provided ones are updated. ``apiKey`` is encrypted before persistence."""
+
+    baseUrl: Optional[str] = None
+    projectId: Optional[str] = None
+    apiKey: Optional[str] = Field(default=None, description="Plain api key; will be encrypted server-side")
+    rootTrackerName: Optional[str] = None
+    defaultPeriodValue: Optional[Dict[str, Any]] = None
+    # TTL for the cached project schema, in seconds.
+    schemaCacheTtlSeconds: Optional[int] = None
+    isActive: Optional[bool] = None
+
+
+class RedmineConfigDto(BaseModel):
+    """Frontend-safe view of the config (no plain api key).
+
+    ``hasApiKey`` only signals whether a key is stored; the key itself is
+    never returned to the client.
+    """
+
+    id: Optional[str] = None
+    featureInstanceId: str
+    mandateId: Optional[str] = None
+    baseUrl: str = ""
+    projectId: str = ""
+    hasApiKey: bool = False
+    rootTrackerName: str = "Userstory"
+    defaultPeriodValue: Optional[Dict[str, Any]] = None
+    # Default schema-cache TTL: 24 hours.
+    schemaCacheTtlSeconds: int = 24 * 60 * 60
+    schemaCachedAt: Optional[float] = None
+    isActive: bool = True
+    lastConnectedAt: Optional[float] = None
+    lastSyncAt: Optional[float] = None
+    lastFullSyncAt: Optional[float] = None
+    lastSyncTicketCount: Optional[int] = None
+    lastSyncErrorMessage: Optional[str] = None
+
+
+class RedmineTicketUpdateRequest(BaseModel):
+    """Body for ``PUT /tickets/{id}``.
+
+    All fields optional; only the ones provided are sent to Redmine.
+    """
+
+    subject: Optional[str] = None
+    description: Optional[str] = None
+    trackerId: Optional[int] = None
+    statusId: Optional[int] = None
+    priorityId: Optional[int] = None
+    assignedToId: Optional[int] = None
+    parentIssueId: Optional[int] = None
+    fixedVersionId: Optional[int] = None
+    # Journal note to attach to the update.
+    notes: Optional[str] = None
+    # Keyed by custom field id; value shape depends on the field format.
+    customFields: Optional[Dict[int, Any]] = None
+
+
+class RedmineTicketCreateRequest(BaseModel):
+    """Body for ``POST /tickets``.
+
+    ``subject`` and ``trackerId`` are mandatory; everything else falls
+    back to Redmine's project defaults when omitted.
+    """
+
+    subject: str
+    trackerId: int
+    description: Optional[str] = ""
+    statusId: Optional[int] = None
+    priorityId: Optional[int] = None
+    assignedToId: Optional[int] = None
+    parentIssueId: Optional[int] = None
+    fixedVersionId: Optional[int] = None
+    # Keyed by custom field id; value shape depends on the field format.
+    customFields: Optional[Dict[int, Any]] = None
+
+
+class RedmineRelationCreateRequest(BaseModel):
+    """Body for ``POST /tickets/{id}/relations``."""
+
+    issueToId: int
+    relationType: str = Field(default="relates")
+    # Days of delay; only meaningful for precedes/follows relations.
+    delay: Optional[int] = None
diff --git a/modules/features/redmine/interfaceFeatureRedmine.py b/modules/features/redmine/interfaceFeatureRedmine.py
new file mode 100644
index 00000000..0f3991a2
--- /dev/null
+++ b/modules/features/redmine/interfaceFeatureRedmine.py
@@ -0,0 +1,449 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Interface for the Redmine feature.
+
+Owns:
+- Database connection to ``poweron_redmine``
+- CRUD on ``RedmineInstanceConfig`` (one row per FeatureInstance)
+- Encryption of the API key (``encryptValue`` keyed ``"redmineApiKey"``)
+- Resolution of the active config to a ``ConnectorTicketsRedmine`` instance
+"""
+
+from __future__ import annotations
+
+import logging
+import time
+from typing import Any, Dict, Optional
+
+from modules.connectors.connectorDbPostgre import DatabaseConnector
+from modules.connectors.connectorTicketsRedmine import ConnectorTicketsRedmine
+from modules.datamodels.datamodelUam import User
+from modules.features.redmine.datamodelRedmine import (
+ RedmineConfigDto,
+ RedmineConfigUpdateRequest,
+ RedmineInstanceConfig,
+ RedmineRelationMirror,
+ RedmineTicketMirror,
+)
+from modules.security.rbac import RbacClass
+from modules.shared.configuration import APP_CONFIG, decryptValue, encryptValue
+from modules.shared.dbRegistry import registerDatabase
+
+logger = logging.getLogger(__name__)
+
+
+# Dedicated database for the Redmine feature; registration makes the
+# platform create/migrate it at startup.
+redmineDatabase = "poweron_redmine"
+registerDatabase(redmineDatabase)
+
+
+# Process-wide cache of interface objects, keyed per mandate/instance/user.
+# NOTE(review): entries are never evicted -- grows with distinct users.
+_redmineInterfaces: Dict[str, "RedmineObjects"] = {}
+
+
+class RedmineObjects:
+    """Per-user, per-instance Redmine interface.
+
+    Owns the ``poweron_redmine`` DB connection plus CRUD for the instance
+    config, the ticket mirror and the relation mirror.  Instances are
+    cached by ``getInterface`` and reused across calls for the same
+    mandate/instance/user combination.
+    """
+
+    def __init__(
+        self,
+        currentUser: User,
+        mandateId: Optional[str] = None,
+        featureInstanceId: Optional[str] = None,
+    ) -> None:
+        """Open the Redmine DB connection and bind RBAC to the caller."""
+        self.currentUser = currentUser
+        self.userId = currentUser.id if currentUser else None
+        self.mandateId = mandateId
+        self.featureInstanceId = featureInstanceId
+
+        self._initializeDatabase()
+
+        # Function-scope import -- presumably to avoid an import cycle at
+        # module load time (TODO confirm).
+        from modules.security.rootAccess import getRootDbAppConnector
+        dbApp = getRootDbAppConnector()
+        self.rbac = RbacClass(self.db, dbApp=dbApp)
+
+        self.db.updateContext(self.userId)
+
+    # ------------------------------------------------------------------
+    # DB bootstrap
+    # ------------------------------------------------------------------
+
+    def _initializeDatabase(self) -> None:
+        """Create the connector for ``poweron_redmine`` from APP_CONFIG."""
+        self.db = DatabaseConnector(
+            dbHost=APP_CONFIG.get("DB_HOST", "_no_config_default_data"),
+            dbDatabase=redmineDatabase,
+            dbUser=APP_CONFIG.get("DB_USER"),
+            dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
+            dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
+            userId=self.userId,
+        )
+        logger.debug(f"Redmine database initialized for user {self.userId}")
+
+    def setUserContext(
+        self,
+        currentUser: User,
+        mandateId: Optional[str] = None,
+        featureInstanceId: Optional[str] = None,
+    ) -> None:
+        """Rebind a cached interface to a (possibly new) user context."""
+        self.currentUser = currentUser
+        self.userId = currentUser.id if currentUser else None
+        self.mandateId = mandateId
+        self.featureInstanceId = featureInstanceId
+        self.db.updateContext(self.userId)
+
+    # ------------------------------------------------------------------
+    # Config CRUD
+    # ------------------------------------------------------------------
+
+    def _findConfigRecord(self, featureInstanceId: str) -> Optional[Dict[str, Any]]:
+        """Return the raw config row for the instance, or None."""
+        records = self.db.getRecordset(
+            RedmineInstanceConfig,
+            recordFilter={"featureInstanceId": featureInstanceId},
+        )
+        if not records:
+            return None
+        return dict(records[0])
+
+    def getConfig(self, featureInstanceId: str) -> Optional[RedmineInstanceConfig]:
+        """Return the typed config for the instance, or None if unset."""
+        record = self._findConfigRecord(featureInstanceId)
+        if not record:
+            return None
+        # Drop DB-internal keys (leading underscore) before validation.
+        return RedmineInstanceConfig(**{k: v for k, v in record.items() if not k.startswith("_")})
+
+    def getConfigDto(self, featureInstanceId: str) -> RedmineConfigDto:
+        """Return a frontend-safe config view; an empty DTO when no row exists."""
+        cfg = self.getConfig(featureInstanceId)
+        if not cfg:
+            return RedmineConfigDto(
+                featureInstanceId=featureInstanceId,
+                mandateId=self.mandateId,
+            )
+        return RedmineConfigDto(
+            id=cfg.id,
+            featureInstanceId=cfg.featureInstanceId,
+            mandateId=cfg.mandateId,
+            baseUrl=cfg.baseUrl or "",
+            projectId=cfg.projectId or "",
+            # Only signal presence of a key -- never the key itself.
+            hasApiKey=bool(cfg.encryptedApiKey),
+            rootTrackerName=cfg.rootTrackerName or "Userstory",
+            defaultPeriodValue=cfg.defaultPeriodValue,
+            schemaCacheTtlSeconds=cfg.schemaCacheTtlSeconds if cfg.schemaCacheTtlSeconds is not None else 24 * 60 * 60,
+            schemaCachedAt=cfg.schemaCachedAt,
+            isActive=cfg.isActive if cfg.isActive is not None else True,
+            lastConnectedAt=cfg.lastConnectedAt,
+            lastSyncAt=cfg.lastSyncAt,
+            lastFullSyncAt=cfg.lastFullSyncAt,
+            lastSyncTicketCount=cfg.lastSyncTicketCount,
+            lastSyncErrorMessage=cfg.lastSyncErrorMessage,
+        )
+
+    def upsertConfig(
+        self,
+        featureInstanceId: str,
+        update: RedmineConfigUpdateRequest,
+    ) -> RedmineConfigDto:
+        """Create or patch the instance config from the request body.
+
+        Only fields present in ``update`` are written.  Raises
+        ``ValueError`` for an empty ``rootTrackerName``.  An explicitly
+        empty ``apiKey`` clears the stored key; a non-empty one is
+        encrypted before persistence.
+        """
+        existing = self._findConfigRecord(featureInstanceId)
+
+        data: Dict[str, Any] = {}
+        if update.baseUrl is not None:
+            # Normalise: no trailing slash so path joins stay predictable.
+            data["baseUrl"] = update.baseUrl.strip().rstrip("/")
+        if update.projectId is not None:
+            data["projectId"] = update.projectId.strip()
+        if update.rootTrackerName is not None:
+            cleaned = update.rootTrackerName.strip()
+            if not cleaned:
+                raise ValueError("rootTrackerName must not be empty")
+            data["rootTrackerName"] = cleaned
+        if update.defaultPeriodValue is not None:
+            data["defaultPeriodValue"] = update.defaultPeriodValue
+        if update.schemaCacheTtlSeconds is not None:
+            data["schemaCacheTtlSeconds"] = int(update.schemaCacheTtlSeconds)
+        if update.isActive is not None:
+            data["isActive"] = bool(update.isActive)
+
+        if update.apiKey is not None:
+            apiKey = update.apiKey.strip()
+            if apiKey == "":
+                # Empty string is the explicit "remove the key" signal.
+                data["encryptedApiKey"] = ""
+            else:
+                data["encryptedApiKey"] = encryptValue(
+                    apiKey,
+                    userId=self.userId or "system",
+                    keyName="redmineApiKey",
+                )
+
+        if existing:
+            self.db.recordModify(RedmineInstanceConfig, existing["id"], data)
+        else:
+            # Seed from model defaults, then overlay the provided fields.
+            seed = RedmineInstanceConfig(
+                featureInstanceId=featureInstanceId,
+                mandateId=self.mandateId,
+            ).model_dump()
+            seed.update(data)
+            self.db.recordCreate(RedmineInstanceConfig, seed)
+
+        return self.getConfigDto(featureInstanceId)
+
+    def markConfigInvalid(self, featureInstanceId: str, reason: str = "") -> None:
+        """Clear ``lastConnectedAt`` to flag the connection as unverified."""
+        existing = self._findConfigRecord(featureInstanceId)
+        if not existing:
+            return
+        self.db.recordModify(
+            RedmineInstanceConfig,
+            existing["id"],
+            {"lastConnectedAt": None},
+        )
+        if reason:
+            logger.warning(f"Redmine config {featureInstanceId} invalidated: {reason}")
+
+    def markConfigConnected(self, featureInstanceId: str) -> None:
+        """Stamp ``lastConnectedAt`` after a successful connection test."""
+        existing = self._findConfigRecord(featureInstanceId)
+        if not existing:
+            return
+        self.db.recordModify(
+            RedmineInstanceConfig,
+            existing["id"],
+            {"lastConnectedAt": time.time()},
+        )
+
+    def updateSchemaCache(self, featureInstanceId: str, schema: Dict[str, Any]) -> None:
+        """Persist the fetched project schema together with its fetch time."""
+        existing = self._findConfigRecord(featureInstanceId)
+        if not existing:
+            return
+        self.db.recordModify(
+            RedmineInstanceConfig,
+            existing["id"],
+            {"schemaCache": schema, "schemaCachedAt": time.time()},
+        )
+
+    # ------------------------------------------------------------------
+    # Connector resolution
+    # ------------------------------------------------------------------
+
+    def _decryptApiKey(self, encryptedApiKey: str) -> str:
+        """Decrypt the stored api key; return "" on failure (never raises)."""
+        if not encryptedApiKey:
+            return ""
+        try:
+            return decryptValue(
+                encryptedApiKey,
+                userId=self.userId or "system",
+                keyName="redmineApiKey",
+            )
+        except Exception as e:
+            logger.error(f"Failed to decrypt Redmine api key: {e}")
+            return ""
+
+    def resolveConnector(
+        self, featureInstanceId: str
+    ) -> Optional[ConnectorTicketsRedmine]:
+        """Build a REST connector from the active config.
+
+        Returns None when the config is missing, inactive, incomplete, or
+        the api key cannot be decrypted -- callers must handle None.
+        """
+        cfg = self.getConfig(featureInstanceId)
+        if not cfg or not cfg.isActive:
+            return None
+        if not cfg.baseUrl or not cfg.projectId or not cfg.encryptedApiKey:
+            return None
+        apiKey = self._decryptApiKey(cfg.encryptedApiKey)
+        if not apiKey:
+            return None
+        return ConnectorTicketsRedmine(
+            baseUrl=cfg.baseUrl,
+            apiKey=apiKey,
+            projectId=cfg.projectId,
+        )
+
+    def deleteConfig(self, featureInstanceId: str) -> bool:
+        """Delete the config row; True if a row existed."""
+        existing = self._findConfigRecord(featureInstanceId)
+        if not existing:
+            return False
+        self.db.recordDelete(RedmineInstanceConfig, existing["id"])
+        return True
+
+    # ------------------------------------------------------------------
+    # Sync state
+    # ------------------------------------------------------------------
+
+    def recordSyncSuccess(
+        self,
+        featureInstanceId: str,
+        *,
+        full: bool,
+        ticketsUpserted: int,
+        durationMs: int,
+        lastSyncAt: float,
+    ) -> None:
+        """Persist a successful sync run and clear any previous error."""
+        existing = self._findConfigRecord(featureInstanceId)
+        if not existing:
+            return
+        update: Dict[str, Any] = {
+            "lastSyncAt": float(lastSyncAt),
+            "lastSyncDurationMs": int(durationMs),
+            "lastSyncTicketCount": int(ticketsUpserted),
+            "lastSyncErrorAt": None,
+            "lastSyncErrorMessage": None,
+        }
+        if full:
+            update["lastFullSyncAt"] = float(lastSyncAt)
+        self.db.recordModify(RedmineInstanceConfig, existing["id"], update)
+
+    def recordSyncFailure(self, featureInstanceId: str, message: str) -> None:
+        """Persist a failed sync run (message truncated to 1000 chars)."""
+        existing = self._findConfigRecord(featureInstanceId)
+        if not existing:
+            return
+        self.db.recordModify(
+            RedmineInstanceConfig,
+            existing["id"],
+            {
+                "lastSyncErrorAt": time.time(),
+                "lastSyncErrorMessage": message[:1000] if message else "unknown error",
+            },
+        )
+
+    # ------------------------------------------------------------------
+    # Ticket mirror CRUD
+    # ------------------------------------------------------------------
+
+    def _findMirroredTicket(
+        self, featureInstanceId: str, redmineId: int
+    ) -> Optional[Dict[str, Any]]:
+        """Return the mirror row for (instance, redmine issue id), or None."""
+        records = self.db.getRecordset(
+            RedmineTicketMirror,
+            recordFilter={"featureInstanceId": featureInstanceId, "redmineId": int(redmineId)},
+        )
+        if not records:
+            return None
+        return dict(records[0])
+
+    def upsertMirroredTicket(
+        self,
+        featureInstanceId: str,
+        redmineId: int,
+        record: Dict[str, Any],
+    ) -> str:
+        """Insert or update a mirror row; returns the local row id.
+
+        NOTE(review): on the create path this can return None when
+        ``recordCreate`` does not return a dict and ``record`` has no
+        "id" -- despite the ``-> str`` annotation.  Confirm callers
+        tolerate a None return.
+        """
+        existing = self._findMirroredTicket(featureInstanceId, redmineId)
+        if existing:
+            # Never overwrite the local surrogate key.
+            update = {k: v for k, v in record.items() if k not in {"id"}}
+            self.db.recordModify(RedmineTicketMirror, existing["id"], update)
+            return existing["id"]
+        else:
+            new = self.db.recordCreate(RedmineTicketMirror, record)
+            return new.get("id") if isinstance(new, dict) else record.get("id")
+
+    def deleteMirroredTicket(self, featureInstanceId: str, redmineId: int) -> bool:
+        """Delete one mirror row; True if it existed."""
+        existing = self._findMirroredTicket(featureInstanceId, redmineId)
+        if not existing:
+            return False
+        self.db.recordDelete(RedmineTicketMirror, existing["id"])
+        return True
+
+    def listMirroredTickets(
+        self,
+        featureInstanceId: str,
+        *,
+        trackerIds: Optional[list] = None,
+        statusIds: Optional[list] = None,
+        assigneeId: Optional[int] = None,
+        updatedFromTs: Optional[float] = None,
+        updatedToTs: Optional[float] = None,
+    ) -> list:
+        """Return mirror rows for the instance, filtered in memory.
+
+        NOTE(review): fetches every row for the instance and filters in
+        Python; for large mirrors pushing the filters into the recordFilter
+        would be cheaper -- TODO confirm the connector supports that.
+        Rows without ``updatedOnTs`` are excluded by either time bound.
+        """
+        recordFilter: Dict[str, Any] = {"featureInstanceId": featureInstanceId}
+        records = self.db.getRecordset(RedmineTicketMirror, recordFilter=recordFilter)
+        out = []
+        for r in records or []:
+            d = dict(r)
+            if trackerIds and d.get("trackerId") not in trackerIds:
+                continue
+            if statusIds and d.get("statusId") not in statusIds:
+                continue
+            if assigneeId is not None and d.get("assignedToId") != assigneeId:
+                continue
+            uts = d.get("updatedOnTs")
+            if updatedFromTs is not None and (uts is None or uts < updatedFromTs):
+                continue
+            if updatedToTs is not None and (uts is None or uts > updatedToTs):
+                continue
+            out.append(d)
+        return out
+
+    def countMirroredTickets(self, featureInstanceId: str) -> int:
+        """Count mirror rows (loads the full recordset to count)."""
+        records = self.db.getRecordset(
+            RedmineTicketMirror,
+            recordFilter={"featureInstanceId": featureInstanceId},
+        )
+        return len(records or [])
+
+    # ------------------------------------------------------------------
+    # Relation mirror CRUD
+    # ------------------------------------------------------------------
+
+    def insertMirroredRelation(self, featureInstanceId: str, record: Dict[str, Any]) -> None:
+        """Insert one relation mirror row (no duplicate check here)."""
+        self.db.recordCreate(RedmineRelationMirror, record)
+
+    def deleteMirroredRelationsForIssue(self, featureInstanceId: str, issueId: int) -> int:
+        """Delete all relations where the issue is source or target.
+
+        Returns the number of rows deleted; the ``seen`` set guards
+        against deleting a row twice when it matches both queries.
+        """
+        records_a = self.db.getRecordset(
+            RedmineRelationMirror,
+            recordFilter={"featureInstanceId": featureInstanceId, "issueId": int(issueId)},
+        ) or []
+        records_b = self.db.getRecordset(
+            RedmineRelationMirror,
+            recordFilter={"featureInstanceId": featureInstanceId, "issueToId": int(issueId)},
+        ) or []
+        deleted = 0
+        seen = set()
+        for r in list(records_a) + list(records_b):
+            rid = r.get("id")
+            if not rid or rid in seen:
+                continue
+            seen.add(rid)
+            self.db.recordDelete(RedmineRelationMirror, rid)
+            deleted += 1
+        return deleted
+
+    def listMirroredRelations(self, featureInstanceId: str) -> list:
+        """Return all relation mirror rows for the instance as dicts."""
+        records = self.db.getRecordset(
+            RedmineRelationMirror,
+            recordFilter={"featureInstanceId": featureInstanceId},
+        )
+        return [dict(r) for r in (records or [])]
+
+    def countMirroredRelations(self, featureInstanceId: str) -> int:
+        """Count relation mirror rows (loads the full recordset to count)."""
+        return len(self.db.getRecordset(
+            RedmineRelationMirror,
+            recordFilter={"featureInstanceId": featureInstanceId},
+        ) or [])
+
+    def deleteMirroredRelationByRedmineId(
+        self, featureInstanceId: str, redmineRelationId: int
+    ) -> bool:
+        """Delete the row matching the Redmine relation id; True if found."""
+        records = self.db.getRecordset(
+            RedmineRelationMirror,
+            recordFilter={"featureInstanceId": featureInstanceId, "redmineRelationId": int(redmineRelationId)},
+        )
+        if not records:
+            return False
+        self.db.recordDelete(RedmineRelationMirror, records[0]["id"])
+        return True
+
+
+def getInterface(
+ currentUser: Optional[User] = None,
+ mandateId: Optional[str] = None,
+ featureInstanceId: Optional[str] = None,
+) -> RedmineObjects:
+ if not currentUser:
+ raise ValueError("Invalid user context: user is required")
+
+ effectiveMandateId = str(mandateId) if mandateId else None
+ effectiveFeatureInstanceId = str(featureInstanceId) if featureInstanceId else None
+
+ contextKey = (
+ f"redmine_{effectiveMandateId}_{effectiveFeatureInstanceId}_{currentUser.id}"
+ )
+
+ if contextKey not in _redmineInterfaces:
+ _redmineInterfaces[contextKey] = RedmineObjects(
+ currentUser,
+ mandateId=effectiveMandateId,
+ featureInstanceId=effectiveFeatureInstanceId,
+ )
+ else:
+ _redmineInterfaces[contextKey].setUserContext(
+ currentUser,
+ mandateId=effectiveMandateId,
+ featureInstanceId=effectiveFeatureInstanceId,
+ )
+ return _redmineInterfaces[contextKey]
diff --git a/modules/features/redmine/mainRedmine.py b/modules/features/redmine/mainRedmine.py
new file mode 100644
index 00000000..ed60d7b0
--- /dev/null
+++ b/modules/features/redmine/mainRedmine.py
@@ -0,0 +1,329 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine Feature Container -- Main Module.
+
+Defines the feature metadata and registers RBAC objects + template roles
+in the catalog. Loaded automatically by ``modules.system.registry``.
+"""
+
+from __future__ import annotations
+
+import logging
+from typing import Any, Dict, List
+
+logger = logging.getLogger(__name__)
+
+
+# Feature identity as shown in the registry and the UI shell.
+FEATURE_CODE = "redmine"
+FEATURE_LABEL = "Redmine"
+FEATURE_ICON = "mdi-bug-outline"
+
+
+# RBAC UI objects: one entry per feature area (stats is the default view;
+# settings is flagged admin-only).
+UI_OBJECTS: List[Dict[str, Any]] = [
+    {"objectKey": "ui.feature.redmine.stats", "label": "Statistik", "meta": {"area": "stats", "isDefault": True}},
+    {"objectKey": "ui.feature.redmine.browser", "label": "Ticket-Browser", "meta": {"area": "browser"}},
+    {"objectKey": "ui.feature.redmine.settings", "label": "Einstellungen", "meta": {"area": "settings", "admin_only": True}},
+]
+
+
+# RBAC data objects: a grouping node, one entry per mirrored table, and a
+# wildcard covering every redmine data table.
+DATA_OBJECTS: List[Dict[str, Any]] = [
+    {"objectKey": "data.feature.redmine.config", "label": "Konfiguration", "meta": {"isGroup": True}},
+    {
+        "objectKey": "data.feature.redmine.RedmineInstanceConfig",
+        "label": "Redmine-Verbindung",
+        "meta": {
+            "table": "RedmineInstanceConfig",
+            "group": "data.feature.redmine.config",
+            "fields": ["id", "baseUrl", "projectId", "rootTrackerName", "isActive", "lastConnectedAt", "lastSyncAt"],
+        },
+    },
+    {
+        "objectKey": "data.feature.redmine.RedmineTicketMirror",
+        "label": "Redmine-Tickets (Mirror)",
+        "meta": {
+            "table": "RedmineTicketMirror",
+            "group": "data.feature.redmine.config",
+            "fields": ["redmineId", "subject", "trackerName", "statusName", "assignedToName", "updatedOn"],
+        },
+    },
+    {
+        "objectKey": "data.feature.redmine.RedmineRelationMirror",
+        "label": "Redmine-Beziehungen (Mirror)",
+        "meta": {
+            "table": "RedmineRelationMirror",
+            "group": "data.feature.redmine.config",
+            "fields": ["redmineRelationId", "issueId", "issueToId", "relationType"],
+        },
+    },
+    {
+        "objectKey": "data.feature.redmine.*",
+        "label": "Alle Redmine-Daten",
+        "meta": {"wildcard": True, "description": "Wildcard for all redmine data tables"},
+    },
+]
+
+
+# RBAC resource objects: one entry per REST endpoint/operation; config and
+# sync operations are flagged admin-only.
+RESOURCE_OBJECTS: List[Dict[str, Any]] = [
+    {
+        "objectKey": "resource.feature.redmine.tickets.read",
+        "label": "Tickets lesen",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets", "method": "GET"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.tickets.create",
+        "label": "Tickets erstellen",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets", "method": "POST"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.tickets.update",
+        "label": "Tickets bearbeiten",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}", "method": "PUT"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.tickets.delete",
+        "label": "Tickets loeschen / archivieren",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}", "method": "DELETE"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.relations.manage",
+        "label": "Beziehungen verwalten",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}/relations", "method": "ALL"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.stats.read",
+        "label": "Statistik einsehen",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/stats", "method": "GET"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.config.manage",
+        "label": "Verbindung verwalten",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/config", "method": "ALL", "admin_only": True},
+    },
+    {
+        "objectKey": "resource.feature.redmine.config.test",
+        "label": "Verbindung testen",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/config/test", "method": "POST", "admin_only": True},
+    },
+    {
+        "objectKey": "resource.feature.redmine.sync.run",
+        "label": "Mirror synchronisieren",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/sync", "method": "POST", "admin_only": True},
+    },
+    {
+        "objectKey": "resource.feature.redmine.sync.status",
+        "label": "Sync-Status lesen",
+        "meta": {"endpoint": "/api/redmine/{instanceId}/sync/status", "method": "GET"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.workflows.view",
+        "label": "Workflows einsehen",
+        "meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "GET"},
+    },
+    {
+        "objectKey": "resource.feature.redmine.workflows.execute",
+        "label": "Workflows ausfuehren",
+        "meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"},
+    },
+]
+
+
+# Role templates synced into the RBAC DB by _syncTemplateRolesToDb.
+# NOTE(review): DATA access levels "n"/"m"/"g"/"a" follow the platform's
+# RBAC scale -- confirm their exact meanings against datamodelRbac.
+TEMPLATE_ROLES: List[Dict[str, Any]] = [
+    {
+        "roleLabel": "redmine-viewer",
+        "description": "Redmine-Betrachter -- Tickets und Statistik lesen",
+        "accessRules": [
+            {"context": "UI", "item": "ui.feature.redmine.stats", "view": True},
+            {"context": "UI", "item": "ui.feature.redmine.browser", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.read", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.stats.read", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.sync.status", "view": True},
+            {"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
+        ],
+    },
+    {
+        "roleLabel": "redmine-editor",
+        "description": "Redmine-Bearbeiter -- Tickets erstellen, bearbeiten, Beziehungen pflegen",
+        "accessRules": [
+            {"context": "UI", "item": "ui.feature.redmine.stats", "view": True},
+            {"context": "UI", "item": "ui.feature.redmine.browser", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.read", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.create", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.update", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.delete", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.relations.manage", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.stats.read", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.sync.status", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.view", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.execute", "view": True},
+            {"context": "DATA", "item": None, "view": True, "read": "g", "create": "g", "update": "g", "delete": "n"},
+        ],
+    },
+    {
+        "roleLabel": "redmine-admin",
+        "description": "Redmine-Administrator -- Vollzugriff inkl. Einstellungen und Verbindung",
+        "accessRules": [
+            {"context": "UI", "item": None, "view": True},
+            {"context": "DATA", "item": None, "view": True, "read": "a", "create": "a", "update": "a", "delete": "a"},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.config.manage", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.config.test", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.create", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.update", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.delete", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.relations.manage", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.view", "view": True},
+            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.execute", "view": True},
+        ],
+    },
+]
+
+
+# ---------------------------------------------------------------------------
+# Public discovery API (called by registry.py)
+# ---------------------------------------------------------------------------
+
def getFeatureDefinition() -> Dict[str, Any]:
    """Describe this feature to the registry: code, label and icon."""
    definition: Dict[str, Any] = {
        "code": FEATURE_CODE,
        "label": FEATURE_LABEL,
        "icon": FEATURE_ICON,
    }
    return definition
+
+
def getUiObjects() -> List[Dict[str, Any]]:
    """Expose the module-level UI object catalog to the registry."""
    return UI_OBJECTS
+
+
def getResourceObjects() -> List[Dict[str, Any]]:
    """Expose the module-level resource object catalog to the registry."""
    return RESOURCE_OBJECTS
+
+
def getDataObjects() -> List[Dict[str, Any]]:
    """Expose the module-level data object catalog to the registry."""
    return DATA_OBJECTS
+
+
def getTemplateRoles() -> List[Dict[str, Any]]:
    """Expose the feature's template role definitions to the registry."""
    return TEMPLATE_ROLES
+
+
def getTemplateWorkflows() -> List[Dict[str, Any]]:
    """This feature ships no workflow templates; always an empty list."""
    return []
+
+
def registerFeature(catalogService) -> bool:
    """Register UI / Resource / Data objects and sync template roles.

    Returns True on success. Any exception is logged and turned into a
    False return so the registry can continue with other features.
    """
    try:
        # One pass per catalog kind: (object list, registration callable).
        registrations = (
            (UI_OBJECTS, catalogService.registerUiObject),
            (RESOURCE_OBJECTS, catalogService.registerResourceObject),
            (DATA_OBJECTS, catalogService.registerDataObject),
        )
        for objects, registerFn in registrations:
            for obj in objects:
                registerFn(
                    featureCode=FEATURE_CODE,
                    objectKey=obj["objectKey"],
                    label=obj["label"],
                    meta=obj.get("meta"),
                )
        _syncTemplateRolesToDb()
        logger.info(
            f"Feature '{FEATURE_CODE}' registered "
            f"{len(UI_OBJECTS)} UI, {len(RESOURCE_OBJECTS)} resource, {len(DATA_OBJECTS)} data objects"
        )
        return True
    except Exception as e:
        logger.error(f"Failed to register feature '{FEATURE_CODE}': {e}")
        return False
+
+
+# ---------------------------------------------------------------------------
+# Template-role sync (mirrors the trustee implementation)
+# ---------------------------------------------------------------------------
+
def _syncTemplateRolesToDb() -> int:
    """Create any missing template roles (``mandateId is None``) for this feature.

    Roles are matched by ``roleLabel``; pre-existing template roles only
    get their missing access rules topped up via
    ``_ensureAccessRulesForRole`` and are otherwise left untouched.

    Returns:
        Number of newly created roles. Errors are logged and yield 0.
    """
    try:
        # Fix: AccessRule / AccessRuleContext were imported here but never
        # used in this function; _ensureAccessRulesForRole does its own import.
        from modules.datamodels.datamodelRbac import Role
        from modules.datamodels.datamodelUtils import coerce_text_multilingual
        from modules.interfaces.interfaceDbApp import getRootInterface

        rootInterface = getRootInterface()
        existingRoles = rootInterface.getRolesByFeatureCode(FEATURE_CODE)
        # Template roles are the ones not bound to a concrete mandate.
        templateRoles = [r for r in existingRoles if r.mandateId is None]
        existingByLabel: Dict[str, str] = {r.roleLabel: str(r.id) for r in templateRoles}

        createdCount = 0
        for roleTemplate in TEMPLATE_ROLES:
            roleLabel = roleTemplate["roleLabel"]
            if roleLabel in existingByLabel:
                # Role already exists -- only ensure its rules are present.
                _ensureAccessRulesForRole(
                    rootInterface,
                    existingByLabel[roleLabel],
                    roleTemplate.get("accessRules", []),
                )
                continue
            newRole = Role(
                roleLabel=roleLabel,
                description=coerce_text_multilingual(roleTemplate.get("description", {})),
                featureCode=FEATURE_CODE,
                mandateId=None,
                featureInstanceId=None,
                isSystemRole=False,
            )
            createdRole = rootInterface.db.recordCreate(Role, newRole.model_dump())
            roleId = createdRole.get("id")
            _ensureAccessRulesForRole(rootInterface, roleId, roleTemplate.get("accessRules", []))
            logger.info(f"Created template role '{roleLabel}' with ID {roleId}")
            createdCount += 1

        return createdCount
    except Exception as e:
        logger.error(f"Error syncing template roles for feature '{FEATURE_CODE}': {e}")
        return 0
+
+
def _ensureAccessRulesForRole(
    rootInterface, roleId: str, ruleTemplates: List[Dict[str, Any]]
) -> int:
    """Create access rules from *ruleTemplates* that are missing on *roleId*.

    Deduplication key is the pair (context, item); rules already present
    are never modified. Returns the number of rules created.
    """
    from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext

    # Known context names; unknown strings pass through unchanged (same
    # fallback the enum mapping below relies on).
    contextMap = {
        "UI": AccessRuleContext.UI,
        "DATA": AccessRuleContext.DATA,
        "RESOURCE": AccessRuleContext.RESOURCE,
    }
    seenSignatures = {
        (rule.context.value if rule.context else None, rule.item)
        for rule in rootInterface.getAccessRulesByRole(roleId)
    }

    createdCount = 0
    for template in ruleTemplates:
        contextName = template.get("context", "UI")
        item = template.get("item")
        if (contextName, item) in seenSignatures:
            continue
        newRule = AccessRule(
            roleId=roleId,
            context=contextMap.get(contextName, contextName),
            item=item,
            view=template.get("view", False),
            read=template.get("read"),
            create=template.get("create"),
            update=template.get("update"),
            delete=template.get("delete"),
        )
        rootInterface.db.recordCreate(AccessRule, newRule.model_dump())
        createdCount += 1
    return createdCount
diff --git a/modules/features/redmine/routeFeatureRedmine.py b/modules/features/redmine/routeFeatureRedmine.py
new file mode 100644
index 00000000..ad968001
--- /dev/null
+++ b/modules/features/redmine/routeFeatureRedmine.py
@@ -0,0 +1,478 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""FastAPI routes for the Redmine feature.
+
+URL pattern: ``/api/redmine/{instanceId}/...`` -- mirrors the Trustee /
+CommCoach pattern. Every endpoint validates that the feature instance
+exists and resolves its ``mandateId``. Audit log is written for every
+write call.
+"""
+
+import logging
+from typing import Any, Dict, List, Optional
+
+from fastapi import APIRouter, Body, Depends, HTTPException, Query, Request
+
+from modules.auth import RequestContext, getRequestContext, limiter
+from modules.features.redmine import interfaceFeatureRedmine as interfaceDb
+from modules.features.redmine import (
+ serviceRedmine,
+ serviceRedmineStats,
+ serviceRedmineSync,
+)
+from modules.features.redmine.datamodelRedmine import (
+ RedmineConfigDto,
+ RedmineConfigUpdateRequest,
+ RedmineFieldSchemaDto,
+ RedmineRelationCreateRequest,
+ RedmineStatsDto,
+ RedmineSyncResultDto,
+ RedmineSyncStatusDto,
+ RedmineTicketCreateRequest,
+ RedmineTicketDto,
+ RedmineTicketUpdateRequest,
+)
+from modules.features.redmine.serviceRedmine import RedmineNotConfiguredError
+from modules.connectors.connectorTicketsRedmine import RedmineApiError
+from modules.interfaces.interfaceDbApp import getRootInterface
+from modules.interfaces.interfaceFeatures import getFeatureInterface
+from modules.shared.i18nRegistry import apiRouteContext
+
# i18n message context for API errors emitted by this router.
routeApiMsg = apiRouteContext("routeFeatureRedmine")
logger = logging.getLogger(__name__)


# All endpoints below live under /api/redmine/{instanceId}/...
router = APIRouter(
    prefix="/api/redmine",
    tags=["Redmine"],
    responses={404: {"description": "Not found"}},
)
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
def _audit(
    context: RequestContext,
    action: str,
    resourceType: Optional[str] = None,
    resourceId: Optional[str] = None,
    details: str = "",
    success: bool = True,
    errorMessage: Optional[str] = None,
) -> None:
    """Write a best-effort audit entry; failures are logged at DEBUG only."""
    try:
        from modules.shared.auditLogger import audit_logger

        mandate = str(context.mandateId) if context.mandateId else None
        audit_logger.logEvent(
            userId=str(context.user.id),
            mandateId=mandate,
            featureInstanceId=getattr(context, "featureInstanceId", None),
            category="redmine",
            action=action,
            resourceType=resourceType,
            resourceId=resourceId,
            details=details,
            success=success,
            errorMessage=errorMessage,
        )
    except Exception as e:
        logger.debug(f"Redmine audit log failed: {e}")
+
+
def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
    """Ensure the feature instance exists and return its ``mandateId``.

    Raises HTTPException 404 for an unknown instance and 500 when the
    instance record carries no mandateId.
    """
    featureInterface = getFeatureInterface(getRootInterface().db)
    instance = featureInterface.getFeatureInstance(instanceId)
    if not instance:
        raise HTTPException(
            status_code=404,
            detail=routeApiMsg(f"Feature instance '{instanceId}' not found"),
        )
    if isinstance(instance, dict):
        mandateId = instance.get("mandateId")
    else:
        mandateId = getattr(instance, "mandateId", None)
    if not mandateId:
        raise HTTPException(
            status_code=500,
            detail=routeApiMsg("Feature instance has no mandateId"),
        )
    return str(mandateId)
+
+
def _toHttpStatus(e: RedmineApiError) -> int:
    """Map a Redmine error status to an HTTP status; unknown codes -> 502."""
    passthrough = {400, 401, 403, 404, 409, 422}
    return e.status if e.status in passthrough else 502
+
+
def _handleRedmineError(e: RedmineApiError) -> HTTPException:
    """Wrap a RedmineApiError into an HTTPException for FastAPI to raise."""
    status = _toHttpStatus(e)
    return HTTPException(status_code=status, detail=f"Redmine: {e}")
+
+
+# ---------------------------------------------------------------------------
+# Config
+# ---------------------------------------------------------------------------
+
@router.get("/{instanceId}/config", response_model=RedmineConfigDto)
@limiter.limit("60/minute")
async def getConfig(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> RedmineConfigDto:
    """Return the stored Redmine configuration for *instanceId* (read-only)."""
    mandate = _validateInstanceAccess(instanceId, context)
    iface = interfaceDb.getInterface(
        context.user, mandateId=mandate, featureInstanceId=instanceId
    )
    return iface.getConfigDto(instanceId)
+
+
@router.put("/{instanceId}/config", response_model=RedmineConfigDto)
@limiter.limit("20/minute")
async def updateConfig(
    request: Request,
    instanceId: str,
    body: RedmineConfigUpdateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineConfigDto:
    """Create or update the instance configuration; writes an audit entry."""
    mandate = _validateInstanceAccess(instanceId, context)
    iface = interfaceDb.getInterface(
        context.user, mandateId=mandate, featureInstanceId=instanceId
    )
    dto = iface.upsertConfig(instanceId, body)
    auditDetails = (
        f"baseUrl={dto.baseUrl} projectId={dto.projectId} hasApiKey={dto.hasApiKey}"
    )
    _audit(
        context,
        "redmine.config.updated",
        "RedmineInstanceConfig",
        instanceId,
        details=auditDetails,
    )
    return dto
+
+
@router.delete("/{instanceId}/config")
@limiter.limit("20/minute")
async def deleteConfig(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Delete the instance config; the audit entry records the outcome."""
    mandate = _validateInstanceAccess(instanceId, context)
    iface = interfaceDb.getInterface(
        context.user, mandateId=mandate, featureInstanceId=instanceId
    )
    wasDeleted = iface.deleteConfig(instanceId)
    _audit(
        context,
        "redmine.config.deleted",
        "RedmineInstanceConfig",
        instanceId,
        success=wasDeleted,
    )
    return {"deleted": wasDeleted}
+
+
@router.post("/{instanceId}/config/test")
@limiter.limit("20/minute")
async def testConfig(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Run a live connection test against Redmine and audit the outcome."""
    mandate = _validateInstanceAccess(instanceId, context)
    result = await serviceRedmine.testConnection(context.user, mandate, instanceId)
    ok = bool(result.get("ok"))
    _audit(
        context,
        "redmine.config.test",
        "RedmineInstanceConfig",
        instanceId,
        success=ok,
        errorMessage=None if ok else str(result.get("message")),
    )
    return result
+
+
+# ---------------------------------------------------------------------------
+# Schema
+# ---------------------------------------------------------------------------
+
@router.get("/{instanceId}/schema", response_model=RedmineFieldSchemaDto)
@limiter.limit("60/minute")
async def getSchema(
    request: Request,
    instanceId: str,
    forceRefresh: bool = Query(False),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineFieldSchemaDto:
    """Return the (possibly cached) project field schema for the instance."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        schema = await serviceRedmine.getProjectMeta(
            context.user, mandate, instanceId, forceRefresh=forceRefresh
        )
    except RedmineNotConfiguredError as e:
        # 409: the instance exists but has no usable Redmine configuration.
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        raise _handleRedmineError(e)
    return schema
+
+
+# ---------------------------------------------------------------------------
+# Sync (mirror)
+# ---------------------------------------------------------------------------
+
@router.post("/{instanceId}/sync", response_model=RedmineSyncResultDto)
@limiter.limit("6/minute")
async def runSync(
    request: Request,
    instanceId: str,
    force: bool = Query(default=False, description="True -> ignore lastSyncAt and pull every issue."),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineSyncResultDto:
    """Trigger a mirror sync; success and failure are both audited."""
    mandate = _validateInstanceAccess(instanceId, context)

    def auditFailure(message: str) -> None:
        # Shared failure audit for both Redmine and unexpected errors.
        _audit(
            context,
            "redmine.sync.completed",
            "RedmineInstanceConfig",
            instanceId,
            success=False,
            errorMessage=message,
        )

    try:
        result = await serviceRedmineSync.runSync(
            context.user, mandate, instanceId, force=force
        )
    except RedmineApiError as e:
        auditFailure(str(e))
        raise _handleRedmineError(e)
    except Exception as e:
        auditFailure(str(e))
        raise HTTPException(status_code=500, detail=f"Sync failed: {e}")

    _audit(
        context,
        "redmine.sync.completed",
        "RedmineInstanceConfig",
        instanceId,
        details=f"full={result.full} tickets={result.ticketsUpserted} relations={result.relationsUpserted} {result.durationMs}ms",
    )
    return result
+
+
@router.get("/{instanceId}/sync/status", response_model=RedmineSyncStatusDto)
@limiter.limit("60/minute")
async def getSyncStatus(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> RedmineSyncStatusDto:
    """Report the mirror's last-sync state for this instance."""
    mandate = _validateInstanceAccess(instanceId, context)
    return serviceRedmineSync.getSyncStatus(context.user, mandate, instanceId)
+
+
+# ---------------------------------------------------------------------------
+# Tickets
+# ---------------------------------------------------------------------------
+
@router.get("/{instanceId}/tickets", response_model=List[RedmineTicketDto])
@limiter.limit("60/minute")
async def listTickets(
    request: Request,
    instanceId: str,
    trackerIds: Optional[List[int]] = Query(default=None),
    status: str = Query(default="*"),
    dateFrom: Optional[str] = Query(default=None, description="ISO date (YYYY-MM-DD) -- updated_on >= dateFrom"),
    dateTo: Optional[str] = Query(default=None, description="ISO date (YYYY-MM-DD) -- updated_on <= dateTo"),
    assignedToId: Optional[int] = Query(default=None),
    context: RequestContext = Depends(getRequestContext),
) -> List[RedmineTicketDto]:
    """Reads from the local mirror. Trigger a sync via ``POST /sync`` first."""
    mandate = _validateInstanceAccess(instanceId, context)
    filters = dict(
        trackerIds=trackerIds,
        statusFilter=status,
        updatedOnFrom=dateFrom,
        updatedOnTo=dateTo,
        assignedToId=assignedToId,
    )
    return serviceRedmine.listTickets(context.user, mandate, instanceId, **filters)
+
+
@router.get("/{instanceId}/tickets/{issueId}", response_model=RedmineTicketDto)
@limiter.limit("120/minute")
async def getTicket(
    request: Request,
    instanceId: str,
    issueId: int,
    context: RequestContext = Depends(getRequestContext),
) -> RedmineTicketDto:
    """Fetch one ticket from the local mirror (404 when never synced)."""
    mandate = _validateInstanceAccess(instanceId, context)
    ticket = serviceRedmine.getTicket(context.user, mandate, instanceId, issueId)
    if ticket is not None:
        return ticket
    raise HTTPException(
        status_code=404,
        detail=f"Ticket {issueId} not in mirror; run a sync first.",
    )
+
+
@router.post("/{instanceId}/tickets", response_model=RedmineTicketDto)
@limiter.limit("30/minute")
async def createTicket(
    request: Request,
    instanceId: str,
    body: RedmineTicketCreateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineTicketDto:
    """Create a ticket in Redmine (write-through to the local mirror)."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        ticket = await serviceRedmine.createTicket(context.user, mandate, instanceId, body)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        # No ticket id exists yet on failure -- audit with a placeholder.
        _audit(context, "redmine.ticket.created", "RedmineTicket", "?", success=False, errorMessage=str(e))
        raise _handleRedmineError(e)
    _audit(context, "redmine.ticket.created", "RedmineTicket", str(ticket.id), details=f"trackerId={body.trackerId}")
    return ticket
+
+
@router.put("/{instanceId}/tickets/{issueId}", response_model=RedmineTicketDto)
@limiter.limit("60/minute")
async def updateTicket(
    request: Request,
    instanceId: str,
    issueId: int,
    body: RedmineTicketUpdateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineTicketDto:
    """Apply a partial update to a ticket in Redmine (write-through)."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        ticket = await serviceRedmine.updateTicket(context.user, mandate, instanceId, issueId, body)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(context, "redmine.ticket.updated", "RedmineTicket", str(issueId), success=False, errorMessage=str(e))
        raise _handleRedmineError(e)
    _audit(context, "redmine.ticket.updated", "RedmineTicket", str(issueId))
    return ticket
+
+
@router.delete("/{instanceId}/tickets/{issueId}")
@limiter.limit("30/minute")
async def deleteTicket(
    request: Request,
    instanceId: str,
    issueId: int,
    fallbackStatusId: Optional[int] = Query(default=None, description="If Redmine forbids DELETE, set this status instead"),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Delete a ticket in Redmine, or archive it via *fallbackStatusId*."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        result = await serviceRedmine.deleteTicket(
            context.user, mandate, instanceId, issueId, fallbackStatusId=fallbackStatusId
        )
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(context, "redmine.ticket.deleted", "RedmineTicket", str(issueId), success=False, errorMessage=str(e))
        raise _handleRedmineError(e)
    deleted = result.get("deleted")
    archived = result.get("archived")
    _audit(
        context,
        "redmine.ticket.deleted",
        "RedmineTicket",
        str(issueId),
        success=bool(deleted or archived),
        details=f"deleted={deleted} archived={archived}",
    )
    return result
+
+
+# ---------------------------------------------------------------------------
+# Relations
+# ---------------------------------------------------------------------------
+
@router.post("/{instanceId}/tickets/{issueId}/relations")
@limiter.limit("30/minute")
async def addRelation(
    request: Request,
    instanceId: str,
    issueId: int,
    body: RedmineRelationCreateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Create an issue relation in Redmine; both outcomes are audited."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        rel = await serviceRedmine.addRelation(context.user, mandate, instanceId, issueId, body)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(
            context,
            "redmine.relation.created",
            "RedmineRelation",
            f"{issueId}->{body.issueToId}",
            success=False,
            errorMessage=str(e),
        )
        raise _handleRedmineError(e)
    _audit(
        context,
        "redmine.relation.created",
        "RedmineRelation",
        str(rel.get("id")),
        details=f"{issueId} -[{body.relationType}]-> {body.issueToId}",
    )
    return {"relation": rel}
+
+
@router.delete("/{instanceId}/relations/{relationId}")
@limiter.limit("30/minute")
async def deleteRelation(
    request: Request,
    instanceId: str,
    relationId: int,
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Delete an issue relation by its Redmine relation id."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        ok = await serviceRedmine.deleteRelation(context.user, mandate, instanceId, relationId)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(
            context,
            "redmine.relation.deleted",
            "RedmineRelation",
            str(relationId),
            success=False,
            errorMessage=str(e),
        )
        raise _handleRedmineError(e)
    _audit(context, "redmine.relation.deleted", "RedmineRelation", str(relationId), success=ok)
    return {"deleted": ok}
+
+
+# ---------------------------------------------------------------------------
+# Stats
+# ---------------------------------------------------------------------------
+
@router.get("/{instanceId}/stats", response_model=RedmineStatsDto)
@limiter.limit("60/minute")
async def getStats(
    request: Request,
    instanceId: str,
    dateFrom: Optional[str] = Query(default=None, description="ISO date YYYY-MM-DD"),
    dateTo: Optional[str] = Query(default=None, description="ISO date YYYY-MM-DD"),
    bucket: str = Query(default="week", regex="^(day|week|month)$"),
    trackerIds: Optional[List[int]] = Query(default=None),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineStatsDto:
    """Aggregate ticket statistics over the mirror for a date window."""
    mandate = _validateInstanceAccess(instanceId, context)
    try:
        stats = await serviceRedmineStats.getStats(
            context.user,
            mandate,
            instanceId,
            dateFrom=dateFrom,
            dateTo=dateTo,
            bucket=bucket,
            trackerIds=trackerIds,
        )
    except RedmineNotConfiguredError as e:
        # 409: instance exists but Redmine is not configured.
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        raise _handleRedmineError(e)
    return stats
diff --git a/modules/features/redmine/serviceRedmine.py b/modules/features/redmine/serviceRedmine.py
new file mode 100644
index 00000000..5b37d00a
--- /dev/null
+++ b/modules/features/redmine/serviceRedmine.py
@@ -0,0 +1,609 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine service layer.
+
+Sits between the FastAPI route and the connector. Responsibilities:
+
+- Resolve the connector for an authenticated user / feature instance.
+- Cache project meta (trackers, statuses, priorities, custom fields, users)
+ on the instance config.
+- Resolve the configured ``rootTrackerName`` against the live tracker list.
+ No heuristic / no auto-detect.
+- **Reads come from the local mirror** (``RedmineTicketMirror`` /
+ ``RedmineRelationMirror`` in ``poweron_redmine``). The mirror is
+ populated by ``serviceRedmineSync`` (button or scheduler).
+- **Writes go to Redmine, then immediately upsert the affected ticket
+ into the mirror** so the UI is consistent without waiting for a sync.
+- Invalidate ``serviceRedmineStatsCache`` after every successful write.
+
+All AI-tool-friendly entry points are pure async functions taking the
+authenticated ``User`` plus the explicit ``featureInstanceId`` and
+``mandateId`` so the same service can be called from REST and from the
+workflow engine without context-magic.
+"""
+
+from __future__ import annotations
+
+import logging
+import time
+from typing import Any, Dict, List, Optional, Tuple
+
+from modules.connectors.connectorTicketsRedmine import (
+ ConnectorTicketsRedmine,
+ RedmineApiError,
+)
+from modules.datamodels.datamodelUam import User
+from modules.features.redmine.datamodelRedmine import (
+ RedmineCustomFieldSchemaDto,
+ RedmineCustomFieldValueDto,
+ RedmineFieldChoiceDto,
+ RedmineFieldSchemaDto,
+ RedmineRelationCreateRequest,
+ RedmineRelationDto,
+ RedmineTicketCreateRequest,
+ RedmineTicketDto,
+ RedmineTicketUpdateRequest,
+)
+from modules.features.redmine.interfaceFeatureRedmine import (
+ RedmineObjects,
+ getInterface,
+)
+from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
+
+logger = logging.getLogger(__name__)
+
+
+# ---------------------------------------------------------------------------
+# Resolution helpers
+# ---------------------------------------------------------------------------
+
class RedmineNotConfiguredError(RuntimeError):
    """Raised when a feature instance has no usable Redmine configuration."""
+
+
def _resolveContext(
    currentUser: User, mandateId: Optional[str], featureInstanceId: str
) -> Tuple[RedmineObjects, ConnectorTicketsRedmine]:
    """Resolve (interface, connector) for an instance, or raise if unusable.

    Raises:
        RedmineNotConfiguredError: the instance has no resolvable connector.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if connector:
        return iface, connector
    raise RedmineNotConfiguredError(
        f"Redmine instance {featureInstanceId} is not configured or inactive"
    )
+
+
+# ---------------------------------------------------------------------------
+# Project meta -- with TTL cache stored on the config record
+# ---------------------------------------------------------------------------
+
async def getProjectMeta(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    forceRefresh: bool = False,
) -> RedmineFieldSchemaDto:
    """Return the project field schema, using the config-level TTL cache.

    The schema (trackers, statuses, priorities, users, custom fields) is
    cached on the instance config; ``schemaCacheTtlSeconds`` defaults to
    24h. ``forceRefresh=True`` bypasses the cache entirely.

    Raises:
        RedmineNotConfiguredError: no usable config for the instance.
        RedmineApiError: propagated from the connector on refresh.
    """
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    if cfg is None:
        raise RedmineNotConfiguredError("Config row vanished after connector resolve")

    # Short-circuit: skip the freshness computation entirely on forceRefresh.
    if not forceRefresh:
        ttlSeconds = (
            cfg.schemaCacheTtlSeconds
            if cfg.schemaCacheTtlSeconds is not None
            else 24 * 60 * 60
        )
        cacheIsFresh = (
            cfg.schemaCache
            and cfg.schemaCachedAt
            and (time.time() - cfg.schemaCachedAt) < ttlSeconds
        )
        if cacheIsFresh:
            schema = _schemaFromCache(cfg.projectId, cfg.schemaCache, cfg.rootTrackerName)
            if schema is not None:
                return schema

    # Cache miss / stale / forced: pull everything fresh from Redmine.
    projectInfo = await connector.getProjectInfo()
    trackersRaw = await connector.getTrackers()
    statusesRaw = await connector.getStatuses()
    prioritiesRaw = await connector.getPriorities()
    customFieldsRaw = await connector.getCustomFields()
    usersRaw = await connector.getProjectUsers()

    schemaCache: Dict[str, Any] = {
        "projectName": projectInfo.get("name", ""),
        "trackers": [{"id": t.get("id"), "name": t.get("name")} for t in trackersRaw],
        "statuses": [
            {
                "id": s.get("id"),
                "name": s.get("name"),
                "isClosed": bool(s.get("is_closed")),
            }
            for s in statusesRaw
        ],
        "priorities": [{"id": p.get("id"), "name": p.get("name")} for p in prioritiesRaw],
        "users": [{"id": u.get("id"), "name": u.get("name")} for u in usersRaw],
        "customFields": [
            {
                "id": cf.get("id"),
                "name": cf.get("name"),
                "fieldFormat": cf.get("field_format", "string"),
                "isRequired": bool(cf.get("is_required")),
                "possibleValues": [
                    pv.get("value")
                    for pv in (cf.get("possible_values") or [])
                    if pv.get("value") is not None
                ],
                "multiple": bool(cf.get("multiple")),
                "defaultValue": cf.get("default_value"),
            }
            for cf in customFieldsRaw
        ],
    }
    iface.updateSchemaCache(featureInstanceId, schemaCache)
    iface.markConfigConnected(featureInstanceId)

    # _schemaFromCache only returns None for an empty cache dict; fall back
    # to a minimal DTO so callers always receive a schema object.
    return _schemaFromCache(cfg.projectId, schemaCache, cfg.rootTrackerName) or RedmineFieldSchemaDto(
        projectId=cfg.projectId,
        projectName=schemaCache["projectName"],
        rootTrackerName=cfg.rootTrackerName,
    )
+
+
+def _resolveRootTrackerId(
+ rootTrackerName: str, trackers: List[Dict[str, Any]]
+) -> Optional[int]:
+ """Resolve the configured root tracker name to a tracker id.
+
+ Strict: case-insensitive exact match. Returns ``None`` if not found
+ (the UI must surface this as a config error).
+ """
+ target = (rootTrackerName or "").strip().lower()
+ if not target:
+ return None
+ for t in trackers:
+ if str(t.get("name") or "").strip().lower() == target:
+ tid = t.get("id")
+ return int(tid) if tid is not None else None
+ return None
+
+
def _schemaFromCache(
    projectId: str, cache: Optional[Dict[str, Any]], rootTrackerName: str
) -> Optional[RedmineFieldSchemaDto]:
    """Rebuild a RedmineFieldSchemaDto from a config-level cache dict.

    Returns ``None`` for an empty/missing cache so the caller can fall
    through to a live refresh.
    """
    if not cache:
        return None
    trackerEntries = cache.get("trackers") or []
    # Entries without an id are skipped -- the DTO requires one.
    customFieldDtos = [
        RedmineCustomFieldSchemaDto(
            id=entry.get("id"),
            name=entry.get("name", ""),
            fieldFormat=entry.get("fieldFormat", "string"),
            isRequired=bool(entry.get("isRequired")),
            possibleValues=list(entry.get("possibleValues") or []),
            multiple=bool(entry.get("multiple")),
            defaultValue=entry.get("defaultValue"),
        )
        for entry in cache.get("customFields") or []
        if entry.get("id") is not None
    ]
    return RedmineFieldSchemaDto(
        projectId=projectId,
        projectName=str(cache.get("projectName") or ""),
        trackers=[RedmineFieldChoiceDto(**t) for t in trackerEntries],
        statuses=[RedmineFieldChoiceDto(**s) for s in cache.get("statuses") or []],
        priorities=[RedmineFieldChoiceDto(**p) for p in cache.get("priorities") or []],
        users=[RedmineFieldChoiceDto(**u) for u in cache.get("users") or []],
        customFields=customFieldDtos,
        rootTrackerName=rootTrackerName,
        rootTrackerId=_resolveRootTrackerId(rootTrackerName, trackerEntries),
    )
+
+
+# ---------------------------------------------------------------------------
+# Mirror -> RedmineTicketDto
+# ---------------------------------------------------------------------------
+
def _mirroredRowToDto(
    row: Dict[str, Any], relations: List[Dict[str, Any]], includeRaw: bool = False
) -> RedmineTicketDto:
    """Convert a mirror row plus its relation rows into a RedmineTicketDto."""
    customFieldDtos = []
    for cf in row.get("customFields") or []:
        if cf.get("id") is None:
            continue
        customFieldDtos.append(
            RedmineCustomFieldValueDto(
                id=int(cf.get("id")),
                name=str(cf.get("name") or ""),
                value=cf.get("value"),
            )
        )

    relationDtos = []
    for rel in relations:
        # Mirror rows may carry the Redmine relation id under either key.
        relationId = rel.get("redmineRelationId") or rel.get("id")
        if relationId is None:
            continue
        relationDtos.append(
            RedmineRelationDto(
                id=int(relationId),
                issueId=int(rel.get("issueId")),
                issueToId=int(rel.get("issueToId")),
                relationType=str(rel.get("relationType") or "relates"),
                delay=rel.get("delay"),
            )
        )

    return RedmineTicketDto(
        id=int(row.get("redmineId")),
        subject=str(row.get("subject") or ""),
        description=str(row.get("description") or ""),
        trackerId=row.get("trackerId"),
        trackerName=row.get("trackerName"),
        statusId=row.get("statusId"),
        statusName=row.get("statusName"),
        isClosed=bool(row.get("isClosed")),
        priorityId=row.get("priorityId"),
        priorityName=row.get("priorityName"),
        assignedToId=row.get("assignedToId"),
        assignedToName=row.get("assignedToName"),
        authorId=row.get("authorId"),
        authorName=row.get("authorName"),
        parentId=row.get("parentId"),
        fixedVersionId=row.get("fixedVersionId"),
        fixedVersionName=row.get("fixedVersionName"),
        createdOn=row.get("createdOn"),
        updatedOn=row.get("updatedOn"),
        customFields=customFieldDtos,
        relations=relationDtos,
        raw=row.get("raw") if includeRaw else None,
    )
+
+
+def _isoToEpoch(value: Optional[str]) -> Optional[float]:
+ if not value:
+ return None
+ try:
+ from datetime import datetime
+ return datetime.fromisoformat(value.replace("Z", "+00:00")).timestamp()
+ except Exception:
+ return None
+
+
+# ---------------------------------------------------------------------------
+# Read API -- from mirror
+# ---------------------------------------------------------------------------
+
def listTickets(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    trackerIds: Optional[List[int]] = None,
    statusFilter: str = "*",
    updatedOnFrom: Optional[str] = None,
    updatedOnTo: Optional[str] = None,
    assignedToId: Optional[int] = None,
) -> List[RedmineTicketDto]:
    """List tickets from the local mirror.

    ``statusFilter`` accepts ``"open"``, ``"closed"`` or ``"*"`` (any),
    matching the Redmine ``status_id`` semantics. Date bounds are ISO
    strings converted to epoch seconds for the mirror query.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    rows = iface.listMirroredTickets(
        featureInstanceId,
        trackerIds=trackerIds,
        assigneeId=assignedToId,
        updatedFromTs=_isoToEpoch(updatedOnFrom),
        updatedToTs=_isoToEpoch(updatedOnTo),
    )
    if statusFilter and statusFilter != "*":
        want_closed = statusFilter == "closed"
        rows = [r for r in rows if bool(r.get("isClosed")) == want_closed]

    relations_all = iface.listMirroredRelations(featureInstanceId)
    relations_by_issue: Dict[int, List[Dict[str, Any]]] = {}
    # Fix: guard missing redmineId with ``or 0`` (consistent with getTicket)
    # -- ``int(None)`` would raise TypeError here otherwise.
    ids = {int(r.get("redmineId") or 0) for r in rows}
    for r in relations_all:
        a = int(r.get("issueId") or 0)
        b = int(r.get("issueToId") or 0)
        # A relation is attached to both of its endpoints when mirrored.
        for k in (a, b):
            if k in ids:
                relations_by_issue.setdefault(k, []).append(r)

    return [
        _mirroredRowToDto(row, relations_by_issue.get(int(row.get("redmineId") or 0), []))
        for row in rows
    ]
+
+
def getTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    *,
    includeRaw: bool = True,
) -> Optional[RedmineTicketDto]:
    """Read a single ticket from the mirror. Returns ``None`` when not present."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    wanted = int(issueId)
    row = None
    for candidate in iface.listMirroredTickets(featureInstanceId):
        if int(candidate.get("redmineId") or 0) == wanted:
            row = candidate
            break
    if row is None:
        return None
    # A relation belongs to the ticket when it appears on either side.
    matching = [
        rel
        for rel in iface.listMirroredRelations(featureInstanceId)
        if wanted in (int(rel.get("issueId") or 0), int(rel.get("issueToId") or 0))
    ]
    return _mirroredRowToDto(row, matching, includeRaw=includeRaw)
+
+
+# ---------------------------------------------------------------------------
+# Write API -- idempotent + cache invalidation + mirror upsert
+# ---------------------------------------------------------------------------
+
def _invalidateCache(featureInstanceId: str) -> None:
    """Best-effort invalidation of cached stats for one feature instance.

    Failures are only logged: cache staleness must never break a write path.
    """
    try:
        cache = _getStatsCache()
        cache.invalidateInstance(featureInstanceId)
    except Exception as e:
        logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")
+
+
+def _diffPayload(
+ current: RedmineTicketDto, update: RedmineTicketUpdateRequest
+) -> Dict[str, Any]:
+ """Return the Redmine ``issue`` payload containing only changed fields."""
+ payload: Dict[str, Any] = {}
+ if update.subject is not None and update.subject != current.subject:
+ payload["subject"] = update.subject
+ if update.description is not None and update.description != current.description:
+ payload["description"] = update.description
+ if update.trackerId is not None and update.trackerId != current.trackerId:
+ payload["tracker_id"] = int(update.trackerId)
+ if update.statusId is not None and update.statusId != current.statusId:
+ payload["status_id"] = int(update.statusId)
+ if update.priorityId is not None and update.priorityId != current.priorityId:
+ payload["priority_id"] = int(update.priorityId)
+ if update.assignedToId is not None and update.assignedToId != current.assignedToId:
+ payload["assigned_to_id"] = int(update.assignedToId)
+ if update.parentIssueId is not None and update.parentIssueId != current.parentId:
+ payload["parent_issue_id"] = int(update.parentIssueId)
+ if update.fixedVersionId is not None and update.fixedVersionId != current.fixedVersionId:
+ payload["fixed_version_id"] = int(update.fixedVersionId)
+ if update.customFields:
+ current_by_id = {cf.id: cf.value for cf in current.customFields}
+ cf_payload: List[Dict[str, Any]] = []
+ for cf_id, value in update.customFields.items():
+ try:
+ cf_id_int = int(cf_id)
+ except Exception:
+ continue
+ if current_by_id.get(cf_id_int) != value:
+ cf_payload.append({"id": cf_id_int, "value": value})
+ if cf_payload:
+ payload["custom_fields"] = cf_payload
+ return payload
+
+
async def _refreshMirroredTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> None:
    """Best-effort re-sync of one issue into the local mirror.

    Failures are logged and swallowed on purpose: the Redmine write has
    already succeeded at this point, so a mirror hiccup must not bubble
    up to the caller.
    """
    # Imported lazily -- presumably to avoid a circular import between
    # serviceRedmine and serviceRedmineSync; confirm against module tops.
    from modules.features.redmine.serviceRedmineSync import upsertSingleTicket
    try:
        await upsertSingleTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    except Exception as e:
        logger.warning(f"Mirror upsert for issue {issueId} failed: {e}")
+
+
async def updateTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    update: RedmineTicketUpdateRequest,
) -> RedmineTicketDto:
    """Idempotent: fetch the issue from Redmine (live, for diff accuracy),
    only PUT if non-empty, then upsert the mirror.

    Returns the refreshed mirror DTO when available, otherwise the
    pre-update live snapshot.
    """
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    # Diff against live Redmine state (not the mirror) so a stale mirror
    # cannot cause lost or redundant updates.
    current_live = await connector.getIssue(int(issueId), includeRelations=False)
    current = _liveIssueToDto(current_live, schema)
    payload = _diffPayload(current, update)
    # Nothing changed and no journal note requested: skip the PUT entirely.
    if not payload and not update.notes:
        return current

    await connector.updateIssue(int(issueId), payload, notes=update.notes)
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    _invalidateCache(featureInstanceId)
    refreshed = getTicket(currentUser, mandateId, featureInstanceId, int(issueId), includeRaw=True)
    return refreshed or current
+
+
async def createTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    payload: RedmineTicketCreateRequest,
) -> RedmineTicketDto:
    """Create an issue in Redmine, mirror it locally and return its DTO.

    On success the single new issue is upserted into the mirror and the
    stats cache is invalidated; the mirror DTO is preferred over the live
    payload. If Redmine answers without an issue id (defensive), the live
    payload is mapped directly and mirror/cache stay untouched.
    """
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)

    # Mandatory fields; description defaults to "" so Redmine never sees None.
    fields: Dict[str, Any] = {
        "subject": payload.subject,
        "tracker_id": int(payload.trackerId),
        "description": payload.description or "",
    }
    # Optional id-valued fields: only sent when provided, coerced to int.
    optional_ids = (
        ("statusId", "status_id"),
        ("priorityId", "priority_id"),
        ("assignedToId", "assigned_to_id"),
        ("parentIssueId", "parent_issue_id"),
        ("fixedVersionId", "fixed_version_id"),
    )
    for attr, key in optional_ids:
        value = getattr(payload, attr)
        if value is not None:
            fields[key] = int(value)
    if payload.customFields:
        fields["custom_fields"] = [
            {"id": int(k), "value": v} for k, v in payload.customFields.items()
        ]

    created = await connector.createIssue(fields)
    new_id = created.get("id")
    if not new_id:
        # No id in the response: fall back to the live payload without
        # touching mirror or cache (matches previous behavior).
        return _liveIssueToDto(created, schema, includeRaw=True)

    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(new_id))
    _invalidateCache(featureInstanceId)
    fresh = getTicket(currentUser, mandateId, featureInstanceId, int(new_id), includeRaw=True)
    return fresh or _liveIssueToDto(created, schema, includeRaw=True)
+
+
async def deleteTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    *,
    fallbackStatusId: Optional[int] = None,
) -> Dict[str, Any]:
    """Try DELETE; when the connector reports the delete as unsuccessful
    (typically Redmine answering 401/403), fall back to moving the issue
    to ``fallbackStatusId`` if one is provided.

    Returns ``{deleted: bool, archived: bool, statusId: int|None}``.
    """
    # The interface handle is unused here (mirror cleanup goes through
    # serviceRedmineSync), so discard it -- consistent with the other
    # write paths.
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    deleted = await connector.deleteIssue(int(issueId))
    if deleted:
        # Lazy import avoids a circular dependency with serviceRedmineSync.
        from modules.features.redmine.serviceRedmineSync import deleteMirroredTicket
        deleteMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
        _invalidateCache(featureInstanceId)
        return {"deleted": True, "archived": False, "statusId": None}
    if fallbackStatusId is not None:
        # Archive instead: set the configured closed status and leave an
        # explanatory journal note on the issue.
        await connector.updateIssue(
            int(issueId),
            {"status_id": int(fallbackStatusId)},
            notes="Archived via Porta -- delete forbidden by Redmine",
        )
        await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
        _invalidateCache(featureInstanceId)
        return {"deleted": False, "archived": True, "statusId": int(fallbackStatusId)}
    return {"deleted": False, "archived": False, "statusId": None}
+
+
async def addRelation(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    payload: RedmineRelationCreateRequest,
) -> Dict[str, Any]:
    """Create an issue relation in Redmine and refresh both endpoints.

    Both issues are re-mirrored afterwards and the stats cache is dropped.
    Returns the raw relation dict as delivered by the connector.
    """
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    rel = await connector.addRelation(
        int(issueId),
        int(payload.issueToId),
        relationType=payload.relationType,
        delay=payload.delay,
    )
    # A relation touches two issues -- refresh both mirror rows.
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(payload.issueToId))
    _invalidateCache(featureInstanceId)
    return rel
+
+
async def deleteRelation(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    relationId: int,
) -> bool:
    """Delete a relation in Redmine; on success also drop its mirror row
    and invalidate cached stats. Returns the connector's success flag."""
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    ok = await connector.deleteRelation(int(relationId))
    if not ok:
        return ok
    iface.deleteMirroredRelationByRedmineId(featureInstanceId, int(relationId))
    _invalidateCache(featureInstanceId)
    return ok
+
+
+# ---------------------------------------------------------------------------
+# Live (Redmine) -> RedmineTicketDto -- only used by the write paths to
+# compute idempotent diffs against the current Redmine state.
+# ---------------------------------------------------------------------------
+
+def _statusIsClosedFromSchema(statusId: Optional[int], schema: Optional[RedmineFieldSchemaDto]) -> bool:
+ if statusId is None or schema is None:
+ return False
+ for s in schema.statuses:
+ if s.id == statusId:
+ return bool(s.isClosed)
+ return False
+
+
def _liveIssueToDto(
    issue: Dict[str, Any], schema: Optional[RedmineFieldSchemaDto] = None, *, includeRaw: bool = False
) -> RedmineTicketDto:
    """Map a raw Redmine REST issue dict onto :class:`RedmineTicketDto`.

    Missing nested objects (tracker, status, assignee, ...) degrade to
    ``None``-valued fields instead of raising. ``isClosed`` is derived via
    the schema lookup -- presumably because the live issue payload carries
    no closed flag on its status; confirm against the Redmine REST docs.
    """
    # Nested association dicts; default to {} so the .get() chains are safe.
    tracker = issue.get("tracker") or {}
    status = issue.get("status") or {}
    priority = issue.get("priority") or {}
    assigned = issue.get("assigned_to") or {}
    author = issue.get("author") or {}
    fixed_version = issue.get("fixed_version") or {}
    status_id = status.get("id")
    return RedmineTicketDto(
        id=int(issue.get("id")),
        subject=str(issue.get("subject") or ""),
        description=str(issue.get("description") or ""),
        trackerId=tracker.get("id"),
        trackerName=tracker.get("name"),
        statusId=status_id,
        statusName=status.get("name"),
        isClosed=_statusIsClosedFromSchema(status_id, schema),
        priorityId=priority.get("id"),
        priorityName=priority.get("name"),
        assignedToId=assigned.get("id"),
        assignedToName=assigned.get("name"),
        authorId=author.get("id"),
        authorName=author.get("name"),
        parentId=(issue.get("parent") or {}).get("id"),
        fixedVersionId=fixed_version.get("id"),
        fixedVersionName=fixed_version.get("name"),
        createdOn=issue.get("created_on"),
        updatedOn=issue.get("updated_on"),
        # Entries without an id are skipped defensively in both lists below.
        customFields=[
            RedmineCustomFieldValueDto(
                id=int(cf.get("id")),
                name=str(cf.get("name") or ""),
                value=cf.get("value"),
            )
            for cf in issue.get("custom_fields") or []
            if cf.get("id") is not None
        ],
        relations=[
            RedmineRelationDto(
                id=int(r.get("id")),
                issueId=int(r.get("issue_id")),
                issueToId=int(r.get("issue_to_id")),
                relationType=str(r.get("relation_type") or "relates"),
                delay=r.get("delay"),
            )
            for r in issue.get("relations") or []
            if r.get("id") is not None
        ],
        raw=issue if includeRaw else None,
    )
+
+
+# ---------------------------------------------------------------------------
+# Connection self-test (used by the Settings page button)
+# ---------------------------------------------------------------------------
+
async def testConnection(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
) -> Dict[str, Any]:
    """Calls ``whoAmI`` and a minimal project fetch. Updates the
    ``lastConnectedAt`` timestamp on success. Never raises -- returns a
    structured dict for the UI.

    Result shapes:
    - success: ``{"ok": True, "user": {...}, "project": {...}}``
    - failure: ``{"ok": False, "reason": "notConfigured"|"httpError"|"exception", ...}``
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        return {"ok": False, "reason": "notConfigured", "message": "Keine gueltige Redmine-Konfiguration."}
    try:
        user = await connector.whoAmI()
        project = await connector.getProjectInfo()
        # Both probes succeeded -> persist the connectivity timestamp.
        iface.markConfigConnected(featureInstanceId)
        return {
            "ok": True,
            "user": {"id": user.get("id"), "name": (user.get("firstname") or "") + " " + (user.get("lastname") or "")},
            "project": {"id": project.get("id"), "name": project.get("name")},
        }
    except RedmineApiError as e:
        # Error bodies are truncated to 300 chars so oversized Redmine
        # responses cannot flood the UI / logs.
        return {"ok": False, "reason": "httpError", "status": e.status, "message": (e.body or "")[:300]}
    except Exception as e:
        return {"ok": False, "reason": "exception", "message": str(e)[:300]}
diff --git a/modules/features/redmine/serviceRedmineStats.py b/modules/features/redmine/serviceRedmineStats.py
new file mode 100644
index 00000000..5f385df7
--- /dev/null
+++ b/modules/features/redmine/serviceRedmineStats.py
@@ -0,0 +1,403 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Redmine statistics aggregator.
+
+Returns raw buckets in :class:`RedmineStatsDto`. The frontend
+(``RedmineStatsPage.tsx``) maps these onto ``ReportSection`` for
+``FormGeneratorReport``. Decision 2026-04-21.
+
+Sections produced:
+- KPIs: total / open / closed / closedInPeriod / createdInPeriod / orphans
+- statusByTracker (stacked bar)
+- throughput (line chart, created vs closed per bucket)
+- topAssignees (top-10 horizontal bar)
+- relationDistribution (pie)
+- backlogAging (open issues by age since last update)
+
+The whole result is cached in :mod:`serviceRedmineStatsCache` keyed by
+``(instanceId, dateFrom, dateTo, bucket, trackerIds)`` with a 90 s TTL.
+"""
+
+from __future__ import annotations
+
+import datetime as _dt
+import logging
+from collections import Counter, defaultdict
+from typing import Any, Dict, Iterable, List, Optional, Tuple
+
+from modules.datamodels.datamodelUam import User
+from modules.features.redmine.datamodelRedmine import (
+ RedmineAgingBucket,
+ RedmineAssigneeBucket,
+ RedmineFieldSchemaDto,
+ RedmineRelationDistributionEntry,
+ RedmineStatsDto,
+ RedmineStatsKpis,
+ RedmineStatusByTrackerEntry,
+ RedmineThroughputBucket,
+ RedmineTicketDto,
+)
+from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
+
+logger = logging.getLogger(__name__)
+
+
+# ---------------------------------------------------------------------------
+# Public entry
+# ---------------------------------------------------------------------------
+
async def getStats(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    dateFrom: Optional[str] = None,
    dateTo: Optional[str] = None,
    bucket: str = "week",
    trackerIds: Optional[List[int]] = None,
) -> RedmineStatsDto:
    """Compute (or fetch from cache) the full statistics payload.

    Args:
        currentUser / mandateId / featureInstanceId: access context passed
            through to ``getProjectMeta`` and ``listTickets``.
        dateFrom / dateTo: optional ISO date strings bounding the reporting
            period; forwarded verbatim into the DTO and the cache key.
        bucket: throughput granularity; anything other than day/week/month
            silently falls back to "week".
        trackerIds: optional tracker filter; deduplicated and sorted so the
            cache key is order-independent.
    """
    # Normalise inputs first so equivalent requests share one cache entry.
    bucket_norm = (bucket or "week").lower()
    if bucket_norm not in {"day", "week", "month"}:
        bucket_norm = "week"
    tracker_ids_norm: List[int] = sorted({int(t) for t in trackerIds or []})

    cache = _getStatsCache()
    cache_key = cache.buildKey(featureInstanceId, dateFrom, dateTo, bucket_norm, tracker_ids_norm)
    cached = cache.get(cache_key)
    if cached is not None:
        return cached

    # Lazy import: keeps the pure aggregation helpers below importable
    # without dragging in aiohttp / DB connector at module load.
    from modules.features.redmine.serviceRedmine import (
        getProjectMeta,
        listTickets,
    )

    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    root_tracker_id = schema.rootTrackerId

    # statusFilter="*" pulls open AND closed tickets -- the KPI section
    # needs both populations.
    tickets = listTickets(
        currentUser,
        mandateId,
        featureInstanceId,
        trackerIds=tracker_ids_norm or None,
        statusFilter="*",
    )

    stats = _aggregate(
        tickets,
        schema=schema,
        rootTrackerId=root_tracker_id,
        dateFrom=dateFrom,
        dateTo=dateTo,
        bucket=bucket_norm,
        trackerIdsFilter=tracker_ids_norm,
        instanceId=featureInstanceId,
    )

    cache.set(cache_key, stats)
    return stats
+
+
+# ---------------------------------------------------------------------------
+# Pure aggregation (testable without I/O)
+# ---------------------------------------------------------------------------
+
def _aggregate(
    tickets: List[RedmineTicketDto],
    *,
    schema: Optional[RedmineFieldSchemaDto],
    rootTrackerId: Optional[int],
    dateFrom: Optional[str],
    dateTo: Optional[str],
    bucket: str,
    trackerIdsFilter: List[int],
    instanceId: str,
) -> RedmineStatsDto:
    """Pure, I/O-free assembly of every stats section from loaded tickets.

    Each section builder is independent; the echo fields (instanceId,
    dateFrom, dateTo, bucket, trackerIds) let the frontend display which
    filter produced the result.
    """
    period_from = _parseIsoDate(dateFrom)
    period_to = _parseIsoDate(dateTo)

    kpis = _kpis(tickets, rootTrackerId, period_from, period_to)
    status_by_tracker = _statusByTracker(tickets, schema)
    throughput = _throughput(tickets, period_from, period_to, bucket)
    top_assignees = _topAssignees(tickets, limit=10)
    relation_distribution = _relationDistribution(tickets)
    backlog_aging = _backlogAging(tickets, now=_utcNow())

    return RedmineStatsDto(
        instanceId=instanceId,
        dateFrom=dateFrom,
        dateTo=dateTo,
        bucket=bucket,
        trackerIds=trackerIdsFilter,
        kpis=kpis,
        statusByTracker=status_by_tracker,
        throughput=throughput,
        topAssignees=top_assignees,
        relationDistribution=relation_distribution,
        backlogAging=backlog_aging,
    )
+
+
+# ---------------------------------------------------------------------------
+# Section builders
+# ---------------------------------------------------------------------------
+
def _kpis(
    tickets: List[RedmineTicketDto],
    rootTrackerId: Optional[int],
    periodFrom: Optional[_dt.datetime],
    periodTo: Optional[_dt.datetime],
) -> RedmineStatsKpis:
    """Headline counters: totals plus per-period created/closed and orphans.

    NOTE(review): ``closedInPeriod`` uses ``updatedOn`` as a stand-in for
    the (unavailable) close date -- a closed ticket edited later is counted
    in the period of that edit. Confirm this approximation is intended.
    """
    total = len(tickets)
    open_count = sum(1 for t in tickets if not t.isClosed)
    closed_count = sum(1 for t in tickets if t.isClosed)

    closed_in_period = 0
    created_in_period = 0
    for t in tickets:
        created = _parseIsoDate(t.createdOn)
        updated = _parseIsoDate(t.updatedOn)
        if created and _inPeriod(created, periodFrom, periodTo):
            created_in_period += 1
        if t.isClosed and updated and _inPeriod(updated, periodFrom, periodTo):
            closed_in_period += 1

    orphans = _countOrphans(tickets, rootTrackerId)

    return RedmineStatsKpis(
        total=total,
        open=open_count,
        closed=closed_count,
        closedInPeriod=closed_in_period,
        createdInPeriod=created_in_period,
        orphans=orphans,
    )
+
+
+def _countOrphans(
+ tickets: List[RedmineTicketDto], rootTrackerId: Optional[int]
+) -> int:
+ """A ticket is an orphan if it is not a root user-story AND not
+ reachable (via parent or any relation, in either direction) to any
+ root user-story within the same loaded set."""
+ if not tickets:
+ return 0
+ by_id: Dict[int, RedmineTicketDto] = {t.id: t for t in tickets}
+ roots: set[int] = {
+ t.id for t in tickets if rootTrackerId and t.trackerId == rootTrackerId
+ }
+ if not roots:
+ return sum(1 for t in tickets if not (rootTrackerId and t.trackerId == rootTrackerId))
+
+ adjacency: Dict[int, set[int]] = defaultdict(set)
+ for t in tickets:
+ if t.parentId is not None and t.parentId in by_id:
+ adjacency[t.id].add(t.parentId)
+ adjacency[t.parentId].add(t.id)
+ for r in t.relations:
+ for a, b in ((r.issueId, r.issueToId), (r.issueToId, r.issueId)):
+ if a in by_id and b in by_id and a != b:
+ adjacency[a].add(b)
+
+ reached: set[int] = set(roots)
+ frontier: List[int] = list(roots)
+ while frontier:
+ nxt: List[int] = []
+ for tid in frontier:
+ for neighbour in adjacency.get(tid, ()): # type: ignore[arg-type]
+ if neighbour not in reached:
+ reached.add(neighbour)
+ nxt.append(neighbour)
+ frontier = nxt
+ return sum(1 for t in tickets if t.id not in reached)
+
+
def _statusByTracker(
    tickets: List[RedmineTicketDto], schema: Optional[RedmineFieldSchemaDto]
) -> List[RedmineStatusByTrackerEntry]:
    """Per-tracker histogram of status names, largest tracker first.

    ``schema`` is accepted for signature symmetry with the other section
    builders but is currently unused here.
    """
    # Key on (trackerId, displayName) so unknown trackers group under
    # "(unbekannt)" without colliding on id alone.
    by_tracker: Dict[Tuple[Optional[int], str], Counter] = defaultdict(Counter)
    for t in tickets:
        key = (t.trackerId, t.trackerName or "(unbekannt)")
        by_tracker[key][t.statusName or "(unbekannt)"] += 1
    out: List[RedmineStatusByTrackerEntry] = []
    for (tid, tname), ctr in by_tracker.items():
        out.append(
            RedmineStatusByTrackerEntry(
                trackerId=tid,
                trackerName=tname,
                countsByStatus=dict(ctr),
                total=sum(ctr.values()),
            )
        )
    out.sort(key=lambda e: e.total, reverse=True)
    return out
+
+
def _throughput(
    tickets: List[RedmineTicketDto],
    periodFrom: Optional[_dt.datetime],
    periodTo: Optional[_dt.datetime],
    bucket: str,
) -> List[RedmineThroughputBucket]:
    """Created-vs-closed counts per time bucket, sorted by bucket key.

    Missing period bounds are derived from the min/max of all observed
    created/updated dates. Only buckets with at least one event are
    emitted (no zero-filling of gaps). ``closed`` uses ``updatedOn`` as a
    proxy for the close date -- same approximation as in ``_kpis``.
    """
    if not tickets:
        return []

    if periodFrom is None or periodTo is None:
        # Derive any missing bound from the data itself.
        all_dates: List[_dt.datetime] = []
        for t in tickets:
            for s in (t.createdOn, t.updatedOn):
                d = _parseIsoDate(s)
                if d:
                    all_dates.append(d)
        if not all_dates:
            return []
        periodFrom = periodFrom or min(all_dates)
        periodTo = periodTo or max(all_dates)

    created_counter: Counter = Counter()
    closed_counter: Counter = Counter()
    for t in tickets:
        c = _parseIsoDate(t.createdOn)
        if c and _inPeriod(c, periodFrom, periodTo):
            created_counter[_bucketKey(c, bucket)] += 1
        if t.isClosed:
            u = _parseIsoDate(t.updatedOn)
            if u and _inPeriod(u, periodFrom, periodTo):
                closed_counter[_bucketKey(u, bucket)] += 1

    # Union of keys so a bucket with only creations or only closures
    # still shows both series (the missing one as 0).
    keys: List[str] = sorted(set(created_counter) | set(closed_counter))
    if not keys:
        return []
    out: List[RedmineThroughputBucket] = []
    for key in keys:
        out.append(
            RedmineThroughputBucket(
                bucketKey=key,
                label=_bucketLabel(key, bucket),
                created=int(created_counter.get(key, 0)),
                closed=int(closed_counter.get(key, 0)),
            )
        )
    return out
+
+
def _topAssignees(
    tickets: List[RedmineTicketDto], *, limit: int = 10
) -> List[RedmineAssigneeBucket]:
    """Top ``limit`` assignees ranked by their number of open tickets.

    Closed tickets are ignored; unassigned tickets are grouped under
    "(nicht zugewiesen)".
    """
    open_counts: Counter = Counter()
    for ticket in tickets:
        if ticket.isClosed:
            continue
        open_counts[(ticket.assignedToId, ticket.assignedToName or "(nicht zugewiesen)")] += 1
    return [
        RedmineAssigneeBucket(assignedToId=assignee_id, name=assignee_name, open=count)
        for (assignee_id, assignee_name), count in open_counts.most_common(limit)
    ]
+
+
def _relationDistribution(
    tickets: List[RedmineTicketDto],
) -> List[RedmineRelationDistributionEntry]:
    """Histogram of relation types, most frequent first.

    Relations are deduplicated by relation id -- each relation presumably
    appears on both of its endpoint tickets in the loaded set.
    """
    seen: set[int] = set()
    counter: Counter = Counter()
    for t in tickets:
        for r in t.relations:
            if r.id in seen:
                continue
            seen.add(r.id)
            # Missing/empty type defaults to Redmine's generic "relates".
            counter[r.relationType or "relates"] += 1
    return [
        RedmineRelationDistributionEntry(relationType=k, count=v)
        for k, v in sorted(counter.items(), key=lambda kv: kv[1], reverse=True)
    ]
+
+
def _backlogAging(
    tickets: List[RedmineTicketDto], *, now: Optional[_dt.datetime] = None
) -> List[RedmineAgingBucket]:
    """Open tickets grouped by age in days since last activity.

    The age reference is ``updatedOn`` falling back to ``createdOn``;
    tickets with neither date are skipped. Bucket ranges are half-open
    ``[minDays, maxDays)`` with an unbounded final bucket. ``now`` is
    injectable for tests.
    """
    if now is None:
        now = _utcNow()
    buckets = [
        RedmineAgingBucket(bucketKey="lt7", label="< 7 Tage", minDays=0, maxDays=7),
        RedmineAgingBucket(bucketKey="7-30", label="7-30 Tage", minDays=7, maxDays=30),
        RedmineAgingBucket(bucketKey="30-90", label="30-90 Tage", minDays=30, maxDays=90),
        RedmineAgingBucket(bucketKey="90-180", label="90-180 Tage", minDays=90, maxDays=180),
        RedmineAgingBucket(bucketKey="gt180", label="> 180 Tage", minDays=180, maxDays=None),
    ]
    for t in tickets:
        if t.isClosed:
            continue
        ref = _parseIsoDate(t.updatedOn) or _parseIsoDate(t.createdOn)
        if ref is None:
            continue
        # Clamp to 0 so future-dated timestamps land in the first bucket.
        age_days = max(0, (now - ref).days)
        for b in buckets:
            if (b.maxDays is None and age_days >= b.minDays) or (
                b.maxDays is not None and b.minDays <= age_days < b.maxDays
            ):
                b.count += 1
                break
    return buckets
+
+
+# ---------------------------------------------------------------------------
+# Date helpers (no external deps)
+# ---------------------------------------------------------------------------
+
+def _utcNow() -> _dt.datetime:
+ """Naive UTC ``datetime`` -- the rest of the helpers compare naive
+ objects, so we strip tz info on purpose."""
+ return _dt.datetime.now(_dt.timezone.utc).replace(tzinfo=None)
+
+
+def _parseIsoDate(value: Optional[str]) -> Optional[_dt.datetime]:
+ if not value:
+ return None
+ try:
+ s = value.replace("Z", "+00:00") if isinstance(value, str) else value
+ if isinstance(s, str) and "T" not in s and len(s) == 10:
+ return _dt.datetime.strptime(s, "%Y-%m-%d")
+ return _dt.datetime.fromisoformat(s).replace(tzinfo=None)
+ except Exception:
+ try:
+ return _dt.datetime.strptime(str(value)[:10], "%Y-%m-%d")
+ except Exception:
+ return None
+
+
+def _inPeriod(
+ when: _dt.datetime,
+ fromDate: Optional[_dt.datetime],
+ toDate: Optional[_dt.datetime],
+) -> bool:
+ if fromDate and when < fromDate:
+ return False
+ if toDate and when > toDate + _dt.timedelta(days=1):
+ return False
+ return True
+
+
+def _bucketKey(when: _dt.datetime, bucket: str) -> str:
+ if bucket == "day":
+ return when.strftime("%Y-%m-%d")
+ if bucket == "month":
+ return when.strftime("%Y-%m")
+ iso_year, iso_week, _ = when.isocalendar()
+ return f"{iso_year}-W{iso_week:02d}"
+
+
+def _bucketLabel(key: str, bucket: str) -> str:
+ if bucket == "day":
+ return key
+ if bucket == "month":
+ try:
+ d = _dt.datetime.strptime(key, "%Y-%m")
+ return d.strftime("%b %Y")
+ except Exception:
+ return key
+ return key
diff --git a/modules/features/redmine/serviceRedmineStatsCache.py b/modules/features/redmine/serviceRedmineStatsCache.py
new file mode 100644
index 00000000..18a81ead
--- /dev/null
+++ b/modules/features/redmine/serviceRedmineStatsCache.py
@@ -0,0 +1,105 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""TTL-based in-memory cache for ``serviceRedmineStats`` results.
+
+The cache key is ``(featureInstanceId, dateFrom, dateTo, bucket, sorted(trackerIds))``.
+Any write through ``serviceRedmine`` (createIssue, updateIssue, deleteIssue,
+addRelation, deleteRelation) MUST call :func:`invalidateInstance` to drop
+all cached entries for that feature instance.
+
+Default TTL: 90 seconds. Override at construction or via ``setTtl``.
+"""
+
+from __future__ import annotations
+
+import threading
+import time
+from dataclasses import dataclass
+from typing import Any, Dict, Iterable, Optional, Tuple
+
+
_DEFAULT_TTL_SECONDS = 90.0


@dataclass
class _CacheEntry:
    # Cached payload plus its absolute expiry time on the monotonic clock.
    value: Any
    expiresAt: float


# (featureInstanceId, dateFrom, dateTo, bucket, trackerIds tuple)
CacheKey = Tuple[str, Optional[str], Optional[str], str, Tuple[int, ...]]


class RedmineStatsCache:
    """Thread-safe TTL cache."""

    def __init__(self, ttlSeconds: float = _DEFAULT_TTL_SECONDS) -> None:
        self._ttlSeconds = float(ttlSeconds)
        self._store: Dict[CacheKey, _CacheEntry] = {}
        self._lock = threading.Lock()

    def setTtl(self, ttlSeconds: float) -> None:
        """Override the default TTL used by subsequent ``set`` calls."""
        self._ttlSeconds = float(ttlSeconds)

    @staticmethod
    def buildKey(
        featureInstanceId: str,
        dateFrom: Optional[str],
        dateTo: Optional[str],
        bucket: str,
        trackerIds: Iterable[int],
    ) -> CacheKey:
        """Normalise the query parameters into a hashable cache key."""
        normalized_bucket = (bucket or "week").lower()
        # Sorted (not deduplicated) so key equality is order-independent.
        normalized_trackers = tuple(sorted(int(t) for t in trackerIds or []))
        return (
            str(featureInstanceId),
            dateFrom or None,
            dateTo or None,
            normalized_bucket,
            normalized_trackers,
        )

    def get(self, key: CacheKey) -> Optional[Any]:
        """Return the cached value, or ``None`` on miss or expiry."""
        now = time.monotonic()
        with self._lock:
            entry = self._store.get(key)
            if entry is None:
                return None
            if entry.expiresAt >= now:
                return entry.value
            # Expired entries are purged lazily on read.
            self._store.pop(key, None)
            return None

    def set(self, key: CacheKey, value: Any, *, ttlSeconds: Optional[float] = None) -> None:
        """Store ``value`` under ``key`` with the given (or default) TTL."""
        effective_ttl = self._ttlSeconds if ttlSeconds is None else float(ttlSeconds)
        entry = _CacheEntry(value=value, expiresAt=time.monotonic() + effective_ttl)
        with self._lock:
            self._store[key] = entry

    def invalidateInstance(self, featureInstanceId: str) -> int:
        """Drop every entry whose key starts with ``featureInstanceId``.

        Returns the number of entries dropped.
        """
        wanted = str(featureInstanceId)
        with self._lock:
            doomed = [key for key in self._store if key[0] == wanted]
            for key in doomed:
                del self._store[key]
        return len(doomed)

    def clear(self) -> None:
        """Remove every cached entry."""
        with self._lock:
            self._store.clear()

    def size(self) -> int:
        """Number of stored entries (expired-but-unpurged ones included)."""
        with self._lock:
            return len(self._store)
+
+
# Lazily-created process-wide cache instance (one per worker process).
_globalCache: Optional[RedmineStatsCache] = None


def _getStatsCache() -> RedmineStatsCache:
    """Process-wide singleton.

    NOTE(review): creation is not lock-guarded; a racing first call could
    build two instances with the last assignment winning -- presumably
    acceptable since an extra empty cache is harmless. Confirm.
    """
    global _globalCache
    if _globalCache is None:
        _globalCache = RedmineStatsCache()
    return _globalCache
diff --git a/modules/features/redmine/serviceRedmineSync.py b/modules/features/redmine/serviceRedmineSync.py
new file mode 100644
index 00000000..2d5ba2ab
--- /dev/null
+++ b/modules/features/redmine/serviceRedmineSync.py
@@ -0,0 +1,315 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Incremental Redmine -> ``poweron_redmine`` mirror sync.
+
+Strategy:
+- **Full sync** when ``RedmineInstanceConfig.lastSyncAt`` is None or
+ ``force=True`` is requested. Pulls every issue with ``status_id=*``
+ (open + closed) for the configured project, paginated.
+- **Incremental sync** otherwise. Pulls only issues whose ``updated_on``
+ is greater than ``lastSyncAt - overlapSeconds`` (default 1h overlap to
+ catch clock skew and missed updates).
+- Each issue is upserted into ``RedmineTicketMirror`` (looked up by
+ ``(featureInstanceId, redmineId)``).
+- The full set of relations attached to each issue replaces any existing
+ relation rows for that issue in ``RedmineRelationMirror``.
+
+Concurrency: a per-instance ``asyncio.Lock`` prevents two concurrent
+syncs for the same feature instance.
+
+After every successful sync the in-memory stats cache is invalidated for
+the instance.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import time
+from typing import Any, Dict, List, Optional
+
+from modules.connectors.connectorTicketsRedmine import RedmineApiError
+from modules.datamodels.datamodelUam import User
+from modules.features.redmine.datamodelRedmine import (
+ RedmineInstanceConfig,
+ RedmineRelationMirror,
+ RedmineSyncResultDto,
+ RedmineSyncStatusDto,
+ RedmineTicketMirror,
+)
+from modules.features.redmine.interfaceFeatureRedmine import getInterface
+from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
+
+logger = logging.getLogger(__name__)
+
+
+_INCREMENTAL_OVERLAP_SECONDS = 60 * 60 # 1h overlap on incremental syncs
+_DEFAULT_PAGE_SIZE = 100
+_MAX_PAGES_SAFETY = 5000 # 500k tickets safety cap
+
+_locks: Dict[str, asyncio.Lock] = {}
+
+
def _lockFor(featureInstanceId: str) -> asyncio.Lock:
    """Return the per-instance sync lock, creating it on first use."""
    return _locks.setdefault(featureInstanceId, asyncio.Lock())
+
+
+# ---------------------------------------------------------------------------
+# Public API
+# ---------------------------------------------------------------------------
+
async def runSync(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    force: bool = False,
    pageSize: int = _DEFAULT_PAGE_SIZE,
) -> RedmineSyncResultDto:
    """Run a (full or incremental) sync for the given feature instance.

    Args:
        currentUser / mandateId: access context for the feature interface.
        featureInstanceId: instance whose mirror is synchronised.
        force: when True, ignore ``lastSyncAt`` and perform a full sync.
        pageSize: Redmine pagination size forwarded to the connector.

    Raises:
        RuntimeError: when the instance has no usable connector or config.
        RedmineApiError: re-raised after the failure is recorded on the config.

    NOTE(review): the incremental path only upserts; tickets deleted in
    Redmine are not pruned from the mirror here -- confirm whether a full
    sync (or anything else) removes them.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    if not connector or not cfg:
        raise RuntimeError(
            f"Redmine instance {featureInstanceId} is not configured or inactive"
        )

    # Serialise syncs per instance; concurrent callers queue on the lock.
    async with _lockFor(featureInstanceId):
        started = time.monotonic()
        full = force or cfg.lastSyncAt is None
        updated_from_iso: Optional[str] = None
        if not full and cfg.lastSyncAt is not None:
            # Move the cursor backwards by the overlap window to absorb
            # clock skew / missed updates (see module docstring).
            cursor_epoch = max(0.0, cfg.lastSyncAt - _INCREMENTAL_OVERLAP_SECONDS)
            updated_from_iso = time.strftime(
                "%Y-%m-%dT%H:%M:%SZ", time.gmtime(cursor_epoch)
            )

        try:
            issues = await connector.listAllIssues(
                statusId="*",
                updatedOnFrom=updated_from_iso,
                pageSize=pageSize,
                maxPages=_MAX_PAGES_SAFETY,
                include=["relations"],
            )
        except RedmineApiError as e:
            # Persist the failure for the status endpoint, then re-raise.
            iface.recordSyncFailure(featureInstanceId, str(e))
            raise

        tickets_upserted = 0
        relations_upserted = 0
        now_epoch = time.time()

        for issue in issues:
            tickets_upserted += _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
            relations_upserted += _replaceRelations(iface, featureInstanceId, issue, now_epoch)

        duration_ms = int((time.monotonic() - started) * 1000)
        iface.recordSyncSuccess(
            featureInstanceId,
            full=full,
            ticketsUpserted=tickets_upserted,
            durationMs=duration_ms,
            lastSyncAt=now_epoch,
        )
        # Mirror changed -> cached stats for this instance are stale.
        _getStatsCache().invalidateInstance(featureInstanceId)

        return RedmineSyncResultDto(
            instanceId=featureInstanceId,
            full=full,
            ticketsUpserted=tickets_upserted,
            relationsUpserted=relations_upserted,
            durationMs=duration_ms,
            lastSyncAt=now_epoch,
        )
+
+
def getSyncStatus(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
) -> RedmineSyncStatusDto:
    """Snapshot of the sync bookkeeping plus current mirror row counts.

    All config-derived fields degrade to ``None`` when the instance has no
    stored config yet; the mirror counts are always computed.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    ticket_count = iface.countMirroredTickets(featureInstanceId)
    relation_count = iface.countMirroredRelations(featureInstanceId)
    return RedmineSyncStatusDto(
        instanceId=featureInstanceId,
        lastSyncAt=cfg.lastSyncAt if cfg else None,
        lastFullSyncAt=cfg.lastFullSyncAt if cfg else None,
        lastSyncDurationMs=cfg.lastSyncDurationMs if cfg else None,
        lastSyncTicketCount=cfg.lastSyncTicketCount if cfg else None,
        lastSyncErrorAt=cfg.lastSyncErrorAt if cfg else None,
        lastSyncErrorMessage=cfg.lastSyncErrorMessage if cfg else None,
        mirroredTicketCount=ticket_count,
        mirroredRelationCount=relation_count,
    )
+
+
async def upsertSingleTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> int:
    """Re-fetch one issue from Redmine and upsert it into the mirror.

    Used by the write paths in ``serviceRedmine`` so the mirror stays
    consistent after every create / update without a full sync.
    Returns the number of relation rows replaced.

    Raises:
        RuntimeError: when the instance has no usable connector.

    NOTE(review): unlike ``runSync`` this does not take the per-instance
    lock, so it can interleave with a running sync -- confirm that the
    upsert helpers tolerate that.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        raise RuntimeError("Redmine instance not configured")
    issue = await connector.getIssue(int(issueId), includeRelations=True)
    now_epoch = time.time()
    _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
    relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
    # Mirror changed -> drop cached stats for this instance.
    _getStatsCache().invalidateInstance(featureInstanceId)
    return relations_upserted
+
+
+def deleteMirroredTicket(
+    currentUser: User,
+    mandateId: Optional[str],
+    featureInstanceId: str,
+    issueId: int,
+) -> bool:
+    """Drop a ticket and its relations from the mirror after a successful Redmine DELETE.
+
+    Returns ``True`` when a mirrored ticket row was actually removed.
+    Relation cleanup and stats-cache invalidation run regardless of that
+    outcome, so a repeated call stays harmless (idempotent cleanup).
+    """
+    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
+    deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
+    iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
+    _getStatsCache().invalidateInstance(featureInstanceId)
+    return deleted
+
+
+# ---------------------------------------------------------------------------
+# Per-issue upsert helpers (sync, run inside the per-instance lock)
+# ---------------------------------------------------------------------------
+
+def _upsertTicket(
+ iface,
+ featureInstanceId: str,
+ mandateId: Optional[str],
+ issue: Dict[str, Any],
+ nowEpoch: float,
+) -> int:
+ redmine_id = issue.get("id")
+ if redmine_id is None:
+ return 0
+ statuses_lookup = (iface.getConfig(featureInstanceId).schemaCache or {}).get("statuses") or []
+ is_closed = _statusIsClosed(issue.get("status") or {}, statuses_lookup)
+ record = _ticketRecordFromIssue(issue, featureInstanceId, mandateId, is_closed, nowEpoch)
+ iface.upsertMirroredTicket(featureInstanceId, int(redmine_id), record)
+ return 1
+
+
+def _replaceRelations(
+    iface,
+    featureInstanceId: str,
+    issue: Dict[str, Any],
+    nowEpoch: float,
+) -> int:
+    """Replace all mirrored relation rows for one issue (delete-then-insert).
+
+    Returns the number of relation rows inserted. Relations without a
+    Redmine relation id are skipped.
+    """
+    issue_id = issue.get("id")
+    relations = issue.get("relations") or []
+    if issue_id is None:
+        return 0
+    # Full replace keeps the mirror consistent even when relations were
+    # removed on the Redmine side since the last sync.
+    iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issue_id))
+    inserted = 0
+    for r in relations:
+        rid = r.get("id")
+        if rid is None:
+            continue
+        iface.insertMirroredRelation(
+            featureInstanceId,
+            {
+                "featureInstanceId": featureInstanceId,
+                "redmineRelationId": int(rid),
+                "issueId": int(r.get("issue_id") or 0),
+                "issueToId": int(r.get("issue_to_id") or 0),
+                "relationType": str(r.get("relation_type") or "relates"),
+                "delay": r.get("delay"),
+                "syncedAt": nowEpoch,
+            },
+        )
+        inserted += 1
+    return inserted
+
+
+# ---------------------------------------------------------------------------
+# Pure helpers
+# ---------------------------------------------------------------------------
+
+def _statusIsClosed(status: Dict[str, Any], statusesLookup: List[Dict[str, Any]]) -> bool:
+    """Best-effort closed-flag lookup for a raw Redmine status object.
+
+    Prefers the cached schema (``statusesLookup`` rows carry ``isClosed``);
+    falls back to the ``is_closed`` field on the raw status, which Redmine
+    sets on the status object only when explicitly requested.
+    """
+    sid = status.get("id")
+    # No status id at all -> treat as open rather than guessing.
+    if sid is None:
+        return False
+    for s in statusesLookup:
+        if s.get("id") == sid:
+            return bool(s.get("isClosed"))
+    return bool(status.get("is_closed"))
+
+
+def _parseRedmineDateToEpoch(value: Optional[str]) -> Optional[float]:
+ if not value:
+ return None
+ try:
+ from datetime import datetime
+ s = value.replace("Z", "+00:00")
+ return datetime.fromisoformat(s).timestamp()
+ except Exception:
+ return None
+
+
+def _ticketRecordFromIssue(
+    issue: Dict[str, Any],
+    featureInstanceId: str,
+    mandateId: Optional[str],
+    isClosed: bool,
+    nowEpoch: float,
+) -> Dict[str, Any]:
+    """Flatten a raw Redmine issue dict into the mirror-table record shape.
+
+    Nested association objects (tracker, status, priority, ...) are unpacked
+    into scalar columns; the untouched payload is kept under ``raw`` so later
+    schema additions can be backfilled without a re-fetch. The caller
+    guarantees ``issue["id"]`` is present (see ``_upsertTicket``).
+    """
+    tracker = issue.get("tracker") or {}
+    status = issue.get("status") or {}
+    priority = issue.get("priority") or {}
+    assigned = issue.get("assigned_to") or {}
+    author = issue.get("author") or {}
+    parent = issue.get("parent") or {}
+    fixed_version = issue.get("fixed_version") or {}
+    created_on = issue.get("created_on")
+    updated_on = issue.get("updated_on")
+
+    return {
+        "featureInstanceId": featureInstanceId,
+        "mandateId": mandateId,
+        "redmineId": int(issue.get("id")),
+        "subject": str(issue.get("subject") or ""),
+        "description": str(issue.get("description") or ""),
+        "trackerId": tracker.get("id"),
+        "trackerName": tracker.get("name"),
+        "statusId": status.get("id"),
+        "statusName": status.get("name"),
+        "isClosed": bool(isClosed),
+        "priorityId": priority.get("id"),
+        "priorityName": priority.get("name"),
+        "assignedToId": assigned.get("id"),
+        "assignedToName": assigned.get("name"),
+        "authorId": author.get("id"),
+        "authorName": author.get("name"),
+        "parentId": parent.get("id"),
+        "fixedVersionId": fixed_version.get("id"),
+        "fixedVersionName": fixed_version.get("name"),
+        "createdOn": created_on,
+        "updatedOn": updated_on,
+        # Parsed epochs alongside the raw strings for range queries.
+        "createdOnTs": _parseRedmineDateToEpoch(created_on),
+        "updatedOnTs": _parseRedmineDateToEpoch(updated_on),
+        "customFields": list(issue.get("custom_fields") or []),
+        "raw": issue,
+        "syncedAt": nowEpoch,
+    }
diff --git a/modules/routes/routeSystem.py b/modules/routes/routeSystem.py
index 6cccd1f5..8ead1a6d 100644
--- a/modules/routes/routeSystem.py
+++ b/modules/routes/routeSystem.py
@@ -123,6 +123,9 @@ def _getFeatureUiObjects(featureCode: str) -> List[Dict[str, Any]]:
elif featureCode == "workspace":
from modules.features.workspace.mainWorkspace import UI_OBJECTS
return UI_OBJECTS
+ elif featureCode == "redmine":
+ from modules.features.redmine.mainRedmine import UI_OBJECTS
+ return UI_OBJECTS
else:
logger.debug(f"Skipping removed feature code: {featureCode}")
return []
diff --git a/modules/serviceCenter/services/serviceAgent/conversationManager.py b/modules/serviceCenter/services/serviceAgent/conversationManager.py
index fffb2dc4..57e169be 100644
--- a/modules/serviceCenter/services/serviceAgent/conversationManager.py
+++ b/modules/serviceCenter/services/serviceAgent/conversationManager.py
@@ -7,6 +7,7 @@ import logging
from typing import List, Dict, Any, Optional
from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolDefinition
+from modules.shared.timeUtils import getRequestNow, getRequestTimezone
logger = logging.getLogger(__name__)
@@ -322,6 +323,27 @@ def _buildSummaryPrompt(
return prompt
+def _buildTemporalContext() -> str:
+    """Inject current date/time (in the user's browser timezone) into the system prompt.
+
+    LLMs have no innate access to "now" and otherwise hallucinate from their
+    training cutoff. The browser timezone is propagated via the
+    ``X-User-Timezone`` request header (see ``api.ts`` axios interceptor and the
+    ``_requestContextMiddleware`` in ``app.py``). When called outside of an HTTP
+    context, ``getRequestNow()`` falls back to UTC.
+
+    Returns a markdown section that ``buildSystemPrompt`` prepends verbatim.
+    """
+    tz = getRequestTimezone()
+    now = getRequestNow()
+    return (
+        "## Current Date & Time\n"
+        f"- Today: {now.strftime('%Y-%m-%d (%A)')}\n"
+        f"- Now: {now.strftime('%H:%M')} ({tz})\n"
+        "- Use this for any relative time references such as \"today\", "
+        "\"yesterday\", \"last week\", \"this month\", \"Q1\", etc.\n"
+        "- Do NOT rely on your training cutoff for the current date.\n\n"
+    )
+
+
def buildSystemPrompt(
tools: List[ToolDefinition],
toolsFormatted: str = None,
@@ -342,8 +364,9 @@ def buildSystemPrompt(
)
prompt = (
- f"{langInstruction}"
- "You are an AI agent with access to tools. "
+ _buildTemporalContext()
+ + f"{langInstruction}"
+ + "You are an AI agent with access to tools. "
"Use the provided tools to accomplish the user's task. "
"Think step by step. Call tools when you need information or need to perform actions. "
"When you have enough information to answer, respond directly without calling tools.\n\n"
diff --git a/modules/serviceCenter/services/serviceAgent/featureDataAgent.py b/modules/serviceCenter/services/serviceAgent/featureDataAgent.py
index 553848ed..33a848cc 100644
--- a/modules/serviceCenter/services/serviceAgent/featureDataAgent.py
+++ b/modules/serviceCenter/services/serviceAgent/featureDataAgent.py
@@ -22,6 +22,7 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import (
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.featureDataProvider import FeatureDataProvider
from modules.shared.i18nRegistry import resolveText
+from modules.shared.timeUtils import getRequestNow, getRequestTimezone
logger = logging.getLogger(__name__)
@@ -321,9 +322,21 @@ def _buildSchemaContext(
header += f' (instance: "{instanceLabel}")'
header += "."
+ tz = getRequestTimezone()
+ now = getRequestNow()
+ temporalLines = [
+ "CURRENT DATE & TIME (use this for relative time references in filters):",
+ f" Today: {now.strftime('%Y-%m-%d (%A)')}",
+ f" Now: {now.strftime('%H:%M')} ({tz})",
+ " Resolve phrases like 'today', 'last month', 'Q1', 'this year' against THIS date.",
+ " Do NOT use your training cutoff for date filters.",
+ ]
+
parts = [
header,
"",
+ *temporalLines,
+ "",
"AVAILABLE TABLES (use EXACTLY these names as tableName parameter):",
*tableBlocks,
"",
diff --git a/modules/shared/timeUtils.py b/modules/shared/timeUtils.py
index 4d766579..79dcd762 100644
--- a/modules/shared/timeUtils.py
+++ b/modules/shared/timeUtils.py
@@ -5,14 +5,85 @@ Timezone utilities for consistent timestamp handling across the gateway.
Ensures all timestamps are properly handled as UTC.
"""
+from contextvars import ContextVar
from datetime import datetime, timezone
from typing import Optional, Any
import time
import logging
-# Configure logger
+try:
+ from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
+except ImportError:
+ ZoneInfo = None
+ ZoneInfoNotFoundError = Exception
+
logger = logging.getLogger(__name__)
+# ---------------------------------------------------------------------------
+# Per-request user timezone (set by middleware from X-User-Timezone header)
+#
+# Mirrors the i18n language ContextVar pattern in modules.shared.i18nRegistry:
+# the browser knows its IANA timezone (Intl.DateTimeFormat().resolvedOptions().timeZone),
+# the frontend axios interceptor sends it as X-User-Timezone, and the gateway
+# middleware writes it into _CURRENT_TIMEZONE for any handler/agent to read.
+#
+# Storage stays UTC everywhere (getUtcTimestamp / getIsoTimestamp). Only
+# user-visible "what is now?" decisions (AI-agent prompts, formatted display
+# strings) should consult getRequestTimezone() / getRequestNow().
+# ---------------------------------------------------------------------------
+
+_DEFAULT_REQUEST_TZ = "UTC"
+_CURRENT_TIMEZONE: ContextVar[str] = ContextVar("user_tz", default=_DEFAULT_REQUEST_TZ)
+
+
+def _setRequestTimezone(tzName: str) -> None:
+    """Set the current request's user timezone (called by gateway middleware).
+
+    Validates against zoneinfo; falls back to UTC for unknown/invalid names so
+    a malicious or stale header cannot break downstream code.
+    """
+    if not tzName or not isinstance(tzName, str):
+        _CURRENT_TIMEZONE.set(_DEFAULT_REQUEST_TZ)
+        return
+    if ZoneInfo is None:
+        # zoneinfo unavailable entirely (see import guard above) -- only UTC is safe.
+        _CURRENT_TIMEZONE.set(_DEFAULT_REQUEST_TZ)
+        return
+    try:
+        # Constructed purely for validation; zoneinfo caches instances, so
+        # the later ZoneInfo(tzName) in getRequestNow() is cheap.
+        ZoneInfo(tzName)
+    except (ZoneInfoNotFoundError, ValueError, OSError) as e:
+        logger.warning(
+            "Invalid timezone in X-User-Timezone header: %r (%s); falling back to %s",
+            tzName, type(e).__name__, _DEFAULT_REQUEST_TZ,
+        )
+        _CURRENT_TIMEZONE.set(_DEFAULT_REQUEST_TZ)
+        return
+    _CURRENT_TIMEZONE.set(tzName)
+
+
+def getRequestTimezone() -> str:
+    """Return the IANA timezone name for the current request (browser-supplied).
+
+    Defaults to ``UTC`` outside of an HTTP request context (e.g. scheduler) or
+    when the frontend did not send the header; the ContextVar default covers
+    both cases without any middleware involvement.
+    """
+    return _CURRENT_TIMEZONE.get()
+
+
+def getRequestNow() -> datetime:
+    """Return current time as a timezone-aware datetime in the request's user TZ.
+
+    Use this for **user-visible** time values (agent prompts, formatted strings).
+    Use ``getUtcNow()`` / ``getUtcTimestamp()`` for storage and DB writes.
+    Falls back to UTC when zoneinfo is unavailable or the stored name cannot
+    be resolved.
+    """
+    tzName = getRequestTimezone()
+    if ZoneInfo is None:
+        return datetime.now(timezone.utc)
+    try:
+        return datetime.now(ZoneInfo(tzName))
+    except (ZoneInfoNotFoundError, ValueError, OSError):
+        # _setRequestTimezone validates names, but tz data can still differ
+        # at lookup time -- "now" must never raise.
+        return datetime.now(timezone.utc)
+
+
def getUtcNow() -> datetime:
"""
Get current time in UTC with timezone info.
diff --git a/pytest.ini b/pytest.ini
index 0a8eb39c..18562177 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -8,9 +8,13 @@ log_file = logs/test_logs.log
log_file_level = INFO
log_file_format = %(asctime)s %(levelname)s %(message)s
log_file_date_format = %Y-%m-%d %H:%M:%S
-# Only run non-expensive tests by default, verbose log, short traceback
+# Only run non-expensive and non-live tests by default, verbose log, short traceback
# Use 'pytest -m ""' to run ALL tests.
-addopts = -v --tb=short -m 'not expensive'
+addopts = -v --tb=short -m 'not expensive and not live'
+
+markers =
+ expensive: tests that take longer than a few seconds (e.g. heavy DB or AI)
+ live: integration tests that hit a live external service (e.g. Redmine SSS sandbox)
# Suppress deprecation warnings from third-party libraries
filterwarnings =
diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/fixtures/loadRedmineSnapshot.py b/tests/fixtures/loadRedmineSnapshot.py
new file mode 100644
index 00000000..e0a501d7
--- /dev/null
+++ b/tests/fixtures/loadRedmineSnapshot.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Load ``redmineSnapshot.json`` into in-memory ``RedmineTicketDto`` objects.
+
+Used by all stats / orphan unit tests so they do not require any DB,
+HTTP or live Redmine access.
+"""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import List, Optional, Tuple
+
+from modules.features.redmine.datamodelRedmine import (
+ RedmineFieldChoiceDto,
+ RedmineFieldSchemaDto,
+ RedmineRelationDto,
+ RedmineTicketDto,
+)
+
+_SNAPSHOT_PATH = Path(__file__).parent / "redmineSnapshot.json"
+
+
+def loadSnapshot() -> Tuple[RedmineFieldSchemaDto, List[RedmineTicketDto]]:
+    """Return ``(schema, tickets)`` parsed from the JSON fixture.
+
+    Pure file I/O plus DTO construction -- no DB, HTTP or live Redmine
+    access, so the stats / orphan unit tests run hermetically.
+    """
+    with _SNAPSHOT_PATH.open("r", encoding="utf-8") as f:
+        raw = json.load(f)
+
+    schema_raw = raw.get("schema") or {}
+    trackers_raw = schema_raw.get("trackers") or []
+    schema = RedmineFieldSchemaDto(
+        projectId=str(schema_raw.get("projectId") or ""),
+        projectName=str(schema_raw.get("projectName") or ""),
+        trackers=[RedmineFieldChoiceDto(**t) for t in trackers_raw],
+        statuses=[RedmineFieldChoiceDto(**s) for s in schema_raw.get("statuses") or []],
+        priorities=[RedmineFieldChoiceDto(**p) for p in schema_raw.get("priorities") or []],
+        users=[RedmineFieldChoiceDto(**u) for u in schema_raw.get("users") or []],
+        # The fixture ships no custom fields; tests do not exercise them.
+        customFields=[],
+        # Hard-coded to match the fixture's tracker list (see _findRootTrackerId).
+        rootTrackerName="Userstory",
+        rootTrackerId=_findRootTrackerId(trackers_raw),
+    )
+
+    tickets: List[RedmineTicketDto] = []
+    for issue in raw.get("issues") or []:
+        tickets.append(
+            RedmineTicketDto(
+                id=int(issue["id"]),
+                subject=str(issue.get("subject") or ""),
+                trackerId=issue.get("trackerId"),
+                trackerName=issue.get("trackerName"),
+                statusId=issue.get("statusId"),
+                statusName=issue.get("statusName"),
+                isClosed=bool(issue.get("isClosed")),
+                priorityId=issue.get("priorityId"),
+                priorityName=issue.get("priorityName"),
+                assignedToId=issue.get("assignedToId"),
+                assignedToName=issue.get("assignedToName"),
+                parentId=issue.get("parentId"),
+                createdOn=issue.get("createdOn"),
+                updatedOn=issue.get("updatedOn"),
+                relations=[RedmineRelationDto(**r) for r in issue.get("relations") or []],
+            )
+        )
+    return schema, tickets
+
+
+def _findRootTrackerId(trackers) -> Optional[int]:
+ for t in trackers:
+ name = str(t.get("name") or "").strip().lower()
+ if name in ("userstory", "user story", "user-story"):
+ return int(t.get("id"))
+ return None
diff --git a/tests/fixtures/redmineSnapshot.json b/tests/fixtures/redmineSnapshot.json
new file mode 100644
index 00000000..dad9b6e8
--- /dev/null
+++ b/tests/fixtures/redmineSnapshot.json
@@ -0,0 +1,98 @@
+{
+ "_doc": "Synthetic Redmine snapshot for unit tests. Replace with real data via captureRedmineSnapshot.py against the SSS sandbox once the live tests are green.",
+ "schema": {
+ "projectId": "demo-project",
+ "projectName": "Demo Project",
+ "trackers": [
+ {"id": 1, "name": "Userstory"},
+ {"id": 2, "name": "Feature"},
+ {"id": 3, "name": "Acc.Crit"},
+ {"id": 4, "name": "Bug"},
+ {"id": 5, "name": "Task"}
+ ],
+ "statuses": [
+ {"id": 1, "name": "Neu", "isClosed": false},
+ {"id": 2, "name": "In Bearbeitung", "isClosed": false},
+ {"id": 3, "name": "Review", "isClosed": false},
+ {"id": 4, "name": "Erledigt", "isClosed": true},
+ {"id": 5, "name": "Geschlossen", "isClosed": true}
+ ],
+ "priorities": [
+ {"id": 1, "name": "Niedrig"},
+ {"id": 2, "name": "Normal"},
+ {"id": 3, "name": "Hoch"}
+ ],
+ "users": [
+ {"id": 11, "name": "Anna Beispiel"},
+ {"id": 12, "name": "Bruno Test"}
+ ],
+ "customFields": []
+ },
+ "issues": [
+ {
+ "id": 1001,
+ "subject": "Mandanten-Setup automatisieren",
+ "trackerId": 1, "trackerName": "Userstory",
+ "statusId": 2, "statusName": "In Bearbeitung", "isClosed": false,
+ "priorityId": 2, "priorityName": "Normal",
+ "assignedToId": 11, "assignedToName": "Anna Beispiel",
+ "createdOn": "2026-02-01T10:00:00Z", "updatedOn": "2026-04-10T09:00:00Z",
+ "relations": []
+ },
+ {
+ "id": 2001,
+ "subject": "Onboarding-Wizard UX",
+ "trackerId": 2, "trackerName": "Feature",
+ "statusId": 1, "statusName": "Neu", "isClosed": false,
+ "priorityId": 2, "priorityName": "Normal",
+ "assignedToId": 12, "assignedToName": "Bruno Test",
+ "createdOn": "2026-02-05T12:00:00Z", "updatedOn": "2026-03-01T08:00:00Z",
+ "relations": [
+ {"id": 901, "issueId": 2001, "issueToId": 1001, "relationType": "relates", "delay": null}
+ ]
+ },
+ {
+ "id": 3001,
+ "subject": "AC: Wizard-Schritt 1 muss Mandant erkennen",
+ "trackerId": 3, "trackerName": "Acc.Crit",
+ "statusId": 4, "statusName": "Erledigt", "isClosed": true,
+ "priorityId": 2, "priorityName": "Normal",
+ "assignedToId": 12, "assignedToName": "Bruno Test",
+ "parentId": 2001,
+ "createdOn": "2026-02-10T08:00:00Z", "updatedOn": "2026-04-08T15:30:00Z",
+ "relations": []
+ },
+ {
+ "id": 4001,
+ "subject": "Bug: Wizard friert ein bei leerem Mandanten",
+ "trackerId": 4, "trackerName": "Bug",
+ "statusId": 5, "statusName": "Geschlossen", "isClosed": true,
+ "priorityId": 3, "priorityName": "Hoch",
+ "assignedToId": 11, "assignedToName": "Anna Beispiel",
+ "createdOn": "2026-03-15T12:00:00Z", "updatedOn": "2026-04-12T11:00:00Z",
+ "relations": [
+ {"id": 902, "issueId": 4001, "issueToId": 2001, "relationType": "blocks", "delay": null}
+ ]
+ },
+ {
+ "id": 5001,
+ "subject": "Orphan: Refactor altes Logging-Modul",
+ "trackerId": 5, "trackerName": "Task",
+ "statusId": 1, "statusName": "Neu", "isClosed": false,
+ "priorityId": 1, "priorityName": "Niedrig",
+ "assignedToId": null, "assignedToName": null,
+ "createdOn": "2025-09-15T08:00:00Z", "updatedOn": "2025-10-01T10:00:00Z",
+ "relations": []
+ },
+ {
+ "id": 5002,
+ "subject": "Orphan: Doku Schemamigration",
+ "trackerId": 5, "trackerName": "Task",
+ "statusId": 2, "statusName": "In Bearbeitung", "isClosed": false,
+ "priorityId": 2, "priorityName": "Normal",
+ "assignedToId": 11, "assignedToName": "Anna Beispiel",
+ "createdOn": "2026-01-10T08:00:00Z", "updatedOn": "2026-02-15T10:00:00Z",
+ "relations": []
+ }
+ ]
+}
diff --git a/tests/test_service_redmine_orphans.py b/tests/test_service_redmine_orphans.py
new file mode 100644
index 00000000..f5a22c7a
--- /dev/null
+++ b/tests/test_service_redmine_orphans.py
@@ -0,0 +1,48 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Pure-Python unit tests for the orphan detection in
+``serviceRedmineStats._countOrphans``.
+
+Snapshot in ``tests/fixtures/redmineSnapshot.json`` contains:
+- 1x Userstory (1001) -- root
+- 1x Feature (2001) related to 1001 -> reachable
+- 1x Acc.Crit (3001) parent=2001 -> reachable
+- 1x Bug (4001) blocks 2001 -> reachable via relation
+- 2x Task (5001, 5002) -> orphan (no link to any User Story)
+"""
+
+from __future__ import annotations
+
+from modules.features.redmine.serviceRedmineStats import _countOrphans
+from tests.fixtures.loadRedmineSnapshot import loadSnapshot
+
+
+class TestCountOrphans:
+    """Orphan detection (_countOrphans) against the static snapshot fixture."""
+
+    def test_orphansFromSnapshot(self) -> None:
+        schema, tickets = loadSnapshot()
+        orphans = _countOrphans(tickets, schema.rootTrackerId)
+        assert orphans == 2, "Two unrelated Tasks should be orphans"
+
+    def test_emptyListReturnsZero(self) -> None:
+        assert _countOrphans([], 1) == 0
+
+    def test_noRootTrackerCountsAllAsOrphan(self) -> None:
+        schema, tickets = loadSnapshot()
+        # Pretend there is no User Story tracker at all -- every ticket is orphan.
+        assert _countOrphans(tickets, None) == len(tickets)
+
+    def test_relationDirectionAgnostic(self) -> None:
+        """A ticket reachable via the *target* side of a relation must not
+        be counted as orphan -- _countOrphans walks both directions."""
+        _, tickets = loadSnapshot()
+        bug = next(t for t in tickets if t.id == 4001)
+        # Bug 4001 -[blocks]-> 2001; it is the source. Reverse it: 2001 -[blocks]-> 4001
+        bug.relations = []
+        # Attach the relation on 2001 instead.
+        feature = next(t for t in tickets if t.id == 2001)
+        from modules.features.redmine.datamodelRedmine import RedmineRelationDto
+        feature.relations.append(
+            RedmineRelationDto(id=999, issueId=2001, issueToId=4001, relationType="blocks", delay=None)
+        )
+        orphans = _countOrphans(tickets, 1)
+        assert orphans == 2  # Tasks remain orphans, Bug is still reachable
diff --git a/tests/test_service_redmine_stats.py b/tests/test_service_redmine_stats.py
new file mode 100644
index 00000000..310c15c7
--- /dev/null
+++ b/tests/test_service_redmine_stats.py
@@ -0,0 +1,122 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Unit tests for the pure aggregation in ``serviceRedmineStats._aggregate``.
+
+These tests run the whole bucket / KPI / aging logic against the static
+fixture, with no I/O and no service / connector / DB.
+"""
+
+from __future__ import annotations
+
+import datetime as _dt
+
+from modules.features.redmine.serviceRedmineStats import (
+ _aggregate,
+ _backlogAging,
+ _bucketKey,
+ _kpis,
+ _relationDistribution,
+ _statusByTracker,
+ _throughput,
+ _topAssignees,
+)
+from tests.fixtures.loadRedmineSnapshot import loadSnapshot
+
+
+class TestKpis:
+    """Headline KPI totals and period filtering (_kpis)."""
+
+    def test_kpisCountTotalsCorrectly(self) -> None:
+        schema, tickets = loadSnapshot()
+        kpis = _kpis(tickets, schema.rootTrackerId, periodFrom=None, periodTo=None)
+        # Snapshot: 6 issues, 2 closed (3001, 4001), 2 orphan Tasks (5001, 5002).
+        assert kpis.total == 6
+        assert kpis.open == 4
+        assert kpis.closed == 2
+        assert kpis.orphans == 2
+
+    def test_periodFiltersClosedAndCreated(self) -> None:
+        schema, tickets = loadSnapshot()
+        period_from = _dt.datetime(2026, 4, 1)
+        period_to = _dt.datetime(2026, 4, 30)
+        kpis = _kpis(tickets, schema.rootTrackerId, period_from, period_to)
+        assert kpis.closedInPeriod == 2  # 3001 + 4001 closed in April
+        assert kpis.createdInPeriod == 0  # nothing was created in April
+ assert kpis.createdInPeriod == 0 # nothing was created in April
+
+
+class TestStatusByTracker:
+    """Per-tracker status breakdown (_statusByTracker)."""
+
+    def test_buildsOneEntryPerTracker(self) -> None:
+        schema, tickets = loadSnapshot()
+        rows = _statusByTracker(tickets, schema)
+        names = {r.trackerName for r in rows}
+        assert names == {"Userstory", "Feature", "Acc.Crit", "Bug", "Task"}
+        task_row = next(r for r in rows if r.trackerName == "Task")
+        assert task_row.total == 2
+        # Every Task must land in exactly one status bucket.
+        assert sum(task_row.countsByStatus.values()) == 2
+
+
+class TestThroughput:
+    """Created/closed counts per time bucket (_throughput, _bucketKey)."""
+
+    def test_bucketByMonthCountsClosed(self) -> None:
+        _schema, tickets = loadSnapshot()
+        period_from = _dt.datetime(2026, 4, 1)
+        period_to = _dt.datetime(2026, 4, 30)
+        out = _throughput(tickets, period_from, period_to, "month")
+        keys = [b.bucketKey for b in out]
+        assert "2026-04" in keys
+        april = next(b for b in out if b.bucketKey == "2026-04")
+        assert april.closed == 2
+        assert april.created == 0
+
+    def test_bucketByWeekIsoFormat(self) -> None:
+        when = _dt.datetime(2026, 4, 15)
+        key = _bucketKey(when, "week")
+        # ISO week keys look like "2026-W16".
+        assert key.startswith("2026-W")
+
+
+class TestTopAssignees:
+    """Open-workload ranking (_topAssignees) must ignore closed tickets."""
+
+    def test_excludesClosedTickets(self) -> None:
+        _schema, tickets = loadSnapshot()
+        rows = _topAssignees(tickets, limit=10)
+        names = {r.name for r in rows}
+        # Open tickets: Anna has 2 (1001, 5002), Bruno has 1 (2001),
+        # unassigned has 1 (5001); closed 3001/4001 must not count.
+        assert "Anna Beispiel" in names
+        assert "Bruno Test" in names
+        assert "(nicht zugewiesen)" in names
+
+
+class TestRelationDistribution:
+    """Relation-type histogram (_relationDistribution)."""
+
+    def test_dedupesByRelationId(self) -> None:
+        _schema, tickets = loadSnapshot()
+        rows = _relationDistribution(tickets)
+        types = {r.relationType for r in rows}
+        assert "relates" in types
+        assert "blocks" in types
+        # NOTE(review): the snapshot's relation ids are already unique, so the
+        # dedup behaviour named in the test is not actually exercised here --
+        # consider a fixture where one relation appears on both endpoints.
+        for r in rows:
+            assert r.count >= 1
+
+
+class TestBacklogAging:
+    """Age-bucketing of open tickets (_backlogAging) with a pinned 'now'."""
+
+    def test_oldOrphansLandInOlderBuckets(self) -> None:
+        _schema, tickets = loadSnapshot()
+        now = _dt.datetime(2026, 5, 1)
+        buckets = _backlogAging(tickets, now=now)
+        # Task 5001 (created 2025-09-15) is >180 days old relative to 'now'.
+        gt180 = next(b for b in buckets if b.bucketKey == "gt180")
+        assert gt180.count >= 1
+
+
+class TestAggregateEndToEnd:
+    """Full _aggregate pipeline: KPIs, tracker matrix, throughput, aging."""
+
+    def test_aggregateProducesAllSections(self) -> None:
+        schema, tickets = loadSnapshot()
+        dto = _aggregate(
+            tickets,
+            schema=schema,
+            rootTrackerId=schema.rootTrackerId,
+            dateFrom="2026-04-01",
+            dateTo="2026-04-30",
+            bucket="month",
+            trackerIdsFilter=[],
+            instanceId="test-instance",
+        )
+        assert dto.instanceId == "test-instance"
+        assert dto.kpis.total == 6
+        assert dto.kpis.orphans == 2
+        assert len(dto.statusByTracker) == 5
+        assert any(b.bucketKey == "2026-04" for b in dto.throughput)
+        # Aging buckets are ordered youngest-to-oldest with "gt180" last.
+        assert dto.backlogAging[-1].bucketKey == "gt180"
diff --git a/tests/test_service_redmine_stats_cache.py b/tests/test_service_redmine_stats_cache.py
new file mode 100644
index 00000000..35a76390
--- /dev/null
+++ b/tests/test_service_redmine_stats_cache.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2026 Patrick Motsch
+# All rights reserved.
+"""Unit tests for ``RedmineStatsCache``.
+
+Verifies TTL expiry, key composition, instance invalidation and process-wide
+singleton behaviour.
+"""
+
+from __future__ import annotations
+
+import time
+
+from modules.features.redmine.serviceRedmineStatsCache import (
+ RedmineStatsCache,
+ _getStatsCache,
+)
+
+
+class TestRedmineStatsCache:
+    """TTL cache: miss, round-trip, key normalisation, expiry, invalidation."""
+
+    def test_getReturnsNoneOnMiss(self) -> None:
+        c = RedmineStatsCache(ttlSeconds=60)
+        key = c.buildKey("inst-a", "2026-01-01", "2026-01-31", "week", [1, 2])
+        assert c.get(key) is None
+
+    def test_setAndGetRoundTrip(self) -> None:
+        c = RedmineStatsCache(ttlSeconds=60)
+        key = c.buildKey("inst-a", None, None, "week", [])
+        c.set(key, {"answer": 42})
+        assert c.get(key) == {"answer": 42}
+
+    def test_keyIsOrderInsensitiveForTrackerIds(self) -> None:
+        c = RedmineStatsCache()
+        k1 = c.buildKey("inst-a", None, None, "week", [3, 1, 2])
+        k2 = c.buildKey("inst-a", None, None, "week", [1, 2, 3])
+        assert k1 == k2
+
+    def test_ttlExpiry(self) -> None:
+        c = RedmineStatsCache(ttlSeconds=0.05)
+        key = c.buildKey("inst-a", None, None, "week", [])
+        c.set(key, "value")
+        # NOTE(review): only a 10 ms margin over the TTL -- may be flaky on a
+        # loaded CI runner; consider a monkeypatched clock instead of sleeping.
+        time.sleep(0.06)
+        assert c.get(key) is None
+
+    def test_invalidateInstanceDropsAllKeysForThatInstance(self) -> None:
+        c = RedmineStatsCache(ttlSeconds=60)
+        c.set(c.buildKey("inst-a", None, None, "week", []), "v1")
+        c.set(c.buildKey("inst-a", "2026-01-01", "2026-01-31", "month", [1]), "v2")
+        c.set(c.buildKey("inst-b", None, None, "week", []), "v3")
+        dropped = c.invalidateInstance("inst-a")
+        assert dropped == 2
+        assert c.get(c.buildKey("inst-a", None, None, "week", [])) is None
+        assert c.get(c.buildKey("inst-b", None, None, "week", [])) == "v3"
+
+    def test_singletonIsStable(self) -> None:
+        a = _getStatsCache()
+        b = _getStatsCache()
+        assert a is b