Merge pull request #140 from valueonag/int

Int
This commit is contained in:
Patrick Motsch 2026-04-22 00:03:54 +02:00 committed by GitHub
commit d3551f0287
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
55 changed files with 6193 additions and 147 deletions

8
app.py
View file

@ -519,14 +519,18 @@ from modules.auth import (
ProactiveTokenRefreshMiddleware,
)
# i18n language detection middleware (sets per-request language from Accept-Language header)
# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
# without having to thread them through every call site.
from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
from modules.shared.timeUtils import _setRequestTimezone
@app.middleware("http")
async def _i18nMiddleware(request: Request, call_next):
async def _requestContextMiddleware(request: Request, call_next):
    """Populate per-request ContextVars (language + user timezone) before dispatch.

    The language is taken from the ``Accept-Language`` header (falling back
    to ``"de"``); the timezone from the optional ``X-User-Timezone`` header.
    Downstream code consumes both via t()/resolveText() and getRequestNow()
    without explicit parameter plumbing.
    """
    acceptLang = request.headers.get("Accept-Language", "")
    lang = normalizePrimaryLanguageTag(acceptLang, "de")
    _setLanguage(lang)
    _setRequestTimezone(request.headers.get("X-User-Timezone", ""))
    return await call_next(request)
app.add_middleware(CSRFMiddleware)

View file

@ -0,0 +1,419 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine REST connector.
Async / aiohttp port of the SSS pilot client
(``pamocreate/projects/valueon/sss/project_mars/redmine-sync/code/_redmineClient.py``)
plus the read-side helpers required by ``serviceRedmine`` and
``serviceRedmineStats``.
Auth: ``X-Redmine-API-Key`` header. The key is *never* logged.
Idempotency / safety:
- ``DELETE /issues/{id}`` is often forbidden in Redmine (HTTP 403).
``deleteIssue`` returns ``False`` instead of raising in that case so
the higher layer can fall back to status-based archival.
- A small ``_throttleSeconds`` delay (default 150 ms) is awaited after
every write call to keep the SSS server happy.
"""
from __future__ import annotations
import asyncio
import logging
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import urlencode
import aiohttp
from modules.datamodels.datamodelTickets import TicketBase, TicketFieldAttribute
logger = logging.getLogger(__name__)
class RedmineApiError(RuntimeError):
    """Raised when the Redmine API returns a non-success status."""

    def __init__(self, status: int, body: str, method: str, path: str):
        # Truncate the body so huge HTML error pages don't flood the message.
        message = f"Redmine {method} {path} failed: HTTP {status} {body[:300]}"
        super().__init__(message)
        self.status = status
        self.body = body
        self.method = method
        self.path = path
class ConnectorTicketsRedmine(TicketBase):
    """Async Redmine connector. One instance per (baseUrl, apiKey, projectId)."""

    def __init__(
        self,
        *,
        baseUrl: str,
        apiKey: str,
        projectId: str,
        throttleSeconds: float = 0.15,
        timeoutSeconds: float = 30.0,
    ) -> None:
        # Fail fast on missing credentials -- every later call would fail anyway.
        if not baseUrl:
            raise ValueError("Redmine baseUrl is required")
        if not apiKey:
            raise ValueError("Redmine apiKey is required")
        self._baseUrl = baseUrl.rstrip("/")  # normalized: paths below all start with "/"
        self._apiKey = apiKey
        self._projectId = str(projectId) if projectId is not None else ""
        self._throttleSeconds = max(0.0, float(throttleSeconds))
        self._timeoutSeconds = float(timeoutSeconds)

    # ------------------------------------------------------------------
    # Low-level
    # ------------------------------------------------------------------
    def _headers(self) -> Dict[str, str]:
        """Default headers for every request; carries the API key (never logged)."""
        return {
            "X-Redmine-API-Key": self._apiKey,
            "Content-Type": "application/json",
            "Accept": "application/json",
        }

    async def _call(
        self,
        method: str,
        path: str,
        *,
        payload: Optional[Dict[str, Any]] = None,
        params: Optional[Dict[str, Any]] = None,
    ) -> Tuple[int, Optional[Dict[str, Any]], str]:
        """Single REST call. Returns ``(status, json_or_none, raw_body)``.
        Does *not* raise -- the caller decides whether a non-2xx is fatal
        (e.g. 403 on DELETE is expected and handled).
        """
        url = f"{self._baseUrl}{path}"
        if params:
            url = f"{url}?{urlencode(params)}"
        timeout = aiohttp.ClientTimeout(total=self._timeoutSeconds)
        try:
            # NOTE(review): a fresh ClientSession per call is simple but forgoes
            # connection pooling -- revisit if call volume grows.
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.request(method, url, headers=self._headers(), json=payload) as resp:
                    raw = await resp.text()
                    parsed: Optional[Dict[str, Any]] = None
                    if raw:
                        try:
                            # content_type=None: accept JSON bodies even when the
                            # server omits/mislabels the Content-Type header.
                            parsed = await resp.json(content_type=None)
                        except Exception:
                            parsed = None
                    return resp.status, parsed, raw
        except aiohttp.ClientError as e:
            logger.warning(f"Redmine {method} {path} client error: {e}")
            # -1 signals "no HTTP response at all" to the callers' status checks.
            return -1, None, f"ClientError: {e}"
        except asyncio.TimeoutError:
            logger.warning(f"Redmine {method} {path} timeout after {self._timeoutSeconds}s")
            return -1, None, "Timeout"

    @staticmethod
    def _isOk(status: int) -> bool:
        """True for any 2xx HTTP status."""
        return 200 <= status < 300

    async def _gentle(self) -> None:
        """Await the configured write throttle (no-op when configured as 0)."""
        if self._throttleSeconds > 0:
            await asyncio.sleep(self._throttleSeconds)

    # ------------------------------------------------------------------
    # Identity / health
    # ------------------------------------------------------------------
    async def whoAmI(self) -> Dict[str, Any]:
        """Return the authenticated user record; raises RedmineApiError on failure."""
        status, body, raw = await self._call("GET", "/users/current.json")
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", "/users/current.json")
        return body.get("user", {})

    # ------------------------------------------------------------------
    # Project meta -- trackers, statuses, priorities, custom fields, users
    # ------------------------------------------------------------------
    async def getTrackers(self) -> List[Dict[str, Any]]:
        """List all trackers defined on the Redmine instance; raises on failure."""
        status, body, raw = await self._call("GET", "/trackers.json")
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", "/trackers.json")
        return body.get("trackers", []) or []

    async def getStatuses(self) -> List[Dict[str, Any]]:
        """List all issue statuses defined on the Redmine instance; raises on failure."""
        status, body, raw = await self._call("GET", "/issue_statuses.json")
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", "/issue_statuses.json")
        return body.get("issue_statuses", []) or []

    async def getPriorities(self) -> List[Dict[str, Any]]:
        """List issue priorities. Best-effort: returns ``[]`` on any failure."""
        status, body, raw = await self._call(
            "GET", "/enumerations/issue_priorities.json"
        )
        if not self._isOk(status) or not body:
            return []
        return body.get("issue_priorities", []) or []

    async def getCustomFields(self) -> List[Dict[str, Any]]:
        """Requires admin privileges in Redmine. Returns ``[]`` if forbidden."""
        status, body, raw = await self._call("GET", "/custom_fields.json")
        if status == 403 or status == 401:
            logger.info("Redmine /custom_fields.json forbidden -- using per-issue field discovery")
            return []
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", "/custom_fields.json")
        return body.get("custom_fields", []) or []

    async def getProjectUsers(self) -> List[Dict[str, Any]]:
        """Unique user records from the project's memberships (``[]`` on error).

        NOTE(review): only the first 100 memberships are fetched (no
        pagination) -- confirm that is sufficient for the target projects.
        """
        status, body, raw = await self._call(
            "GET", f"/projects/{self._projectId}/memberships.json", params={"limit": 100}
        )
        if not self._isOk(status) or not body:
            return []
        members = body.get("memberships", []) or []
        users: List[Dict[str, Any]] = []
        seen: set[int] = set()
        for m in members:
            user = m.get("user")
            if not user:
                # Skip membership entries that carry no user record.
                continue
            uid = user.get("id")
            if uid in seen:
                continue
            seen.add(uid)
            users.append(user)
        return users

    async def getProjectInfo(self) -> Dict[str, Any]:
        """Return the project record for the configured project id; raises on failure."""
        status, body, raw = await self._call("GET", f"/projects/{self._projectId}.json")
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", f"/projects/{self._projectId}.json")
        return body.get("project", {})

    async def getIssueCategories(self) -> List[Dict[str, Any]]:
        """Per-project issue categories. Returns ``[]`` if the endpoint
        is forbidden or the project has no categories defined."""
        path = f"/projects/{self._projectId}/issue_categories.json"
        status, body, raw = await self._call("GET", path)
        if status in (401, 403, 404) or not self._isOk(status) or not body:
            return []
        return body.get("issue_categories", []) or []

    # ------------------------------------------------------------------
    # Issues -- read
    # ------------------------------------------------------------------
    async def getIssue(
        self, issueId: int, *, includeRelations: bool = True, includeChildren: bool = False
    ) -> Dict[str, Any]:
        """Fetch one issue incl. custom fields and journals, plus optional
        relations/children. Raises RedmineApiError on failure."""
        includes = ["custom_fields", "journals"]
        if includeRelations:
            includes.append("relations")
        if includeChildren:
            includes.append("children")
        params = {"include": ",".join(includes)}
        status, body, raw = await self._call("GET", f"/issues/{issueId}.json", params=params)
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", f"/issues/{issueId}.json")
        return body.get("issue", {})

    async def listIssues(
        self,
        *,
        trackerId: Optional[int] = None,
        statusId: Optional[str] = "*",
        updatedOnFrom: Optional[str] = None,
        updatedOnTo: Optional[str] = None,
        createdOnFrom: Optional[str] = None,
        createdOnTo: Optional[str] = None,
        assignedToId: Optional[int] = None,
        subjectContains: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
        include: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """Single-page list. Returns the raw envelope ``{issues, total_count, offset, limit}``."""
        params: Dict[str, Any] = {
            "project_id": self._projectId,
            "limit": str(limit),
            "offset": str(offset),
        }
        if statusId is not None:
            params["status_id"] = str(statusId)
        if trackerId is not None:
            params["tracker_id"] = str(trackerId)
        if assignedToId is not None:
            params["assigned_to_id"] = str(assignedToId)
        if subjectContains:
            # "~" prefix = substring match in Redmine's filter syntax.
            params["subject"] = f"~{subjectContains}"
        # Date filters use Redmine's range operators: "><from|to", ">=from", "<=to".
        if updatedOnFrom and updatedOnTo:
            params["updated_on"] = f"><{updatedOnFrom}|{updatedOnTo}"
        elif updatedOnFrom:
            params["updated_on"] = f">={updatedOnFrom}"
        elif updatedOnTo:
            params["updated_on"] = f"<={updatedOnTo}"
        if createdOnFrom and createdOnTo:
            params["created_on"] = f"><{createdOnFrom}|{createdOnTo}"
        elif createdOnFrom:
            params["created_on"] = f">={createdOnFrom}"
        elif createdOnTo:
            params["created_on"] = f"<={createdOnTo}"
        if include:
            params["include"] = ",".join(include)
        status, body, raw = await self._call("GET", "/issues.json", params=params)
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "GET", "/issues.json")
        return body

    async def listAllIssues(
        self,
        *,
        trackerId: Optional[int] = None,
        statusId: Optional[str] = "*",
        updatedOnFrom: Optional[str] = None,
        updatedOnTo: Optional[str] = None,
        createdOnFrom: Optional[str] = None,
        createdOnTo: Optional[str] = None,
        assignedToId: Optional[int] = None,
        pageSize: int = 100,
        maxPages: int = 50,
        include: Optional[List[str]] = None,
    ) -> List[Dict[str, Any]]:
        """Paginate ``listIssues`` and return all matching raw issues."""
        all_issues: List[Dict[str, Any]] = []
        offset = 0
        # maxPages caps the fetch at pageSize * maxPages issues total.
        for _page in range(maxPages):
            envelope = await self.listIssues(
                trackerId=trackerId,
                statusId=statusId,
                updatedOnFrom=updatedOnFrom,
                updatedOnTo=updatedOnTo,
                createdOnFrom=createdOnFrom,
                createdOnTo=createdOnTo,
                assignedToId=assignedToId,
                limit=pageSize,
                offset=offset,
                include=include,
            )
            page_issues = envelope.get("issues", []) or []
            all_issues.extend(page_issues)
            total = int(envelope.get("total_count") or 0)
            offset += len(page_issues)
            # Stop on an empty page or once the reported total is reached.
            if not page_issues or offset >= total:
                break
        return all_issues

    async def listRelations(self, issueId: int) -> List[Dict[str, Any]]:
        """Relations of one issue (fetched via ``getIssue`` with include=relations)."""
        issue = await self.getIssue(issueId, includeRelations=True)
        return issue.get("relations", []) or []

    # ------------------------------------------------------------------
    # Issues -- write
    # ------------------------------------------------------------------
    async def createIssue(self, fields: Dict[str, Any]) -> Dict[str, Any]:
        """Create an issue; ``project_id`` defaults to the configured project."""
        body_in = {"issue": dict(fields)}
        body_in["issue"].setdefault("project_id", self._projectId)
        status, body, raw = await self._call("POST", "/issues.json", payload=body_in)
        await self._gentle()  # throttle after every write (see module docstring)
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "POST", "/issues.json")
        return body.get("issue", {})

    async def updateIssue(
        self, issueId: int, fields: Dict[str, Any], *, notes: Optional[str] = None
    ) -> bool:
        """Update issue fields, optionally attaching a journal note.

        Returns ``True`` on success; raises RedmineApiError on a non-2xx.
        """
        body_in: Dict[str, Any] = {"issue": dict(fields)}
        if notes:
            body_in["issue"]["notes"] = notes
        status, body, raw = await self._call("PUT", f"/issues/{issueId}.json", payload=body_in)
        await self._gentle()
        if status == 204:
            # 204 No Content is the normal success answer for an update.
            return True
        if not self._isOk(status):
            raise RedmineApiError(status, raw, "PUT", f"/issues/{issueId}.json")
        return True

    async def deleteIssue(self, issueId: int) -> bool:
        """Returns ``False`` if Redmine forbids deletion (HTTP 403/401)."""
        status, body, raw = await self._call("DELETE", f"/issues/{issueId}.json")
        await self._gentle()
        if status in (200, 204):
            return True
        if status in (401, 403):
            logger.info(f"Redmine DELETE issue {issueId} forbidden ({status}) -- caller should fall back")
            return False
        raise RedmineApiError(status, raw, "DELETE", f"/issues/{issueId}.json")

    # ------------------------------------------------------------------
    # Relations -- write
    # ------------------------------------------------------------------
    async def addRelation(
        self, fromId: int, toId: int, *, relationType: str = "relates", delay: Optional[int] = None
    ) -> Dict[str, Any]:
        """Create a relation from ``fromId`` to ``toId``; returns the raw relation record."""
        rel: Dict[str, Any] = {"issue_to_id": toId, "relation_type": relationType}
        if delay is not None:
            rel["delay"] = int(delay)
        status, body, raw = await self._call(
            "POST", f"/issues/{fromId}/relations.json", payload={"relation": rel}
        )
        await self._gentle()
        if not self._isOk(status) or not body:
            raise RedmineApiError(status, raw, "POST", f"/issues/{fromId}/relations.json")
        return body.get("relation", {})

    async def deleteRelation(self, relationId: int) -> bool:
        """Delete a relation; ``False`` when forbidden (401/403), raises otherwise."""
        status, body, raw = await self._call("DELETE", f"/relations/{relationId}.json")
        await self._gentle()
        if status in (200, 204):
            return True
        if status in (401, 403):
            return False
        raise RedmineApiError(status, raw, "DELETE", f"/relations/{relationId}.json")

    # ------------------------------------------------------------------
    # TicketBase compliance (used by AI-tool path)
    # ------------------------------------------------------------------
    async def readAttributes(self) -> List[TicketFieldAttribute]:
        """Static base attributes + project custom fields (best-effort)."""
        attrs: List[TicketFieldAttribute] = [
            TicketFieldAttribute(fieldName="Subject", field="subject"),
            TicketFieldAttribute(fieldName="Description", field="description"),
            TicketFieldAttribute(fieldName="Tracker", field="tracker_id"),
            TicketFieldAttribute(fieldName="Status", field="status_id"),
            TicketFieldAttribute(fieldName="Priority", field="priority_id"),
            TicketFieldAttribute(fieldName="Assignee", field="assigned_to_id"),
            TicketFieldAttribute(fieldName="Parent", field="parent_issue_id"),
            TicketFieldAttribute(fieldName="Target Version", field="fixed_version_id"),
        ]
        try:
            cfs = await self.getCustomFields()
        except Exception:
            # Custom-field discovery is optional -- fall back to base attrs only.
            cfs = []
        for cf in cfs:
            try:
                attrs.append(
                    TicketFieldAttribute(
                        fieldName=str(cf.get("name", f"cf_{cf.get('id')}")),
                        field=f"cf_{cf.get('id')}",
                    )
                )
            except Exception:
                # A single malformed custom field must not break discovery.
                continue
        return attrs

    async def readTasks(self, *, limit: int = 0) -> List[Dict[str, Any]]:
        """Return raw issues; ``limit`` > 0 fetches a single page of that size."""
        if limit and limit > 0:
            envelope = await self.listIssues(limit=limit)
            return envelope.get("issues", []) or []
        return await self.listAllIssues()

    async def writeTasks(self, tasklist: List[Dict[str, Any]]) -> None:
        """Upsert: tasks carrying an ``id`` are updated, the rest are created."""
        for task in tasklist:
            issue_id = task.get("id")
            fields = {k: v for k, v in task.items() if k != "id"}
            if issue_id:
                await self.updateIssue(int(issue_id), fields)
            else:
                await self.createIssue(fields)

View file

@ -34,6 +34,9 @@ class _GraphApiMixin:
async def _graphPut(self, endpoint: str, data: Optional[bytes] = None) -> Dict[str, Any]:
    # Thin wrapper: PUT via _makeGraphCall with the cached access token.
    return await _makeGraphCall(self._accessToken, endpoint, "PUT", data)
async def _graphPatch(self, endpoint: str, data: Any = None) -> Dict[str, Any]:
    """Issue a PATCH request against the Graph API and return the parsed result."""
    response = await _makeGraphCall(self._accessToken, endpoint, "PATCH", data)
    return response
async def _graphDelete(self, endpoint: str) -> Dict[str, Any]:
    """Issue a DELETE request against the Graph API and return the parsed result."""
    response = await _makeGraphCall(self._accessToken, endpoint, "DELETE")
    return response
@ -82,6 +85,9 @@ async def _makeGraphCall(
elif method == "PUT":
async with session.put(url, **kwargs) as resp:
return await _handleResponse(resp)
elif method == "PATCH":
async with session.patch(url, **kwargs) as resp:
return await _handleResponse(resp)
elif method == "DELETE":
async with session.delete(url, **kwargs) as resp:
if resp.status in (200, 204):
@ -99,6 +105,10 @@ async def _makeGraphCall(
async def _handleResponse(resp: aiohttp.ClientResponse) -> Dict[str, Any]:
    """Normalize a Graph API response into a dict.

    200/201 -> parsed JSON body; 202 -> ``{"accepted": True}`` (request
    accepted for async processing, no body); 204 -> ``{}`` (no content);
    anything else -> ``{"error": "<status>: <body>"}``, also logged.
    """
    if resp.status in (200, 201):
        return await resp.json()
    if resp.status == 202:
        return {"accepted": True}
    if resp.status == 204:
        return {}
    errorText = await resp.text()
    # Lazy %-args: the message is only formatted if the record is emitted.
    logger.error("Graph API %s: %s", resp.status, errorText)
    return {"error": f"{resp.status}: {errorText}"}
@ -449,6 +459,265 @@ class OutlookAdapter(_GraphApiMixin, ServiceAdapter):
return result
return {"success": True, "draft": True, "messageId": result.get("id", "")}
# ------------------------------------------------------------------
# Reply / Reply-All / Forward
# ------------------------------------------------------------------
# Microsoft Graph distinguishes between "send-immediately" endpoints
# (``/reply``, ``/replyAll``, ``/forward``) and their "create-draft"
# counterparts (``/createReply``, ``/createReplyAll``, ``/createForward``).
# The send-immediately variant accepts a free-text ``comment`` string
# that Graph prepends to the original conversation; the createReply*
# variants return a fully-populated draft message that the caller can
# further edit (e.g. via PATCH /me/messages/{id} with a richer body)
# before posting via /send. We expose both flavours so the agent can
# choose between "draft for review" and "send right now".
async def replyToMail(
    self, messageId: str, comment: str,
    replyAll: bool = False,
) -> Dict[str, Any]:
    """Send an immediate reply (or reply-all) to an existing message.

    Keeps the conversation thread (and Outlook's ``AW:`` subject prefix)
    intact -- unlike sendMail(), which starts a brand-new conversation.
    """
    import json
    if replyAll:
        endpointAction = "replyAll"
    else:
        endpointAction = "reply"
    requestBody = json.dumps({"comment": comment}).encode("utf-8")
    response = await self._graphPost(f"me/messages/{messageId}/{endpointAction}", requestBody)
    if "error" in response:
        return response
    return {"success": True, "messageId": messageId, "action": endpointAction}
async def forwardMail(
    self, messageId: str, to: List[str], comment: str = "",
) -> Dict[str, Any]:
    """Forward an existing message to the given recipient addresses."""
    import json
    recipients = [{"emailAddress": {"address": addr}} for addr in to]
    requestBody = json.dumps({
        "comment": comment,
        "toRecipients": recipients,
    }).encode("utf-8")
    response = await self._graphPost(f"me/messages/{messageId}/forward", requestBody)
    if "error" in response:
        return response
    return {"success": True, "messageId": messageId, "action": "forward"}
async def createReplyDraft(
    self, messageId: str, comment: str = "",
    replyAll: bool = False,
) -> Dict[str, Any]:
    """Create an editable reply draft in the Drafts folder (nothing is sent)."""
    import json
    endpointAction = "createReplyAll" if replyAll else "createReply"
    if comment:
        payload = json.dumps({"comment": comment}).encode("utf-8")
    else:
        # Graph accepts an empty JSON object when there is no comment.
        payload = b"{}"
    result = await self._graphPost(f"me/messages/{messageId}/{endpointAction}", payload)
    if "error" in result:
        return result
    return {"success": True, "draft": True, "messageId": result.get("id", ""), "originalMessageId": messageId}
async def createForwardDraft(
    self, messageId: str, to: Optional[List[str]] = None, comment: str = "",
) -> Dict[str, Any]:
    """Create an editable forward draft in the Drafts folder (nothing is sent)."""
    import json
    draftBody: Dict[str, Any] = {}
    if comment:
        draftBody["comment"] = comment
    if to:
        draftBody["toRecipients"] = [{"emailAddress": {"address": addr}} for addr in to]
    if draftBody:
        payload = json.dumps(draftBody).encode("utf-8")
    else:
        payload = b"{}"
    result = await self._graphPost(f"me/messages/{messageId}/createForward", payload)
    if "error" in result:
        return result
    return {"success": True, "draft": True, "messageId": result.get("id", ""), "originalMessageId": messageId}
# ------------------------------------------------------------------
# Folder-Management & Mail-Management
# ------------------------------------------------------------------
# Mapping of Microsoft Graph "well-known folder names" plus a few common
# localized display names (DE) so the LLM can write natural names like
# "Posteingang", "Archiv", "deletedItems" without having to look up the
# opaque mailbox folder ID first.
# Maps lowercase user input (Graph well-known names plus common German
# display names) onto Graph's canonical well-known folder names.
_WELL_KNOWN_FOLDERS = {
    # Inbox
    "inbox": "inbox",
    "posteingang": "inbox",
    # Drafts
    "drafts": "drafts",
    "entwürfe": "drafts",
    "entwurf": "drafts",
    # Sent items
    "sentitems": "sentitems",
    "gesendet": "sentitems",
    "gesendete elemente": "sentitems",
    # Deleted items / trash
    "deleteditems": "deleteditems",
    "gelöscht": "deleteditems",
    "gelöschte elemente": "deleteditems",
    "papierkorb": "deleteditems",
    "trash": "deleteditems",
    # Junk / spam
    "junkemail": "junkemail",
    "spam": "junkemail",
    "junk": "junkemail",
    # Outbox
    "outbox": "outbox",
    "postausgang": "outbox",
    # Archive
    "archive": "archive",
    "archiv": "archive",
    # Mailbox root
    "msgfolderroot": "msgfolderroot",
    "root": "msgfolderroot",
}
async def listMailFolders(self) -> List[Dict[str, Any]]:
    """Return the user's top-level mail folders as a flat list of dicts.

    Each entry carries id, displayName and item counts so the caller
    (e.g. an LLM tool) never needs to know the Graph nesting model.
    Use ``_resolveFolderId()`` to map a user-provided name to a folder ID.
    """
    collected: List[Dict[str, Any]] = []
    knownIds: set = set()
    nextEndpoint: Optional[str] = "me/mailFolders?$top=100"
    # Follow @odata.nextLink pagination until exhausted (or an error occurs).
    while nextEndpoint:
        page = await self._graphGet(nextEndpoint)
        if "error" in page:
            break
        for folder in page.get("value", []):
            folderId = folder.get("id")
            if not folderId or folderId in knownIds:
                continue
            knownIds.add(folderId)
            collected.append({
                "id": folderId,
                "displayName": folder.get("displayName", ""),
                "totalItemCount": folder.get("totalItemCount", 0),
                "unreadItemCount": folder.get("unreadItemCount", 0),
                "childFolderCount": folder.get("childFolderCount", 0),
            })
        nextLink = page.get("@odata.nextLink")
        nextEndpoint = _stripGraphBase(nextLink) if nextLink else None
    return collected
async def _resolveFolderId(self, folderRef: str) -> Optional[str]:
    """Map a user-supplied folder reference onto a Graph folder ID.

    Tried in order: (1) the locale-aware well-known shortcuts -- Graph
    accepts ``inbox``, ``drafts`` etc. directly in the URL path; (2) the
    value itself when it looks like a raw Graph folder ID; (3) a
    case-insensitive ``displayName`` match over the user's mail folders.
    Returns ``None`` when nothing matches so the caller can surface a
    clear error instead of silently filing mail in the wrong place.
    """
    if not folderRef:
        return None
    candidate = folderRef.strip()
    shortcut = self._WELL_KNOWN_FOLDERS.get(candidate.lower())
    if shortcut:
        return shortcut
    # Heuristic: Graph folder IDs are long URL-safe base64 strings and
    # never contain spaces.
    if len(candidate) > 60 and " " not in candidate:
        return candidate
    wanted = candidate.lower()
    for folder in await self.listMailFolders():
        displayName = (folder.get("displayName") or "").strip().lower()
        if displayName == wanted:
            return folder.get("id")
    return None
async def moveMail(
    self, messageId: str, destinationFolder: str,
) -> Dict[str, Any]:
    """Move a message into another folder (well-known name, displayName, or folder id)."""
    import json
    targetId = await self._resolveFolderId(destinationFolder)
    if not targetId:
        return {"error": f"Folder not found: '{destinationFolder}'. Use listMailFolders to inspect available folders."}
    requestBody = json.dumps({"destinationId": targetId}).encode("utf-8")
    result = await self._graphPost(f"me/messages/{messageId}/move", requestBody)
    if "error" in result:
        return result
    return {"success": True, "messageId": result.get("id", messageId), "destinationFolder": destinationFolder}
async def copyMail(
    self, messageId: str, destinationFolder: str,
) -> Dict[str, Any]:
    """Copy a message into another folder; the original stays where it is."""
    import json
    targetId = await self._resolveFolderId(destinationFolder)
    if not targetId:
        return {"error": f"Folder not found: '{destinationFolder}'. Use listMailFolders to inspect available folders."}
    requestBody = json.dumps({"destinationId": targetId}).encode("utf-8")
    result = await self._graphPost(f"me/messages/{messageId}/copy", requestBody)
    if "error" in result:
        return result
    return {"success": True, "newMessageId": result.get("id", ""), "destinationFolder": destinationFolder}
async def archiveMail(self, messageId: str) -> Dict[str, Any]:
    """Move a message into the user's Archive folder.

    Outlook's Archive is an ordinary mail folder rather than a flag, so
    this simply delegates to :py:meth:`moveMail`.
    """
    archiveFolder = "archive"
    return await self.moveMail(messageId, archiveFolder)
async def deleteMail(
    self, messageId: str,
    *,
    hardDelete: bool = False,
) -> Dict[str, Any]:
    """Delete a message (soft by default).

    With ``hardDelete=False`` the message is moved to the ``Deleted
    Items`` folder -- the same behaviour users get from pressing Delete
    in Outlook. With ``hardDelete=True`` the message is removed
    irrecoverably; agent tools must obtain an extra confirmation before
    invoking that path.
    """
    if not hardDelete:
        return await self.moveMail(messageId, "deleteditems")
    result = await self._graphDelete(f"me/messages/{messageId}")
    if "error" in result:
        return result
    return {"success": True, "messageId": messageId, "hardDelete": True}
async def markMailAsRead(self, messageId: str) -> Dict[str, Any]:
    """Mark a message as read by patching ``isRead`` to ``true``."""
    import json
    patch = json.dumps({"isRead": True}).encode("utf-8")
    outcome = await self._graphPatch(f"me/messages/{messageId}", patch)
    if "error" in outcome:
        return outcome
    return {"success": True, "messageId": messageId, "isRead": True}
async def markMailAsUnread(self, messageId: str) -> Dict[str, Any]:
    """Mark a message as unread by patching ``isRead`` to ``false``."""
    import json
    patch = json.dumps({"isRead": False}).encode("utf-8")
    outcome = await self._graphPatch(f"me/messages/{messageId}", patch)
    if "error" in outcome:
        return outcome
    return {"success": True, "messageId": messageId, "isRead": False}
async def flagMail(
    self, messageId: str,
    *,
    flagStatus: str = "flagged",
) -> Dict[str, Any]:
    """Set or clear a message's follow-up flag.

    ``flagStatus`` must be one of the three values Microsoft Graph
    recognises for ``followupFlag.flagStatus``: ``"flagged"`` (default),
    ``"complete"`` or ``"notFlagged"``.
    """
    import json
    allowedValues = ("flagged", "complete", "notFlagged")
    if flagStatus not in allowedValues:
        return {"error": f"Invalid flagStatus '{flagStatus}'. Use one of: flagged, complete, notFlagged."}
    patch = json.dumps({"flag": {"flagStatus": flagStatus}}).encode("utf-8")
    result = await self._graphPatch(f"me/messages/{messageId}", patch)
    if "error" in result:
        return result
    return {"success": True, "messageId": messageId, "flagStatus": flagStatus}
# ---------------------------------------------------------------------------
# Teams Adapter (Stub)

View file

@ -60,7 +60,14 @@ class FileItem(PowerOnModel):
)
fileSize: int = Field(
description="Size of the file in bytes",
json_schema_extra={"label": "Dateigroesse", "frontend_type": "integer", "frontend_readonly": True, "frontend_required": False},
json_schema_extra={
"label": "Dateigroesse",
"frontend_type": "integer",
"frontend_readonly": True,
"frontend_required": False,
# Auto-scale byte units (B / KB / MB / GB / TB), right-aligned in tables.
"frontend_format": "R:b",
},
)
tags: Optional[List[str]] = Field(
default=None,

View file

@ -162,12 +162,25 @@ class Mandate(PowerOnModel):
enabled: bool = Field(
default=True,
description="Indicates whether the mandate is enabled",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False, "label": "Aktiviert"},
json_schema_extra={
"frontend_type": "checkbox",
"frontend_readonly": False,
"frontend_required": False,
"label": "Aktiviert",
# Render boolean as i18n-translatable label tuple [true, neutral, false].
"frontend_format_labels": ["Ja", "-", "Nein"],
},
)
isSystem: bool = Field(
default=False,
description="Whether this is a system mandate (e.g. root mandate). Cannot be deleted.",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False, "label": "System-Mandant"},
json_schema_extra={
"frontend_type": "checkbox",
"frontend_readonly": True,
"frontend_required": False,
"label": "System-Mandant",
"frontend_format_labels": ["Ja", "-", "Nein"],
},
)
deletedAt: Optional[float] = Field(
default=None,
@ -546,7 +559,13 @@ class User(PowerOnModel):
enabled: bool = Field(
default=True,
description="Indicates whether the user is enabled",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False, "label": "Aktiviert"},
json_schema_extra={
"frontend_type": "checkbox",
"frontend_readonly": False,
"frontend_required": False,
"label": "Aktiviert",
"frontend_format_labels": ["Ja", "-", "Nein"],
},
)
isSysAdmin: bool = Field(

View file

@ -8,18 +8,20 @@ Handles feature initialization and RBAC catalog registration.
import logging
from typing import Dict, List, Any, Optional
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
# Feature metadata
FEATURE_CODE = "chatbot"
FEATURE_LABEL = "Chatbot"
FEATURE_LABEL = t("Chatbot", context="UI")
FEATURE_ICON = "mdi-robot"
# UI Objects for RBAC catalog
UI_OBJECTS = [
{
"objectKey": "ui.feature.chatbot.conversations",
"label": "Konversationen",
"label": t("Konversationen", context="UI"),
"meta": {"area": "conversations"}
}
]
@ -28,22 +30,22 @@ UI_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.chatbot.startStream",
"label": "Chat starten (Stream)",
"label": t("Chat starten (Stream)", context="UI"),
"meta": {"endpoint": "/api/chatbot/{instanceId}/start/stream", "method": "POST"}
},
{
"objectKey": "resource.feature.chatbot.stop",
"label": "Chat stoppen",
"label": t("Chat stoppen", context="UI"),
"meta": {"endpoint": "/api/chatbot/{instanceId}/stop/{workflowId}", "method": "POST"}
},
{
"objectKey": "resource.feature.chatbot.threads",
"label": "Threads abrufen",
"label": t("Threads abrufen", context="UI"),
"meta": {"endpoint": "/api/chatbot/{instanceId}/threads", "method": "GET"}
},
{
"objectKey": "resource.feature.chatbot.delete",
"label": "Chat löschen",
"label": t("Chat löschen", context="UI"),
"meta": {"endpoint": "/api/chatbot/{instanceId}/{workflowId}", "method": "DELETE"}
},
]

View file

@ -8,26 +8,28 @@ Handles feature initialization and RBAC catalog registration.
import logging
from typing import Dict, List, Any
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
FEATURE_CODE = "commcoach"
FEATURE_LABEL = "Kommunikations-Coach"
FEATURE_LABEL = t("Kommunikations-Coach", context="UI")
FEATURE_ICON = "mdi-account-voice"
UI_OBJECTS = [
{
"objectKey": "ui.feature.commcoach.dashboard",
"label": "Dashboard",
"label": t("Dashboard", context="UI"),
"meta": {"area": "dashboard"}
},
{
"objectKey": "ui.feature.commcoach.coaching",
"label": "Arbeitsthemen",
"label": t("Arbeitsthemen", context="UI"),
"meta": {"area": "coaching"}
},
{
"objectKey": "ui.feature.commcoach.settings",
"label": "Einstellungen",
"label": t("Einstellungen", context="UI"),
"meta": {"area": "settings"}
},
]
@ -36,7 +38,7 @@ DATA_OBJECTS = [
# ── Record-Hierarchie: Context → Session → Message/Score, Context → Task ──
{
"objectKey": "data.feature.commcoach.CoachingContext",
"label": "Coaching-Kontext",
"label": t("Coaching-Kontext", context="UI"),
"meta": {
"table": "CoachingContext",
"fields": ["id", "title", "category", "status", "lastSessionAt"],
@ -46,7 +48,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.CoachingSession",
"label": "Coaching-Session",
"label": t("Coaching-Session", context="UI"),
"meta": {
"table": "CoachingSession",
"fields": ["id", "contextId", "status", "summary", "startedAt", "endedAt", "competenceScore"],
@ -58,7 +60,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.CoachingMessage",
"label": "Coaching-Nachricht",
"label": t("Coaching-Nachricht", context="UI"),
"meta": {
"table": "CoachingMessage",
"fields": ["id", "sessionId", "contextId", "role", "content", "contentType"],
@ -68,7 +70,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.CoachingScore",
"label": "Coaching-Score",
"label": t("Coaching-Score", context="UI"),
"meta": {
"table": "CoachingScore",
"fields": ["id", "sessionId", "contextId", "dimension", "score", "trend"],
@ -78,7 +80,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.CoachingTask",
"label": "Coaching-Aufgabe",
"label": t("Coaching-Aufgabe", context="UI"),
"meta": {
"table": "CoachingTask",
"fields": ["id", "contextId", "title", "status", "priority", "dueDate"],
@ -89,12 +91,12 @@ DATA_OBJECTS = [
# ── Stammdaten (sessionübergreifend, scoped per userId) ──────────────────
{
"objectKey": "data.feature.commcoach.userData",
"label": "Stammdaten",
"label": t("Stammdaten", context="UI"),
"meta": {"isGroup": True}
},
{
"objectKey": "data.feature.commcoach.CoachingUserProfile",
"label": "Benutzerprofil",
"label": t("Benutzerprofil", context="UI"),
"meta": {
"table": "CoachingUserProfile",
"group": "data.feature.commcoach.userData",
@ -103,7 +105,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.CoachingPersona",
"label": "Coaching-Persona",
"label": t("Coaching-Persona", context="UI"),
"meta": {
"table": "CoachingPersona",
"group": "data.feature.commcoach.userData",
@ -112,7 +114,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.CoachingBadge",
"label": "Coaching-Auszeichnung",
"label": t("Coaching-Auszeichnung", context="UI"),
"meta": {
"table": "CoachingBadge",
"group": "data.feature.commcoach.userData",
@ -121,7 +123,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.commcoach.*",
"label": "Alle CommCoach-Daten",
"label": t("Alle CommCoach-Daten", context="UI"),
"meta": {"wildcard": True}
},
]
@ -129,27 +131,27 @@ DATA_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.commcoach.context.create",
"label": "Kontext erstellen",
"label": t("Kontext erstellen", context="UI"),
"meta": {"endpoint": "/api/commcoach/{instanceId}/contexts", "method": "POST"}
},
{
"objectKey": "resource.feature.commcoach.context.archive",
"label": "Kontext archivieren",
"label": t("Kontext archivieren", context="UI"),
"meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/archive", "method": "POST"}
},
{
"objectKey": "resource.feature.commcoach.session.start",
"label": "Session starten",
"label": t("Session starten", context="UI"),
"meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/sessions/start", "method": "POST"}
},
{
"objectKey": "resource.feature.commcoach.session.complete",
"label": "Session abschliessen",
"label": t("Session abschliessen", context="UI"),
"meta": {"endpoint": "/api/commcoach/{instanceId}/sessions/{sessionId}/complete", "method": "POST"}
},
{
"objectKey": "resource.feature.commcoach.task.manage",
"label": "Aufgaben verwalten",
"label": t("Aufgaben verwalten", context="UI"),
"meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/tasks", "method": "POST"}
},
]

View file

@ -8,6 +8,8 @@ Minimal bootstrap for feature instance creation. Build from here.
import logging
from typing import Dict, List, Any, Optional
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
FEATURE_CODE = "graphicalEditor"
@ -21,28 +23,28 @@ REQUIRED_SERVICES = [
{"serviceKey": "clickup", "meta": {"usage": "ClickUp actions"}},
{"serviceKey": "generation", "meta": {"usage": "file.create document rendering"}},
]
FEATURE_LABEL = "Grafischer Editor"
FEATURE_LABEL = t("Grafischer Editor", context="UI")
FEATURE_ICON = "mdi-sitemap"
UI_OBJECTS = [
{
"objectKey": "ui.feature.graphicalEditor.editor",
"label": "Editor",
"label": t("Editor", context="UI"),
"meta": {"area": "editor"}
},
{
"objectKey": "ui.feature.graphicalEditor.workflows",
"label": "Workflows",
"label": t("Workflows", context="UI"),
"meta": {"area": "workflows"}
},
{
"objectKey": "ui.feature.graphicalEditor.templates",
"label": "Vorlagen",
"label": t("Vorlagen", context="UI"),
"meta": {"area": "templates"}
},
{
"objectKey": "ui.feature.graphicalEditor.workflows-tasks",
"label": "Tasks",
"label": t("Tasks", context="UI"),
"meta": {"area": "tasks"}
},
]
@ -50,17 +52,17 @@ UI_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.graphicalEditor.dashboard",
"label": "Dashboard aufrufen",
"label": t("Dashboard aufrufen", context="UI"),
"meta": {"endpoint": "/api/workflows/{instanceId}/info", "method": "GET"}
},
{
"objectKey": "resource.feature.graphicalEditor.node-types",
"label": "Node-Typen abrufen",
"label": t("Node-Typen abrufen", context="UI"),
"meta": {"endpoint": "/api/workflows/{instanceId}/node-types", "method": "GET"}
},
{
"objectKey": "resource.feature.graphicalEditor.execute",
"label": "Workflow ausführen",
"label": t("Workflow ausführen", context="UI"),
"meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"}
},
]

View file

@ -10,6 +10,7 @@ from .sharepoint import SHAREPOINT_NODES
from .clickup import CLICKUP_NODES
from .file import FILE_NODES
from .trustee import TRUSTEE_NODES
from .redmine import REDMINE_NODES
from .data import DATA_NODES
from .context import CONTEXT_NODES
@ -23,6 +24,7 @@ STATIC_NODE_TYPES = (
+ CLICKUP_NODES
+ FILE_NODES
+ TRUSTEE_NODES
+ REDMINE_NODES
+ DATA_NODES
+ CONTEXT_NODES
)

View file

@ -0,0 +1,170 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine node definitions - map to MethodRedmine actions."""
from modules.shared.i18nRegistry import t
# Static node catalog for the graphical workflow editor: each entry maps a
# canvas node onto a MethodRedmine action via the internal "_method" /
# "_action" keys. "parameters" drives the auto-generated frontend form
# ("frontendType"), and all labels/descriptions run through t() for i18n.
# NOTE(review): these t() calls pass no context= argument, unlike the RBAC
# label registrations elsewhere in this commit -- confirm that is intentional.
REDMINE_NODES = [
    # Load a single ticket from the local mirror by its Redmine id.
    {
        "id": "redmine.readTicket",
        "category": "redmine",
        "label": t("Ticket lesen"),
        "description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
        "parameters": [
            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
             "description": t("Redmine Feature-Instanz-ID")},
            {"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
             "description": t("Redmine-Ticket-ID")},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "ActionResult"}},
        "meta": {"icon": "mdi-ticket-outline", "color": "#4A6FA5", "usesAi": False},
        "_method": "redmine",
        "_action": "readTicket",
    },
    # List mirrored tickets with optional filters (tracker, status, period,
    # assignee); capped via "limit".
    {
        "id": "redmine.listTickets",
        "category": "redmine",
        "label": t("Tickets auflisten"),
        "description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
        "parameters": [
            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
             "description": t("Redmine Feature-Instanz-ID")},
            {"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
             "description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
            {"name": "status", "type": "string", "required": False, "frontendType": "text",
             "description": t("Status-Filter: open | closed | *"), "default": "*"},
            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
             "description": t("Zeitraum ab (ISO-Datum)"), "default": ""},
            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
             "description": t("Zeitraum bis (ISO-Datum)"), "default": ""},
            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Nur Tickets dieses Benutzers (ID)")},
            {"name": "limit", "type": "number", "required": False, "frontendType": "number",
             "description": t("Max. Anzahl Tickets (1-500)"), "default": 100},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "ActionResult"}},
        "meta": {"icon": "mdi-format-list-bulleted", "color": "#4A6FA5", "usesAi": False},
        "_method": "redmine",
        "_action": "listTickets",
    },
    # Create a new ticket in Redmine; per the description the mirror is
    # refreshed immediately after the write.
    {
        "id": "redmine.createTicket",
        "category": "redmine",
        "label": t("Ticket erstellen"),
        "description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
        "parameters": [
            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
             "description": t("Redmine Feature-Instanz-ID")},
            {"name": "subject", "type": "string", "required": True, "frontendType": "text",
             "description": t("Ticket-Titel")},
            {"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
             "description": t("Tracker-ID (Userstory, Feature, Task, ...)")},
            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
             "description": t("Ticket-Beschreibung"), "default": ""},
            {"name": "statusId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Status-ID (optional)")},
            {"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Prioritaet-ID (optional)")},
            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Zugewiesene Benutzer-ID (optional)")},
            {"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Uebergeordnetes Ticket (optional)")},
            # customFields is transported as a JSON string, not a structured
            # object -- the receiving action presumably parses it; verify.
            {"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
             "description": t("Custom Fields als JSON {id: value}"), "default": ""},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "ActionResult"}},
        "meta": {"icon": "mdi-ticket-plus-outline", "color": "#4A6FA5", "usesAi": False},
        "_method": "redmine",
        "_action": "createTicket",
    },
    # Update selected fields of an existing ticket; only set fields are sent.
    {
        "id": "redmine.updateTicket",
        "category": "redmine",
        "label": t("Ticket bearbeiten"),
        "description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
        "parameters": [
            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
             "description": t("Redmine Feature-Instanz-ID")},
            {"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
             "description": t("Ticket-ID")},
            {"name": "subject", "type": "string", "required": False, "frontendType": "text",
             "description": t("Neuer Titel")},
            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
             "description": t("Neue Beschreibung")},
            {"name": "trackerId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Neuer Tracker")},
            {"name": "statusId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Neuer Status")},
            {"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Neue Prioritaet")},
            {"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Neue Zuweisung")},
            {"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
             "description": t("Neues Parent-Ticket")},
            {"name": "notes", "type": "string", "required": False, "frontendType": "textarea",
             "description": t("Kommentar (Journal-Eintrag)"), "default": ""},
            {"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
             "description": t("Custom Fields als JSON {id: value}"), "default": ""},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "ActionResult"}},
        "meta": {"icon": "mdi-ticket-confirmation-outline", "color": "#4A6FA5", "usesAi": False},
        "_method": "redmine",
        "_action": "updateTicket",
    },
    # Aggregated KPIs from the mirror over an optional period, bucketed by
    # day / week / month.
    {
        "id": "redmine.getStats",
        "category": "redmine",
        "label": t("Statistik laden"),
        "description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
        "parameters": [
            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
             "description": t("Redmine Feature-Instanz-ID")},
            {"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
             "description": t("Zeitraum ab")},
            {"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
             "description": t("Zeitraum bis")},
            {"name": "bucket", "type": "string", "required": False, "frontendType": "text",
             "description": t("Bucket: day | week | month"), "default": "week"},
            {"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
             "description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "ActionResult"}},
        "meta": {"icon": "mdi-chart-bar", "color": "#4A6FA5", "usesAi": False},
        "_method": "redmine",
        "_action": "getStats",
    },
    # Pull tickets and relations from Redmine into the local mirror;
    # "force" requests a full sync (ignores lastSyncAt).
    {
        "id": "redmine.runSync",
        "category": "redmine",
        "label": t("Mirror synchronisieren"),
        "description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
        "parameters": [
            {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
             "description": t("Redmine Feature-Instanz-ID")},
            {"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
             "description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "ActionResult"}},
        "meta": {"icon": "mdi-database-sync", "color": "#4A6FA5", "usesAi": False},
        "_method": "redmine",
        "_action": "runSync",
    },
]

View file

@ -8,18 +8,20 @@ Handles feature initialization and RBAC catalog registration.
import logging
from typing import Dict, List, Any
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
# Feature metadata
FEATURE_CODE = "neutralization"
FEATURE_LABEL = "Neutralisierung"
FEATURE_LABEL = t("Neutralisierung", context="UI")
FEATURE_ICON = "mdi-shield-check"
# UI Objects for RBAC catalog
UI_OBJECTS = [
{
"objectKey": "ui.feature.neutralization.playground",
"label": "Spielwiese",
"label": t("Spielwiese", context="UI"),
"meta": {"area": "playground"}
}
]
@ -28,17 +30,17 @@ UI_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.neutralization.process.text",
"label": "Text verarbeiten",
"label": t("Text verarbeiten", context="UI"),
"meta": {"endpoint": "/api/neutralization/process/text", "method": "POST"}
},
{
"objectKey": "resource.feature.neutralization.process.files",
"label": "Dateien verarbeiten",
"label": t("Dateien verarbeiten", context="UI"),
"meta": {"endpoint": "/api/neutralization/process/files", "method": "POST"}
},
{
"objectKey": "resource.feature.neutralization.config.update",
"label": "Konfiguration aktualisieren",
"label": t("Konfiguration aktualisieren", context="UI"),
"meta": {"endpoint": "/api/neutralization/config", "method": "PUT"}
},
]

View file

@ -8,30 +8,29 @@ This module also handles feature initialization and RBAC catalog registration.
import logging
# Feature metadata for RBAC catalog
from modules.shared.i18nRegistry import t
FEATURE_CODE = "realestate"
FEATURE_LABEL = "Immobilien"
FEATURE_LABEL = t("Immobilien", context="UI")
FEATURE_ICON = "mdi-home-city"
# UI Objects for RBAC catalog (only map view)
UI_OBJECTS = [
{
"objectKey": "ui.feature.realestate.dashboard",
"label": "Karte",
"label": t("Karte", context="UI"),
"meta": {"area": "dashboard"}
},
]
# Resource Objects for RBAC catalog
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.realestate.project.create",
"label": "Projekt erstellen",
"label": t("Projekt erstellen", context="UI"),
"meta": {"endpoint": "/api/realestate/project", "method": "POST"}
},
{
"objectKey": "resource.feature.realestate.project.delete",
"label": "Projekt löschen",
"label": t("Projekt löschen", context="UI"),
"meta": {"endpoint": "/api/realestate/project/{projectId}", "method": "DELETE"}
},
]

View file

@ -0,0 +1,3 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine feature container -- ticket browser, statistics, AI tools."""

View file

@ -0,0 +1,559 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine feature data models.
Two layers:
1. **Persisted** (``PowerOnModel``, auto-DDL into ``poweron_redmine``):
- ``RedmineInstanceConfig``: per-feature-instance connection + sync state.
- ``RedmineTicketMirror``: local mirror of a Redmine issue.
- ``RedmineRelationMirror``: local mirror of an issue relation.
2. **Transport** (plain Pydantic): ``Redmine*Dto`` returned over the
REST API and shared with the AI tools. The frontend (``RedmineStatsPage``)
maps the raw ``RedmineStatsDto`` buckets onto ``ReportSection`` for
``FormGeneratorReport``.
Scale: the mirror tables let us aggregate stats and render the ticket tree
for projects with 20k+ tickets without round-tripping the Redmine REST API
on every request.
"""
import uuid
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Field, model_validator
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.i18nRegistry import i18nModel
def _coerceNoneToDefaults(cls, values):
"""Replace None values with each field's declared default.
Reason: Postgres rows written before we added a column return NULL for
that column, which Pydantic v2 rejects for non-Optional fields even if
a default is declared. We only apply the default when the incoming
value is explicitly None AND the field has a default (not a
default_factory that would generate a new value).
"""
if not isinstance(values, dict):
return values
for name, field in cls.model_fields.items():
if name in values and values[name] is None and field.default is not None:
values[name] = field.default
return values
# ---------------------------------------------------------------------------
# Persisted: per feature-instance Redmine connection config + sync state
# ---------------------------------------------------------------------------
@i18nModel("Redmine-Verbindung")
class RedmineInstanceConfig(PowerOnModel):
    """Per feature-instance Redmine connection config.
    The API key is stored encrypted (``encryptValue`` keyed
    ``"redmineApiKey"``). It is never returned to the frontend in plain
    text -- the route returns a boolean ``hasApiKey`` flag instead.
    """
    # Coerce NULL columns from pre-migration DB rows back to their declared
    # defaults before Pydantic validation (see _coerceNoneToDefaults).
    @model_validator(mode="before")
    @classmethod
    def _applyDefaults(cls, values):
        return _coerceNoneToDefaults(cls, values)
    # ---- Identity / ownership ---------------------------------------------
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()),
        description="Primary key",
        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
    )
    featureInstanceId: str = Field(
        description="FK -> FeatureInstance.id (1:1 per instance)",
        json_schema_extra={
            "label": "Feature-Instanz",
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": True,
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
        },
    )
    mandateId: Optional[str] = Field(
        default=None,
        description="Mandate ID (auto-set from feature instance)",
        json_schema_extra={
            "label": "Mandant",
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": False,
            "fk_target": {"db": "poweron_app", "table": "Mandate"},
        },
    )
    # ---- Connection parameters --------------------------------------------
    baseUrl: str = Field(
        default="",
        description="Redmine base URL, e.g. https://redmine.logobject.ch",
        json_schema_extra={"label": "Basis-URL", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
    )
    projectId: str = Field(
        default="",
        description="Redmine numeric project id or identifier (slug)",
        json_schema_extra={"label": "Projekt-ID", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
    )
    # Never exposed to the frontend (frontend_hidden); see class docstring.
    encryptedApiKey: str = Field(
        default="",
        description="Encrypted Redmine API key (X-Redmine-API-Key)",
        json_schema_extra={"label": "API-Key (verschluesselt)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
    )
    rootTrackerName: str = Field(
        default="Userstory",
        description="Name of the tracker used as the tree root in the browser. Set explicitly in config; resolved against the live tracker list at runtime.",
        json_schema_extra={"label": "Wurzel-Tracker (Name)", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
    )
    defaultPeriodValue: Optional[Dict[str, Any]] = Field(
        default=None,
        description="Optional snapshot of a frontend ``PeriodValue`` ({preset, fromDate, toDate}) used as default period when the user opens the feature.",
        json_schema_extra={"label": "Standard-Zeitraum", "frontend_type": "json", "frontend_readonly": False, "frontend_required": False},
    )
    # ---- Schema cache (project meta, TTL-bounded) -------------------------
    schemaCache: Optional[Dict[str, Any]] = Field(
        default=None,
        description="Cached project meta: {trackers:[{id,name}], statuses:[{id,name,isClosed}], customFields:[{id,name,fieldFormat,possibleValues}], priorities:[...], users:[{id,name}]}",
        json_schema_extra={"label": "Schema-Cache", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
    )
    schemaCachedAt: Optional[float] = Field(
        default=None,
        description="UTC timestamp when schemaCache was last refreshed",
        json_schema_extra={"label": "Schema-Cache-Zeit", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
    # Optional (not plain int) so NULL rows created by auto-DDL upgrades
    # still validate; see the field description.
    schemaCacheTtlSeconds: Optional[int] = Field(
        default=24 * 60 * 60,
        description="Schema cache TTL in seconds (default 24h). Optional to tolerate NULL rows from auto-DDL upgrades.",
        json_schema_extra={"label": "Schema-Cache-TTL (s)", "frontend_type": "number", "frontend_readonly": False, "frontend_required": False},
    )
    isActive: Optional[bool] = Field(
        default=True,
        description="Whether this connection is active",
        json_schema_extra={"label": "Aktiv", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
    )
    lastConnectedAt: Optional[float] = Field(
        default=None,
        description="Timestamp of the last successful whoAmI() call",
        json_schema_extra={"label": "Letzte Verbindung", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
    # ---- Sync state (incremental ticket mirror) ---------------------------
    lastSyncAt: Optional[float] = Field(
        default=None,
        description="UTC timestamp of the last successful (incremental) mirror sync",
        json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
    lastFullSyncAt: Optional[float] = Field(
        default=None,
        description="UTC timestamp of the last full mirror sync (force=true)",
        json_schema_extra={"label": "Letzter Full-Sync", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
    lastSyncDurationMs: Optional[int] = Field(
        default=None,
        description="Duration of the last sync in milliseconds",
        json_schema_extra={"label": "Letzte Sync-Dauer (ms)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
    )
    lastSyncTicketCount: Optional[int] = Field(
        default=None,
        description="Number of tickets upserted in the last sync",
        json_schema_extra={"label": "Tickets im letzten Sync", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
    )
    lastSyncErrorAt: Optional[float] = Field(
        default=None,
        description="UTC timestamp of the last failed sync",
        json_schema_extra={"label": "Letzter Sync-Fehler", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
    lastSyncErrorMessage: Optional[str] = Field(
        default=None,
        description="Error message of the last failed sync",
        json_schema_extra={"label": "Letzter Fehler", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
    )
@i18nModel("Redmine-Ticket (Mirror)")
class RedmineTicketMirror(PowerOnModel):
    """Local mirror of a Redmine issue.
    Composite uniqueness: ``(featureInstanceId, redmineId)``. We do not
    enforce it via a DB constraint -- the sync logic looks up by these
    two columns and does an upsert.
    """
    # Coerce NULL columns from pre-migration DB rows back to their declared
    # defaults before Pydantic validation (see _coerceNoneToDefaults).
    @model_validator(mode="before")
    @classmethod
    def _applyDefaults(cls, values):
        return _coerceNoneToDefaults(cls, values)
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()),
        description="Primary key",
        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
    )
    featureInstanceId: str = Field(
        description="FK -> FeatureInstance.id",
        json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
                           "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
    )
    mandateId: Optional[str] = Field(
        default=None,
        json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
    )
    redmineId: int = Field(
        description="Redmine issue id (unique per feature instance)",
        json_schema_extra={"label": "Redmine-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
    )
    # ---- Denormalised issue attributes (id + display name pairs) ----------
    subject: str = Field(default="", json_schema_extra={"label": "Titel", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
    description: str = Field(default="", json_schema_extra={"label": "Beschreibung", "frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False})
    trackerId: Optional[int] = Field(default=None, json_schema_extra={"label": "Tracker-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    trackerName: Optional[str] = Field(default=None, json_schema_extra={"label": "Tracker", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    statusId: Optional[int] = Field(default=None, json_schema_extra={"label": "Status-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    statusName: Optional[str] = Field(default=None, json_schema_extra={"label": "Status", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    isClosed: bool = Field(default=False, json_schema_extra={"label": "Geschlossen", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False})
    priorityId: Optional[int] = Field(default=None, json_schema_extra={"label": "Prio-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    priorityName: Optional[str] = Field(default=None, json_schema_extra={"label": "Prioritaet", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    assignedToId: Optional[int] = Field(default=None, json_schema_extra={"label": "Zuweisung-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    assignedToName: Optional[str] = Field(default=None, json_schema_extra={"label": "Zuweisung", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    authorId: Optional[int] = Field(default=None, json_schema_extra={"label": "Autor-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    authorName: Optional[str] = Field(default=None, json_schema_extra={"label": "Autor", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    parentId: Optional[int] = Field(default=None, json_schema_extra={"label": "Parent-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    fixedVersionId: Optional[int] = Field(default=None, json_schema_extra={"label": "Zielversion-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    fixedVersionName: Optional[str] = Field(default=None, json_schema_extra={"label": "Zielversion", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    categoryId: Optional[int] = Field(default=None, json_schema_extra={"label": "Kategorie-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False})
    categoryName: Optional[str] = Field(default=None, json_schema_extra={"label": "Kategorie", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    # ---- Timestamps (ISO strings from Redmine + parsed epochs for SQL) ----
    closedOnTs: Optional[float] = Field(
        default=None,
        description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
        json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
    )
    createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
    createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
                                         json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
    updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
                                         json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
    # ---- Raw payload + sync bookkeeping -----------------------------------
    customFields: Optional[List[Dict[str, Any]]] = Field(
        default=None,
        description="List of {id,name,value} as returned by Redmine; stored as JSON",
        json_schema_extra={"label": "Custom Fields", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False},
    )
    raw: Optional[Dict[str, Any]] = Field(
        default=None,
        description="Original Redmine issue payload (full)",
        json_schema_extra={"label": "Roh-Payload", "frontend_type": "json", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
    )
    syncedAt: Optional[float] = Field(
        default=None,
        description="UTC epoch when this row was last upserted from Redmine",
        json_schema_extra={"label": "Synced At", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
@i18nModel("Redmine-Beziehung (Mirror)")
class RedmineRelationMirror(PowerOnModel):
    """Local mirror of a Redmine issue relation.
    Composite uniqueness: ``(featureInstanceId, redmineRelationId)``.
    """
    # Coerce NULL columns from pre-migration DB rows back to their declared
    # defaults before Pydantic validation (see _coerceNoneToDefaults).
    @model_validator(mode="before")
    @classmethod
    def _applyDefaults(cls, values):
        return _coerceNoneToDefaults(cls, values)
    id: str = Field(
        default_factory=lambda: str(uuid.uuid4()),
        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
    )
    featureInstanceId: str = Field(
        description="FK -> FeatureInstance.id",
        json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
                           "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
    )
    redmineRelationId: int = Field(
        description="Redmine relation id (unique per feature instance)",
        json_schema_extra={"label": "Relation-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
    )
    issueId: int = Field(
        description="Source issue id (issue.id from Redmine)",
        json_schema_extra={"label": "Source-Issue-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
    )
    issueToId: int = Field(
        description="Target issue id (issue_to_id from Redmine)",
        json_schema_extra={"label": "Target-Issue-ID", "frontend_type": "number", "frontend_readonly": True, "frontend_required": True},
    )
    relationType: str = Field(
        default="relates",
        json_schema_extra={"label": "Beziehungstyp", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
    )
    # Delay in days; Redmine only sets this for precedes/follows relations.
    delay: Optional[int] = Field(
        default=None,
        json_schema_extra={"label": "Verzoegerung (Tage)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False},
    )
    syncedAt: Optional[float] = Field(
        default=None,
        json_schema_extra={"label": "Synced At", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False},
    )
# ---------------------------------------------------------------------------
# Transport DTOs (not persisted)
# ---------------------------------------------------------------------------
class RedmineRelationDto(BaseModel):
    """Transport DTO for a Redmine issue relation (not persisted)."""
    id: int = Field(description="Relation id")
    issueId: int = Field(description="Source issue id (issue.id from Redmine)")
    issueToId: int = Field(description="Target issue id (issue_to_id from Redmine)")
    relationType: str = Field(description="relates | precedes | follows | blocks | blocked | duplicates | duplicated | copied_to | copied_from | parent")
    delay: Optional[int] = Field(default=None, description="Delay in days (precedes/follows only)")
class RedmineCustomFieldValueDto(BaseModel):
    """A single custom-field value on an issue, as Redmine returns it."""
    id: int
    name: str
    # Any: Redmine custom fields can be scalars or lists depending on the
    # field format (see RedmineCustomFieldSchemaDto.multiple).
    value: Any = None
class RedmineTicketDto(BaseModel):
    """Normalised Redmine issue used by the UI and the AI tools.

    The paired ``*Id`` / ``*Name`` fields flatten Redmine's nested
    ``{"id": ..., "name": ...}`` objects -- presumably filled together by the
    connector; confirm against the mapping code.
    """
    id: int = Field(description="Redmine issue id")
    subject: str = Field(default="")
    description: str = Field(default="")
    trackerId: Optional[int] = None
    trackerName: Optional[str] = None
    statusId: Optional[int] = None
    statusName: Optional[str] = None
    # Convenience flag so consumers need not resolve the status schema.
    isClosed: bool = False
    priorityId: Optional[int] = None
    priorityName: Optional[str] = None
    assignedToId: Optional[int] = None
    assignedToName: Optional[str] = None
    authorId: Optional[int] = None
    authorName: Optional[str] = None
    parentId: Optional[int] = None
    fixedVersionId: Optional[int] = None
    fixedVersionName: Optional[str] = None
    categoryId: Optional[int] = None
    categoryName: Optional[str] = None
    # Timestamps are kept as the strings Redmine delivers (not parsed here).
    createdOn: Optional[str] = None
    updatedOn: Optional[str] = None
    customFields: List[RedmineCustomFieldValueDto] = Field(default_factory=list)
    relations: List[RedmineRelationDto] = Field(default_factory=list)
    # Optional untouched source payload for debugging / power users.
    raw: Optional[Dict[str, Any]] = None
class RedmineFieldChoiceDto(BaseModel):
    """Generic id/name choice (tracker, status, priority, user, category)."""
    id: int
    name: str
    isClosed: Optional[bool] = Field(default=None, description="Status only: closed-state flag")
class RedmineCustomFieldSchemaDto(BaseModel):
    """Schema of one Redmine custom field (definition, not a value)."""
    id: int
    name: str
    # Redmine field format identifier, e.g. "string"; other formats exist.
    fieldFormat: str = Field(default="string")
    isRequired: bool = False
    possibleValues: List[str] = Field(default_factory=list)
    # True if the field accepts multiple values.
    multiple: bool = False
    defaultValue: Optional[str] = None
class RedmineFieldSchemaDto(BaseModel):
    """Project meta returned by ``getProjectMeta``.

    Bundles every choice list the frontend needs to render ticket forms
    in one payload.
    """
    projectId: str
    projectName: str = ""
    trackers: List[RedmineFieldChoiceDto] = Field(default_factory=list)
    statuses: List[RedmineFieldChoiceDto] = Field(default_factory=list)
    priorities: List[RedmineFieldChoiceDto] = Field(default_factory=list)
    users: List[RedmineFieldChoiceDto] = Field(default_factory=list)
    categories: List[RedmineFieldChoiceDto] = Field(
        default_factory=list,
        description="Per-project Redmine issue categories. Empty if the project has none defined or if the API key is not allowed to list them.",
    )
    customFields: List[RedmineCustomFieldSchemaDto] = Field(default_factory=list)
    # Configured root tracker; rootTrackerId is its resolved id (see below).
    rootTrackerName: str = "Userstory"
    rootTrackerId: Optional[int] = Field(default=None, description="Resolved id of the configured rootTrackerName, or None if no matching tracker exists")
# ---------------------------------------------------------------------------
# Stats DTO -- raw buckets, mapped to ReportSection in the frontend
# ---------------------------------------------------------------------------
class RedmineStatsKpis(BaseModel):
    """Headline KPI counters for the Statistics page."""
    total: int = 0
    open: int = 0
    closed: int = 0
    # Period-scoped counters; the period comes from RedmineStatsDto.dateFrom/dateTo.
    closedInPeriod: int = 0
    createdInPeriod: int = 0
    orphans: int = 0
class RedmineStatusByTrackerEntry(BaseModel):
    """Per-tracker status distribution (status name -> ticket count)."""
    trackerId: Optional[int] = None
    trackerName: str = ""
    countsByStatus: Dict[str, int] = Field(default_factory=dict)
    total: int = 0
class RedmineAssigneeBucket(BaseModel):
    """Open-ticket count for one assignee; None id = unassigned bucket."""
    assignedToId: Optional[int] = None
    name: str = "(nicht zugewiesen)"
    open: int = 0
class RedmineRelationDistributionEntry(BaseModel):
    """How often one relation type occurs across the mirrored relations."""
    relationType: str
    count: int = 0
class RedmineAgingBucket(BaseModel):
    """Backlog-aging histogram bucket; maxDays None = open-ended last bucket."""
    bucketKey: str
    label: str
    minDays: int
    maxDays: Optional[int] = None
    count: int = 0
class RedmineThroughputBucket(BaseModel):
    """Per-bucket snapshot used by the Stats page.
    ``created`` / ``closed`` keep the per-bucket flow numbers (still useful
    for callers that want raw deltas), while ``cumTotal`` / ``cumOpen``
    expose the cumulative snapshot the UI actually plots:
    - ``cumTotal`` = number of tickets that exist as of the END of this
      bucket (= count of tickets created on or before bucket end).
    - ``cumOpen`` = of those, how many are still open at bucket end (i.e.
      not yet closed).
    """
    bucketKey: str
    label: str
    created: int = 0
    closed: int = 0
    cumTotal: int = 0
    cumOpen: int = 0
class RedmineStatsDto(BaseModel):
    """All sections needed by the Statistics page in one round-trip.

    The first block of fields echoes the query filters that produced the
    stats so the frontend can display / cache them alongside the data.
    """
    instanceId: str
    dateFrom: Optional[str] = None
    dateTo: Optional[str] = None
    bucket: str = "week"
    trackerIds: List[int] = Field(default_factory=list)
    categoryIds: List[int] = Field(default_factory=list)
    # "*" = no status filtering applied.
    statusFilter: str = "*"
    kpis: RedmineStatsKpis = Field(default_factory=RedmineStatsKpis)
    statusByTracker: List[RedmineStatusByTrackerEntry] = Field(default_factory=list)
    throughput: List[RedmineThroughputBucket] = Field(default_factory=list)
    topAssignees: List[RedmineAssigneeBucket] = Field(default_factory=list)
    relationDistribution: List[RedmineRelationDistributionEntry] = Field(default_factory=list)
    backlogAging: List[RedmineAgingBucket] = Field(default_factory=list)
# ---------------------------------------------------------------------------
# Sync DTO
# ---------------------------------------------------------------------------
class RedmineSyncResultDto(BaseModel):
    """Outcome of one mirror sync run; ``error`` is set instead of raising."""
    instanceId: str
    full: bool = Field(description="True if a full sync was performed (no incremental cursor)")
    ticketsUpserted: int = 0
    relationsUpserted: int = 0
    durationMs: int = 0
    # Unix timestamp (float seconds) of this sync.
    lastSyncAt: float
    error: Optional[str] = None
class RedmineSyncStatusDto(BaseModel):
    """Current sync bookkeeping for one instance (timestamps are Unix floats)."""
    instanceId: str
    lastSyncAt: Optional[float] = None
    lastFullSyncAt: Optional[float] = None
    lastSyncDurationMs: Optional[int] = None
    lastSyncTicketCount: Optional[int] = None
    lastSyncErrorAt: Optional[float] = None
    lastSyncErrorMessage: Optional[str] = None
    # Row counts of the local mirror tables.
    mirroredTicketCount: int = 0
    mirroredRelationCount: int = 0
# ---------------------------------------------------------------------------
# Request bodies
# ---------------------------------------------------------------------------
class RedmineConfigUpdateRequest(BaseModel):
    """PUT body for the config endpoint. Fields are all optional -- only
    provided ones are updated. ``apiKey`` is encrypted before persistence."""
    baseUrl: Optional[str] = None
    projectId: Optional[str] = None
    apiKey: Optional[str] = Field(default=None, description="Plain api key; will be encrypted server-side")
    rootTrackerName: Optional[str] = None
    defaultPeriodValue: Optional[Dict[str, Any]] = None
    schemaCacheTtlSeconds: Optional[int] = None
    isActive: Optional[bool] = None
class RedmineConfigDto(BaseModel):
    """Frontend-safe view of the config (no plain api key).

    ``hasApiKey`` replaces the encrypted secret so the UI can show
    "configured" without ever receiving key material.
    """
    id: Optional[str] = None
    featureInstanceId: str
    mandateId: Optional[str] = None
    baseUrl: str = ""
    projectId: str = ""
    hasApiKey: bool = False
    rootTrackerName: str = "Userstory"
    defaultPeriodValue: Optional[Dict[str, Any]] = None
    # 24h default TTL for the cached project schema.
    schemaCacheTtlSeconds: int = 24 * 60 * 60
    schemaCachedAt: Optional[float] = None
    isActive: bool = True
    # Connection / sync bookkeeping (Unix timestamps).
    lastConnectedAt: Optional[float] = None
    lastSyncAt: Optional[float] = None
    lastFullSyncAt: Optional[float] = None
    lastSyncTicketCount: Optional[int] = None
    lastSyncErrorMessage: Optional[str] = None
class RedmineTicketUpdateRequest(BaseModel):
    """Body for ``PUT /tickets/{id}``.

    All fields optional -- only provided ones are sent to Redmine.
    """
    subject: Optional[str] = None
    description: Optional[str] = None
    trackerId: Optional[int] = None
    statusId: Optional[int] = None
    priorityId: Optional[int] = None
    assignedToId: Optional[int] = None
    parentIssueId: Optional[int] = None
    fixedVersionId: Optional[int] = None
    # Journal note appended to the issue on update.
    notes: Optional[str] = None
    # Keyed by custom-field id.
    customFields: Optional[Dict[int, Any]] = None
class RedmineTicketCreateRequest(BaseModel):
    """Body for ``POST /tickets``. ``subject`` and ``trackerId`` are mandatory."""
    subject: str
    trackerId: int
    description: Optional[str] = ""
    statusId: Optional[int] = None
    priorityId: Optional[int] = None
    assignedToId: Optional[int] = None
    parentIssueId: Optional[int] = None
    fixedVersionId: Optional[int] = None
    # Keyed by custom-field id.
    customFields: Optional[Dict[int, Any]] = None
class RedmineRelationCreateRequest(BaseModel):
    """Body for ``POST /tickets/{id}/relations``."""
    issueToId: int
    relationType: str = Field(default="relates")
    # Delay in days; only meaningful for precedes/follows relations.
    delay: Optional[int] = None

View file

@ -0,0 +1,449 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Interface for the Redmine feature.
Owns:
- Database connection to ``poweron_redmine``
- CRUD on ``RedmineInstanceConfig`` (one row per FeatureInstance)
- Encryption of the API key (``encryptValue`` keyed ``"redmineApiKey"``)
- Resolution of the active config to a ``ConnectorTicketsRedmine`` instance
"""
from __future__ import annotations
import logging
import time
from typing import Any, Dict, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.connectors.connectorTicketsRedmine import ConnectorTicketsRedmine
from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
RedmineConfigDto,
RedmineConfigUpdateRequest,
RedmineInstanceConfig,
RedmineRelationMirror,
RedmineTicketMirror,
)
from modules.security.rbac import RbacClass
from modules.shared.configuration import APP_CONFIG, decryptValue, encryptValue
from modules.shared.dbRegistry import registerDatabase
logger = logging.getLogger(__name__)
# Feature-owned database; registerDatabase announces the name to the shared
# dbRegistry (see modules.shared.dbRegistry import above).
redmineDatabase = "poweron_redmine"
registerDatabase(redmineDatabase)
# Process-wide cache of RedmineObjects, keyed per mandate/instance/user by
# getInterface() at the bottom of this module.
_redmineInterfaces: Dict[str, "RedmineObjects"] = {}
class RedmineObjects:
    """Per-user, per-instance Redmine interface.

    Owns the ``poweron_redmine`` connection and provides CRUD on the
    instance config plus the ticket / relation mirror tables. Instances
    are cached and re-bound per request via ``setUserContext``.
    """
    def __init__(
        self,
        currentUser: User,
        mandateId: Optional[str] = None,
        featureInstanceId: Optional[str] = None,
    ) -> None:
        # Bind user context first: the DB connector and RBAC both key off userId.
        self.currentUser = currentUser
        self.userId = currentUser.id if currentUser else None
        self.mandateId = mandateId
        self.featureInstanceId = featureInstanceId
        self._initializeDatabase()
        # NOTE(review): function-level import -- presumably to avoid a
        # module-load cycle with modules.security; confirm before hoisting.
        from modules.security.rootAccess import getRootDbAppConnector
        dbApp = getRootDbAppConnector()
        self.rbac = RbacClass(self.db, dbApp=dbApp)
        self.db.updateContext(self.userId)
    # ------------------------------------------------------------------
    # DB bootstrap
    # ------------------------------------------------------------------
    def _initializeDatabase(self) -> None:
        """Open the connector to the ``poweron_redmine`` database."""
        self.db = DatabaseConnector(
            dbHost=APP_CONFIG.get("DB_HOST", "_no_config_default_data"),
            dbDatabase=redmineDatabase,
            dbUser=APP_CONFIG.get("DB_USER"),
            dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
            dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
            userId=self.userId,
        )
        logger.debug(f"Redmine database initialized for user {self.userId}")
    def setUserContext(
        self,
        currentUser: User,
        mandateId: Optional[str] = None,
        featureInstanceId: Optional[str] = None,
    ) -> None:
        """Re-bind this cached instance to the current request's user/context."""
        self.currentUser = currentUser
        self.userId = currentUser.id if currentUser else None
        self.mandateId = mandateId
        self.featureInstanceId = featureInstanceId
        self.db.updateContext(self.userId)
    # ------------------------------------------------------------------
    # Config CRUD
    # ------------------------------------------------------------------
    def _findConfigRecord(self, featureInstanceId: str) -> Optional[Dict[str, Any]]:
        """Return the raw config row for the instance, or None."""
        records = self.db.getRecordset(
            RedmineInstanceConfig,
            recordFilter={"featureInstanceId": featureInstanceId},
        )
        if not records:
            return None
        return dict(records[0])
    def getConfig(self, featureInstanceId: str) -> Optional[RedmineInstanceConfig]:
        """Return the typed config, or None if no row exists."""
        record = self._findConfigRecord(featureInstanceId)
        if not record:
            return None
        # Strip connector-internal keys (leading underscore) before validation.
        return RedmineInstanceConfig(**{k: v for k, v in record.items() if not k.startswith("_")})
    def getConfigDto(self, featureInstanceId: str) -> RedmineConfigDto:
        """Return the frontend-safe config view; empty defaults if unconfigured.

        Never exposes the encrypted key -- only the ``hasApiKey`` flag.
        """
        cfg = self.getConfig(featureInstanceId)
        if not cfg:
            return RedmineConfigDto(
                featureInstanceId=featureInstanceId,
                mandateId=self.mandateId,
            )
        return RedmineConfigDto(
            id=cfg.id,
            featureInstanceId=cfg.featureInstanceId,
            mandateId=cfg.mandateId,
            baseUrl=cfg.baseUrl or "",
            projectId=cfg.projectId or "",
            hasApiKey=bool(cfg.encryptedApiKey),
            rootTrackerName=cfg.rootTrackerName or "Userstory",
            defaultPeriodValue=cfg.defaultPeriodValue,
            schemaCacheTtlSeconds=cfg.schemaCacheTtlSeconds if cfg.schemaCacheTtlSeconds is not None else 24 * 60 * 60,
            schemaCachedAt=cfg.schemaCachedAt,
            isActive=cfg.isActive if cfg.isActive is not None else True,
            lastConnectedAt=cfg.lastConnectedAt,
            lastSyncAt=cfg.lastSyncAt,
            lastFullSyncAt=cfg.lastFullSyncAt,
            lastSyncTicketCount=cfg.lastSyncTicketCount,
            lastSyncErrorMessage=cfg.lastSyncErrorMessage,
        )
    def upsertConfig(
        self,
        featureInstanceId: str,
        update: RedmineConfigUpdateRequest,
    ) -> RedmineConfigDto:
        """Create or update the config row; only fields present in *update*
        are written.

        An empty ``apiKey`` string clears the stored key; a non-empty one is
        encrypted before persistence. Raises ValueError for an empty
        ``rootTrackerName``.
        """
        existing = self._findConfigRecord(featureInstanceId)
        data: Dict[str, Any] = {}
        if update.baseUrl is not None:
            # Normalise: no trailing slash so URL joins stay predictable.
            data["baseUrl"] = update.baseUrl.strip().rstrip("/")
        if update.projectId is not None:
            data["projectId"] = update.projectId.strip()
        if update.rootTrackerName is not None:
            cleaned = update.rootTrackerName.strip()
            if not cleaned:
                raise ValueError("rootTrackerName must not be empty")
            data["rootTrackerName"] = cleaned
        if update.defaultPeriodValue is not None:
            data["defaultPeriodValue"] = update.defaultPeriodValue
        if update.schemaCacheTtlSeconds is not None:
            data["schemaCacheTtlSeconds"] = int(update.schemaCacheTtlSeconds)
        if update.isActive is not None:
            data["isActive"] = bool(update.isActive)
        if update.apiKey is not None:
            apiKey = update.apiKey.strip()
            if apiKey == "":
                # Explicit empty string = remove the stored key.
                data["encryptedApiKey"] = ""
            else:
                data["encryptedApiKey"] = encryptValue(
                    apiKey,
                    userId=self.userId or "system",
                    keyName="redmineApiKey",
                )
        if existing:
            self.db.recordModify(RedmineInstanceConfig, existing["id"], data)
        else:
            # Seed a full default row, then overlay the provided fields.
            seed = RedmineInstanceConfig(
                featureInstanceId=featureInstanceId,
                mandateId=self.mandateId,
            ).model_dump()
            seed.update(data)
            self.db.recordCreate(RedmineInstanceConfig, seed)
        return self.getConfigDto(featureInstanceId)
    def markConfigInvalid(self, featureInstanceId: str, reason: str = "") -> None:
        """Clear ``lastConnectedAt`` (connection no longer verified) and log *reason*."""
        existing = self._findConfigRecord(featureInstanceId)
        if not existing:
            return
        self.db.recordModify(
            RedmineInstanceConfig,
            existing["id"],
            {"lastConnectedAt": None},
        )
        if reason:
            logger.warning(f"Redmine config {featureInstanceId} invalidated: {reason}")
    def markConfigConnected(self, featureInstanceId: str) -> None:
        """Stamp ``lastConnectedAt`` with the current time."""
        existing = self._findConfigRecord(featureInstanceId)
        if not existing:
            return
        self.db.recordModify(
            RedmineInstanceConfig,
            existing["id"],
            {"lastConnectedAt": time.time()},
        )
    def updateSchemaCache(self, featureInstanceId: str, schema: Dict[str, Any]) -> None:
        """Persist the fetched project schema together with its fetch time."""
        existing = self._findConfigRecord(featureInstanceId)
        if not existing:
            return
        self.db.recordModify(
            RedmineInstanceConfig,
            existing["id"],
            {"schemaCache": schema, "schemaCachedAt": time.time()},
        )
    # ------------------------------------------------------------------
    # Connector resolution
    # ------------------------------------------------------------------
    def _decryptApiKey(self, encryptedApiKey: str) -> str:
        """Decrypt the stored key; returns "" (and logs) on failure."""
        if not encryptedApiKey:
            return ""
        try:
            return decryptValue(
                encryptedApiKey,
                userId=self.userId or "system",
                keyName="redmineApiKey",
            )
        except Exception as e:
            # Deliberately swallowed: an undecryptable key is treated the same
            # as "not configured" by resolveConnector.
            logger.error(f"Failed to decrypt Redmine api key: {e}")
            return ""
    def resolveConnector(
        self, featureInstanceId: str
    ) -> Optional[ConnectorTicketsRedmine]:
        """Build a REST connector from the active config.

        Returns None when the config is missing, inactive, incomplete
        (baseUrl / projectId / key), or the key cannot be decrypted.
        """
        cfg = self.getConfig(featureInstanceId)
        if not cfg or not cfg.isActive:
            return None
        if not cfg.baseUrl or not cfg.projectId or not cfg.encryptedApiKey:
            return None
        apiKey = self._decryptApiKey(cfg.encryptedApiKey)
        if not apiKey:
            return None
        return ConnectorTicketsRedmine(
            baseUrl=cfg.baseUrl,
            apiKey=apiKey,
            projectId=cfg.projectId,
        )
    def deleteConfig(self, featureInstanceId: str) -> bool:
        """Delete the config row; returns True if a row was removed."""
        existing = self._findConfigRecord(featureInstanceId)
        if not existing:
            return False
        self.db.recordDelete(RedmineInstanceConfig, existing["id"])
        return True
    # ------------------------------------------------------------------
    # Sync state
    # ------------------------------------------------------------------
    def recordSyncSuccess(
        self,
        featureInstanceId: str,
        *,
        full: bool,
        ticketsUpserted: int,
        durationMs: int,
        lastSyncAt: float,
    ) -> None:
        """Record a successful sync run and clear any previous error state."""
        existing = self._findConfigRecord(featureInstanceId)
        if not existing:
            return
        update: Dict[str, Any] = {
            "lastSyncAt": float(lastSyncAt),
            "lastSyncDurationMs": int(durationMs),
            "lastSyncTicketCount": int(ticketsUpserted),
            "lastSyncErrorAt": None,
            "lastSyncErrorMessage": None,
        }
        if full:
            update["lastFullSyncAt"] = float(lastSyncAt)
        self.db.recordModify(RedmineInstanceConfig, existing["id"], update)
    def recordSyncFailure(self, featureInstanceId: str, message: str) -> None:
        """Record a failed sync run; *message* is truncated to 1000 chars."""
        existing = self._findConfigRecord(featureInstanceId)
        if not existing:
            return
        self.db.recordModify(
            RedmineInstanceConfig,
            existing["id"],
            {
                "lastSyncErrorAt": time.time(),
                "lastSyncErrorMessage": message[:1000] if message else "unknown error",
            },
        )
    # ------------------------------------------------------------------
    # Ticket mirror CRUD
    # ------------------------------------------------------------------
    def _findMirroredTicket(
        self, featureInstanceId: str, redmineId: int
    ) -> Optional[Dict[str, Any]]:
        """Return the mirror row for (instance, redmine issue id), or None."""
        records = self.db.getRecordset(
            RedmineTicketMirror,
            recordFilter={"featureInstanceId": featureInstanceId, "redmineId": int(redmineId)},
        )
        if not records:
            return None
        return dict(records[0])
    def upsertMirroredTicket(
        self,
        featureInstanceId: str,
        redmineId: int,
        record: Dict[str, Any],
    ) -> str:
        """Insert or update one mirrored ticket; returns the local row id."""
        existing = self._findMirroredTicket(featureInstanceId, redmineId)
        if existing:
            # Never overwrite the local primary key on update.
            update = {k: v for k, v in record.items() if k not in {"id"}}
            self.db.recordModify(RedmineTicketMirror, existing["id"], update)
            return existing["id"]
        else:
            new = self.db.recordCreate(RedmineTicketMirror, record)
            # recordCreate's return shape is defensive-checked here -- falls
            # back to the caller-supplied id if a dict is not returned.
            return new.get("id") if isinstance(new, dict) else record.get("id")
    def deleteMirroredTicket(self, featureInstanceId: str, redmineId: int) -> bool:
        """Delete one mirrored ticket; returns True if a row was removed."""
        existing = self._findMirroredTicket(featureInstanceId, redmineId)
        if not existing:
            return False
        self.db.recordDelete(RedmineTicketMirror, existing["id"])
        return True
    def listMirroredTickets(
        self,
        featureInstanceId: str,
        *,
        trackerIds: Optional[list] = None,
        statusIds: Optional[list] = None,
        assigneeId: Optional[int] = None,
        updatedFromTs: Optional[float] = None,
        updatedToTs: Optional[float] = None,
    ) -> list:
        """Return mirrored tickets for the instance, optionally filtered.

        NOTE(review): filters are applied in Python over the full recordset,
        not pushed into SQL -- acceptable for mirror-sized data, revisit if
        mirrors grow large. Rows without ``updatedOnTs`` are excluded by any
        time-range filter.
        """
        recordFilter: Dict[str, Any] = {"featureInstanceId": featureInstanceId}
        records = self.db.getRecordset(RedmineTicketMirror, recordFilter=recordFilter)
        out = []
        for r in records or []:
            d = dict(r)
            if trackerIds and d.get("trackerId") not in trackerIds:
                continue
            if statusIds and d.get("statusId") not in statusIds:
                continue
            if assigneeId is not None and d.get("assignedToId") != assigneeId:
                continue
            uts = d.get("updatedOnTs")
            if updatedFromTs is not None and (uts is None or uts < updatedFromTs):
                continue
            if updatedToTs is not None and (uts is None or uts > updatedToTs):
                continue
            out.append(d)
        return out
    def countMirroredTickets(self, featureInstanceId: str) -> int:
        """Number of mirrored tickets for the instance."""
        records = self.db.getRecordset(
            RedmineTicketMirror,
            recordFilter={"featureInstanceId": featureInstanceId},
        )
        return len(records or [])
    # ------------------------------------------------------------------
    # Relation mirror CRUD
    # ------------------------------------------------------------------
    def insertMirroredRelation(self, featureInstanceId: str, record: Dict[str, Any]) -> None:
        """Insert one mirrored relation row (no dedup here -- caller's job)."""
        self.db.recordCreate(RedmineRelationMirror, record)
    def deleteMirroredRelationsForIssue(self, featureInstanceId: str, issueId: int) -> int:
        """Delete all relations where the issue is source OR target.

        Returns the number of rows removed; the ``seen`` set guards against
        double-deleting self-relations that match both queries.
        """
        records_a = self.db.getRecordset(
            RedmineRelationMirror,
            recordFilter={"featureInstanceId": featureInstanceId, "issueId": int(issueId)},
        ) or []
        records_b = self.db.getRecordset(
            RedmineRelationMirror,
            recordFilter={"featureInstanceId": featureInstanceId, "issueToId": int(issueId)},
        ) or []
        deleted = 0
        seen = set()
        for r in list(records_a) + list(records_b):
            rid = r.get("id")
            if not rid or rid in seen:
                continue
            seen.add(rid)
            self.db.recordDelete(RedmineRelationMirror, rid)
            deleted += 1
        return deleted
    def listMirroredRelations(self, featureInstanceId: str) -> list:
        """All mirrored relations for the instance as plain dicts."""
        records = self.db.getRecordset(
            RedmineRelationMirror,
            recordFilter={"featureInstanceId": featureInstanceId},
        )
        return [dict(r) for r in (records or [])]
    def countMirroredRelations(self, featureInstanceId: str) -> int:
        """Number of mirrored relations for the instance."""
        return len(self.db.getRecordset(
            RedmineRelationMirror,
            recordFilter={"featureInstanceId": featureInstanceId},
        ) or [])
    def deleteMirroredRelationByRedmineId(
        self, featureInstanceId: str, redmineRelationId: int
    ) -> bool:
        """Delete the mirror row matching a Redmine relation id; True if removed."""
        records = self.db.getRecordset(
            RedmineRelationMirror,
            recordFilter={"featureInstanceId": featureInstanceId, "redmineRelationId": int(redmineRelationId)},
        )
        if not records:
            return False
        self.db.recordDelete(RedmineRelationMirror, records[0]["id"])
        return True
def getInterface(
    currentUser: Optional[User] = None,
    mandateId: Optional[str] = None,
    featureInstanceId: Optional[str] = None,
) -> RedmineObjects:
    """Return the cached ``RedmineObjects`` for (mandate, instance, user).

    Creates the interface on first use; on subsequent calls the cached
    instance is re-bound to the current request context. Raises ValueError
    when no user is supplied.
    """
    if not currentUser:
        raise ValueError("Invalid user context: user is required")
    normMandateId = str(mandateId) if mandateId else None
    normInstanceId = str(featureInstanceId) if featureInstanceId else None
    cacheKey = f"redmine_{normMandateId}_{normInstanceId}_{currentUser.id}"
    cached = _redmineInterfaces.get(cacheKey)
    if cached is None:
        cached = RedmineObjects(
            currentUser,
            mandateId=normMandateId,
            featureInstanceId=normInstanceId,
        )
        _redmineInterfaces[cacheKey] = cached
    else:
        cached.setUserContext(
            currentUser,
            mandateId=normMandateId,
            featureInstanceId=normInstanceId,
        )
    return cached

View file

@ -0,0 +1,335 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine Feature Container -- Main Module.
Defines the feature metadata and registers RBAC objects + template roles
in the catalog. Loaded automatically by ``modules.system.registry``.
"""
from __future__ import annotations
import logging
from typing import Any, Dict, List
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
# Feature identity used across RBAC object keys and the registry.
FEATURE_CODE = "redmine"
FEATURE_LABEL = t("Redmine", context="UI")
FEATURE_ICON = "mdi-bug-outline"
# Wrapping labels in t() at import time registers the keys with the i18n
# catalog immediately, so the AI translator picks them up on the next sweep.
# Without this, brand-new labels like "Ticket-Browser" stay untranslated and
# render as ``[Ticket-Browser]`` in non-de UIs.
# UI objects exposed to RBAC; ``meta.area`` identifies the frontend area,
# ``isDefault`` presumably selects the initial one -- confirm in frontend.
UI_OBJECTS: List[Dict[str, Any]] = [
    {"objectKey": "ui.feature.redmine.stats", "label": t("Statistik", context="UI"), "meta": {"area": "stats", "isDefault": True}},
    {"objectKey": "ui.feature.redmine.browser", "label": t("Ticket-Browser", context="UI"), "meta": {"area": "browser"}},
    {"objectKey": "ui.feature.redmine.settings", "label": t("Einstellungen", context="UI"), "meta": {"area": "settings", "admin_only": True}},
]
# Data objects: one RBAC entry per feature table, grouped under a shared
# "config" group node, plus a wildcard covering all redmine tables.
DATA_OBJECTS: List[Dict[str, Any]] = [
    {"objectKey": "data.feature.redmine.config", "label": t("Konfiguration", context="UI"), "meta": {"isGroup": True}},
    {
        "objectKey": "data.feature.redmine.RedmineInstanceConfig",
        "label": t("Redmine-Verbindung", context="UI"),
        "meta": {
            "table": "RedmineInstanceConfig",
            "group": "data.feature.redmine.config",
            "fields": ["id", "baseUrl", "projectId", "rootTrackerName", "isActive", "lastConnectedAt", "lastSyncAt"],
        },
    },
    {
        "objectKey": "data.feature.redmine.RedmineTicketMirror",
        "label": t("Redmine-Tickets (Mirror)", context="UI"),
        "meta": {
            "table": "RedmineTicketMirror",
            "group": "data.feature.redmine.config",
            "fields": ["redmineId", "subject", "trackerName", "statusName", "assignedToName", "updatedOn"],
        },
    },
    {
        "objectKey": "data.feature.redmine.RedmineRelationMirror",
        "label": t("Redmine-Beziehungen (Mirror)", context="UI"),
        "meta": {
            "table": "RedmineRelationMirror",
            "group": "data.feature.redmine.config",
            "fields": ["redmineRelationId", "issueId", "issueToId", "relationType"],
        },
    },
    {
        "objectKey": "data.feature.redmine.*",
        "label": t("Alle Redmine-Daten", context="UI"),
        "meta": {"wildcard": True, "description": "Wildcard for all redmine data tables"},
    },
]
# Resource objects: one RBAC entry per API endpoint (endpoint + method in
# meta); entries flagged ``admin_only`` cover config/sync administration.
RESOURCE_OBJECTS: List[Dict[str, Any]] = [
    {
        "objectKey": "resource.feature.redmine.tickets.read",
        "label": t("Tickets lesen", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets", "method": "GET"},
    },
    {
        "objectKey": "resource.feature.redmine.tickets.create",
        "label": t("Tickets erstellen", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets", "method": "POST"},
    },
    {
        "objectKey": "resource.feature.redmine.tickets.update",
        "label": t("Tickets bearbeiten", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}", "method": "PUT"},
    },
    {
        "objectKey": "resource.feature.redmine.tickets.delete",
        "label": t("Tickets loeschen / archivieren", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}", "method": "DELETE"},
    },
    {
        "objectKey": "resource.feature.redmine.relations.manage",
        "label": t("Beziehungen verwalten", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/tickets/{issueId}/relations", "method": "ALL"},
    },
    {
        "objectKey": "resource.feature.redmine.stats.read",
        "label": t("Statistik einsehen", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/stats", "method": "GET"},
    },
    {
        "objectKey": "resource.feature.redmine.config.manage",
        "label": t("Verbindung verwalten", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/config", "method": "ALL", "admin_only": True},
    },
    {
        "objectKey": "resource.feature.redmine.config.test",
        "label": t("Verbindung testen", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/config/test", "method": "POST", "admin_only": True},
    },
    {
        "objectKey": "resource.feature.redmine.sync.run",
        "label": t("Mirror synchronisieren", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/sync", "method": "POST", "admin_only": True},
    },
    {
        "objectKey": "resource.feature.redmine.sync.status",
        "label": t("Sync-Status lesen", context="UI"),
        "meta": {"endpoint": "/api/redmine/{instanceId}/sync/status", "method": "GET"},
    },
    {
        "objectKey": "resource.feature.redmine.workflows.view",
        "label": t("Workflows einsehen", context="UI"),
        "meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "GET"},
    },
    {
        "objectKey": "resource.feature.redmine.workflows.execute",
        "label": t("Workflows ausfuehren", context="UI"),
        "meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"},
    },
]
# Template roles synced into the DB by _syncTemplateRolesToDb(); the
# viewer/editor/admin tiers map onto the resource objects above. DATA rules
# with item=None apply feature-wide; read/create/update/delete values
# ("n"/"m"/"g"/"a") are RBAC scope codes -- see the RBAC module for meaning.
TEMPLATE_ROLES: List[Dict[str, Any]] = [
    {
        "roleLabel": "redmine-viewer",
        "description": "Redmine-Betrachter -- Tickets und Statistik lesen",
        "accessRules": [
            {"context": "UI", "item": "ui.feature.redmine.stats", "view": True},
            {"context": "UI", "item": "ui.feature.redmine.browser", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.read", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.stats.read", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.sync.status", "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
        ],
    },
    {
        "roleLabel": "redmine-editor",
        "description": "Redmine-Bearbeiter -- Tickets erstellen, bearbeiten, Beziehungen pflegen",
        "accessRules": [
            {"context": "UI", "item": "ui.feature.redmine.stats", "view": True},
            {"context": "UI", "item": "ui.feature.redmine.browser", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.read", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.create", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.update", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.delete", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.relations.manage", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.stats.read", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.sync.status", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.view", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.execute", "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "g", "create": "g", "update": "g", "delete": "n"},
        ],
    },
    {
        "roleLabel": "redmine-admin",
        "description": "Redmine-Administrator -- Vollzugriff inkl. Einstellungen und Verbindung",
        "accessRules": [
            {"context": "UI", "item": None, "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "a", "create": "a", "update": "a", "delete": "a"},
            {"context": "RESOURCE", "item": "resource.feature.redmine.config.manage", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.config.test", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.create", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.update", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.tickets.delete", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.relations.manage", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.view", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.redmine.workflows.execute", "view": True},
        ],
    },
]
# ---------------------------------------------------------------------------
# Public discovery API (called by registry.py)
# ---------------------------------------------------------------------------
def getFeatureDefinition() -> Dict[str, Any]:
    """Feature descriptor (code / label / icon) consumed by the registry."""
    return {
        "code": FEATURE_CODE,
        "label": FEATURE_LABEL,
        "icon": FEATURE_ICON,
    }
def getUiObjects() -> List[Dict[str, Any]]:
    """Return the module-level UI object catalog (same list object, not a copy)."""
    return UI_OBJECTS
def getResourceObjects() -> List[Dict[str, Any]]:
    """Return the module-level resource object catalog (same list object, not a copy)."""
    return RESOURCE_OBJECTS
def getDataObjects() -> List[Dict[str, Any]]:
    """Return the module-level data object catalog (same list object, not a copy)."""
    return DATA_OBJECTS
def getTemplateRoles() -> List[Dict[str, Any]]:
    """Return the template role definitions (same list object, not a copy)."""
    return TEMPLATE_ROLES
def getTemplateWorkflows() -> List[Dict[str, Any]]:
    """No template workflows for this feature -- always a fresh empty list."""
    return list()
def registerFeature(catalogService) -> bool:
    """Register UI / Resource / Data objects and sync template roles.

    Returns True on success; any failure is logged and reported as False so
    feature loading never raises.
    """
    try:
        # Table-driven registration: same order as before (UI, resource, data).
        registrations = (
            (UI_OBJECTS, catalogService.registerUiObject),
            (RESOURCE_OBJECTS, catalogService.registerResourceObject),
            (DATA_OBJECTS, catalogService.registerDataObject),
        )
        for objectList, registerFn in registrations:
            for obj in objectList:
                registerFn(
                    featureCode=FEATURE_CODE,
                    objectKey=obj["objectKey"],
                    label=obj["label"],
                    meta=obj.get("meta"),
                )
        _syncTemplateRolesToDb()
        logger.info(
            f"Feature '{FEATURE_CODE}' registered "
            f"{len(UI_OBJECTS)} UI, {len(RESOURCE_OBJECTS)} resource, {len(DATA_OBJECTS)} data objects"
        )
        return True
    except Exception as e:
        logger.error(f"Failed to register feature '{FEATURE_CODE}': {e}")
        return False
# ---------------------------------------------------------------------------
# Template-role sync (mirrors the trustee implementation)
# ---------------------------------------------------------------------------
def _syncTemplateRolesToDb() -> int:
    """Create any missing template roles (``mandateId is None``) for this feature.

    Existing roles are matched by ``roleLabel``; for those only the access
    rules are topped up via ``_ensureAccessRulesForRole``. Returns the number
    of newly created roles; 0 on any error (logged, never raised, so feature
    registration cannot break startup).
    """
    try:
        # Fix: AccessRule / AccessRuleContext were imported here but never
        # used in this function (_ensureAccessRulesForRole does its own
        # import) -- only the names actually needed are imported now.
        from modules.datamodels.datamodelRbac import Role
        from modules.datamodels.datamodelUtils import coerce_text_multilingual
        from modules.interfaces.interfaceDbApp import getRootInterface
        rootInterface = getRootInterface()
        existingRoles = rootInterface.getRolesByFeatureCode(FEATURE_CODE)
        # Template roles are the mandate-independent ones.
        templateRoles = [r for r in existingRoles if r.mandateId is None]
        existingByLabel: Dict[str, str] = {r.roleLabel: str(r.id) for r in templateRoles}
        createdCount = 0
        for roleTemplate in TEMPLATE_ROLES:
            roleLabel = roleTemplate["roleLabel"]
            if roleLabel in existingByLabel:
                # Role already exists -- only ensure all template rules are present.
                _ensureAccessRulesForRole(
                    rootInterface,
                    existingByLabel[roleLabel],
                    roleTemplate.get("accessRules", []),
                )
                continue
            newRole = Role(
                roleLabel=roleLabel,
                description=coerce_text_multilingual(roleTemplate.get("description", {})),
                featureCode=FEATURE_CODE,
                mandateId=None,
                featureInstanceId=None,
                isSystemRole=False,
            )
            createdRole = rootInterface.db.recordCreate(Role, newRole.model_dump())
            roleId = createdRole.get("id")
            _ensureAccessRulesForRole(rootInterface, roleId, roleTemplate.get("accessRules", []))
            logger.info(f"Created template role '{roleLabel}' with ID {roleId}")
            createdCount += 1
        return createdCount
    except Exception as e:
        logger.error(f"Error syncing template roles for feature '{FEATURE_CODE}': {e}")
        return 0
def _ensureAccessRulesForRole(
    rootInterface, roleId: str, ruleTemplates: List[Dict[str, Any]]
) -> int:
    """Create the access rules from *ruleTemplates* that the role lacks.

    Dedup is by (context, item) signature against the rules already stored
    for the role. Returns the number of rules created.
    """
    from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext
    # Signatures of rules already present: (context value, item).
    existingSignatures: set[Any] = {
        (rule.context.value if rule.context else None, rule.item)
        for rule in rootInterface.getAccessRulesByRole(roleId)
    }
    # String -> enum mapping; unknown strings are passed through unchanged.
    contextByName = {
        "UI": AccessRuleContext.UI,
        "DATA": AccessRuleContext.DATA,
        "RESOURCE": AccessRuleContext.RESOURCE,
    }
    createdCount = 0
    for template in ruleTemplates:
        context = template.get("context", "UI")
        item = template.get("item")
        if (context, item) in existingSignatures:
            continue
        newRule = AccessRule(
            roleId=roleId,
            context=contextByName.get(context, context),
            item=item,
            view=template.get("view", False),
            read=template.get("read"),
            create=template.get("create"),
            update=template.get("update"),
            delete=template.get("delete"),
        )
        rootInterface.db.recordCreate(AccessRule, newRule.model_dump())
        createdCount += 1
    return createdCount

View file

@ -0,0 +1,482 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""FastAPI routes for the Redmine feature.
URL pattern: ``/api/redmine/{instanceId}/...`` -- mirrors the Trustee /
CommCoach pattern. Every endpoint validates that the feature instance
exists and resolves its ``mandateId``. Audit log is written for every
write call.
"""
import logging
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Body, Depends, HTTPException, Query, Request
from modules.auth import RequestContext, getRequestContext, limiter
from modules.features.redmine import interfaceFeatureRedmine as interfaceDb
from modules.features.redmine import (
serviceRedmine,
serviceRedmineStats,
serviceRedmineSync,
)
from modules.features.redmine.datamodelRedmine import (
RedmineConfigDto,
RedmineConfigUpdateRequest,
RedmineFieldSchemaDto,
RedmineRelationCreateRequest,
RedmineStatsDto,
RedmineSyncResultDto,
RedmineSyncStatusDto,
RedmineTicketCreateRequest,
RedmineTicketDto,
RedmineTicketUpdateRequest,
)
from modules.features.redmine.serviceRedmine import RedmineNotConfiguredError
from modules.connectors.connectorTicketsRedmine import RedmineApiError
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceFeatures import getFeatureInterface
from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeFeatureRedmine")
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/api/redmine",
tags=["Redmine"],
responses={404: {"description": "Not found"}},
)
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _audit(
    context: RequestContext,
    action: str,
    resourceType: Optional[str] = None,
    resourceId: Optional[str] = None,
    details: str = "",
    success: bool = True,
    errorMessage: Optional[str] = None,
) -> None:
    """Best-effort audit-log write; swallows every error so a broken audit
    backend can never fail the request path."""
    try:
        from modules.shared.auditLogger import audit_logger

        mandate = str(context.mandateId) if context.mandateId else None
        audit_logger.logEvent(
            userId=str(context.user.id),
            mandateId=mandate,
            featureInstanceId=getattr(context, "featureInstanceId", None),
            category="redmine",
            action=action,
            resourceType=resourceType,
            resourceId=resourceId,
            details=details,
            success=success,
            errorMessage=errorMessage,
        )
    except Exception as e:
        logger.debug(f"Redmine audit log failed: {e}")
def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
    """Ensure the feature instance exists and return its ``mandateId``.

    Raises:
        HTTPException: 404 when the instance is unknown, 500 when the
        instance carries no mandate.
    """
    rootInterface = getRootInterface()
    featureInterface = getFeatureInterface(rootInterface.db)
    instance = featureInterface.getFeatureInstance(instanceId)
    if not instance:
        raise HTTPException(
            status_code=404,
            detail=routeApiMsg(f"Feature instance '{instanceId}' not found"),
        )
    # The interface may hand back either a dict or a model object.
    if isinstance(instance, dict):
        mandateId = instance.get("mandateId")
    else:
        mandateId = getattr(instance, "mandateId", None)
    if not mandateId:
        raise HTTPException(
            status_code=500,
            detail=routeApiMsg("Feature instance has no mandateId"),
        )
    return str(mandateId)
def _toHttpStatus(e: RedmineApiError) -> int:
    """Map a Redmine error status to an HTTP status; unknown codes become 502."""
    passthrough = {400, 401, 403, 404, 409, 422}
    return e.status if e.status in passthrough else 502
def _handleRedmineError(e: RedmineApiError) -> HTTPException:
    """Wrap a Redmine API error into the HTTPException the routes raise."""
    status = _toHttpStatus(e)
    return HTTPException(status_code=status, detail=f"Redmine: {e}")
# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------
@router.get("/{instanceId}/config", response_model=RedmineConfigDto)
@limiter.limit("60/minute")
async def getConfig(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> RedmineConfigDto:
    """Return the Redmine configuration for a feature instance."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    dbInterface = interfaceDb.getInterface(
        context.user, mandateId=resolvedMandateId, featureInstanceId=instanceId
    )
    return dbInterface.getConfigDto(instanceId)
@router.put("/{instanceId}/config", response_model=RedmineConfigDto)
@limiter.limit("20/minute")
async def updateConfig(
    request: Request,
    instanceId: str,
    body: RedmineConfigUpdateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineConfigDto:
    """Create or update the instance's Redmine configuration; audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    dbInterface = interfaceDb.getInterface(
        context.user, mandateId=resolvedMandateId, featureInstanceId=instanceId
    )
    dto = dbInterface.upsertConfig(instanceId, body)
    auditDetails = f"baseUrl={dto.baseUrl} projectId={dto.projectId} hasApiKey={dto.hasApiKey}"
    _audit(
        context,
        "redmine.config.updated",
        "RedmineInstanceConfig",
        instanceId,
        details=auditDetails,
    )
    return dto
@router.delete("/{instanceId}/config")
@limiter.limit("20/minute")
async def deleteConfig(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Remove the instance's Redmine configuration; audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    dbInterface = interfaceDb.getInterface(
        context.user, mandateId=resolvedMandateId, featureInstanceId=instanceId
    )
    wasDeleted = dbInterface.deleteConfig(instanceId)
    _audit(context, "redmine.config.deleted", "RedmineInstanceConfig", instanceId, success=wasDeleted)
    return {"deleted": wasDeleted}
@router.post("/{instanceId}/config/test")
@limiter.limit("20/minute")
async def testConfig(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Probe the configured Redmine server and report the outcome; audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    result = await serviceRedmine.testConnection(context.user, resolvedMandateId, instanceId)
    isOk = bool(result.get("ok"))
    _audit(
        context,
        "redmine.config.test",
        "RedmineInstanceConfig",
        instanceId,
        success=isOk,
        errorMessage=None if isOk else str(result.get("message")),
    )
    return result
# ---------------------------------------------------------------------------
# Schema
# ---------------------------------------------------------------------------
@router.get("/{instanceId}/schema", response_model=RedmineFieldSchemaDto)
@limiter.limit("60/minute")
async def getSchema(
    request: Request,
    instanceId: str,
    forceRefresh: bool = Query(False),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineFieldSchemaDto:
    """Return the (cached) Redmine field schema for the instance.

    409 when the instance is not configured, Redmine errors are mapped
    to their HTTP status (unknown codes -> 502).
    """
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        return await serviceRedmine.getProjectMeta(
            context.user, resolvedMandateId, instanceId, forceRefresh=forceRefresh
        )
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        raise _handleRedmineError(e)
# ---------------------------------------------------------------------------
# Sync (mirror)
# ---------------------------------------------------------------------------
@router.post("/{instanceId}/sync", response_model=RedmineSyncResultDto)
@limiter.limit("6/minute")
async def runSync(
    request: Request,
    instanceId: str,
    force: bool = Query(default=False, description="True -> ignore lastSyncAt and pull every issue."),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineSyncResultDto:
    """Run a mirror sync for the instance; audited on success and failure."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)

    def _auditFailure(message: str) -> None:
        # Shared failure audit for both error branches below.
        _audit(
            context,
            "redmine.sync.completed",
            "RedmineInstanceConfig",
            instanceId,
            success=False,
            errorMessage=message,
        )

    try:
        result = await serviceRedmineSync.runSync(
            context.user, resolvedMandateId, instanceId, force=force
        )
    except RedmineApiError as e:
        _auditFailure(str(e))
        raise _handleRedmineError(e)
    except Exception as e:
        _auditFailure(str(e))
        raise HTTPException(status_code=500, detail=f"Sync failed: {e}")
    # _audit never raises, so logging the success outside the try is safe.
    _audit(
        context,
        "redmine.sync.completed",
        "RedmineInstanceConfig",
        instanceId,
        details=f"full={result.full} tickets={result.ticketsUpserted} relations={result.relationsUpserted} {result.durationMs}ms",
    )
    return result
@router.get("/{instanceId}/sync/status", response_model=RedmineSyncStatusDto)
@limiter.limit("60/minute")
async def getSyncStatus(
    request: Request,
    instanceId: str,
    context: RequestContext = Depends(getRequestContext),
) -> RedmineSyncStatusDto:
    """Report the current mirror-sync status for the instance."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    return serviceRedmineSync.getSyncStatus(context.user, resolvedMandateId, instanceId)
# ---------------------------------------------------------------------------
# Tickets
# ---------------------------------------------------------------------------
@router.get("/{instanceId}/tickets", response_model=List[RedmineTicketDto])
@limiter.limit("60/minute")
async def listTickets(
    request: Request,
    instanceId: str,
    trackerIds: Optional[List[int]] = Query(default=None),
    status: str = Query(default="*"),
    dateFrom: Optional[str] = Query(default=None, description="ISO date (YYYY-MM-DD) -- updated_on >= dateFrom"),
    dateTo: Optional[str] = Query(default=None, description="ISO date (YYYY-MM-DD) -- updated_on <= dateTo"),
    assignedToId: Optional[int] = Query(default=None),
    context: RequestContext = Depends(getRequestContext),
) -> List[RedmineTicketDto]:
    """Reads from the local mirror. Trigger a sync via ``POST /sync`` first."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    # Pure mirror read -- filters are forwarded verbatim to the service.
    return serviceRedmine.listTickets(
        context.user,
        resolvedMandateId,
        instanceId,
        trackerIds=trackerIds,
        statusFilter=status,
        updatedOnFrom=dateFrom,
        updatedOnTo=dateTo,
        assignedToId=assignedToId,
    )
@router.get("/{instanceId}/tickets/{issueId}", response_model=RedmineTicketDto)
@limiter.limit("120/minute")
async def getTicket(
    request: Request,
    instanceId: str,
    issueId: int,
    context: RequestContext = Depends(getRequestContext),
) -> RedmineTicketDto:
    """Fetch a single ticket from the local mirror (404 when not mirrored)."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    mirrored = serviceRedmine.getTicket(context.user, resolvedMandateId, instanceId, issueId)
    if mirrored is None:
        raise HTTPException(status_code=404, detail=f"Ticket {issueId} not in mirror; run a sync first.")
    return mirrored
@router.post("/{instanceId}/tickets", response_model=RedmineTicketDto)
@limiter.limit("30/minute")
async def createTicket(
    request: Request,
    instanceId: str,
    body: RedmineTicketCreateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineTicketDto:
    """Create a ticket in Redmine (and mirror it); audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        created = await serviceRedmine.createTicket(context.user, resolvedMandateId, instanceId, body)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        # No issue id exists yet at this point -- audit with a placeholder.
        _audit(context, "redmine.ticket.created", "RedmineTicket", "?", success=False, errorMessage=str(e))
        raise _handleRedmineError(e)
    _audit(context, "redmine.ticket.created", "RedmineTicket", str(created.id), details=f"trackerId={body.trackerId}")
    return created
@router.put("/{instanceId}/tickets/{issueId}", response_model=RedmineTicketDto)
@limiter.limit("60/minute")
async def updateTicket(
    request: Request,
    instanceId: str,
    issueId: int,
    body: RedmineTicketUpdateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineTicketDto:
    """Apply a partial update to a ticket in Redmine; audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        updated = await serviceRedmine.updateTicket(context.user, resolvedMandateId, instanceId, issueId, body)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(context, "redmine.ticket.updated", "RedmineTicket", str(issueId), success=False, errorMessage=str(e))
        raise _handleRedmineError(e)
    _audit(context, "redmine.ticket.updated", "RedmineTicket", str(issueId))
    return updated
@router.delete("/{instanceId}/tickets/{issueId}")
@limiter.limit("30/minute")
async def deleteTicket(
    request: Request,
    instanceId: str,
    issueId: int,
    fallbackStatusId: Optional[int] = Query(default=None, description="If Redmine forbids DELETE, set this status instead"),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Delete a ticket, or archive it via *fallbackStatusId* when Redmine
    forbids DELETE; audited either way."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        outcome = await serviceRedmine.deleteTicket(
            context.user, resolvedMandateId, instanceId, issueId, fallbackStatusId=fallbackStatusId
        )
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(context, "redmine.ticket.deleted", "RedmineTicket", str(issueId), success=False, errorMessage=str(e))
        raise _handleRedmineError(e)
    _audit(
        context,
        "redmine.ticket.deleted",
        "RedmineTicket",
        str(issueId),
        success=bool(outcome.get("deleted") or outcome.get("archived")),
        details=f"deleted={outcome.get('deleted')} archived={outcome.get('archived')}",
    )
    return outcome
# ---------------------------------------------------------------------------
# Relations
# ---------------------------------------------------------------------------
@router.post("/{instanceId}/tickets/{issueId}/relations")
@limiter.limit("30/minute")
async def addRelation(
    request: Request,
    instanceId: str,
    issueId: int,
    body: RedmineRelationCreateRequest = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Create a relation between two tickets; audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        relation = await serviceRedmine.addRelation(context.user, resolvedMandateId, instanceId, issueId, body)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(
            context,
            "redmine.relation.created",
            "RedmineRelation",
            f"{issueId}->{body.issueToId}",
            success=False,
            errorMessage=str(e),
        )
        raise _handleRedmineError(e)
    _audit(
        context,
        "redmine.relation.created",
        "RedmineRelation",
        str(relation.get("id")),
        details=f"{issueId} -[{body.relationType}]-> {body.issueToId}",
    )
    return {"relation": relation}
@router.delete("/{instanceId}/relations/{relationId}")
@limiter.limit("30/minute")
async def deleteRelation(
    request: Request,
    instanceId: str,
    relationId: int,
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Delete a ticket relation; audited."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        wasDeleted = await serviceRedmine.deleteRelation(context.user, resolvedMandateId, instanceId, relationId)
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        _audit(
            context,
            "redmine.relation.deleted",
            "RedmineRelation",
            str(relationId),
            success=False,
            errorMessage=str(e),
        )
        raise _handleRedmineError(e)
    _audit(context, "redmine.relation.deleted", "RedmineRelation", str(relationId), success=wasDeleted)
    return {"deleted": wasDeleted}
# ---------------------------------------------------------------------------
# Stats
# ---------------------------------------------------------------------------
@router.get("/{instanceId}/stats", response_model=RedmineStatsDto)
@limiter.limit("60/minute")
async def getStats(
    request: Request,
    instanceId: str,
    dateFrom: Optional[str] = Query(default=None, description="ISO date YYYY-MM-DD"),
    dateTo: Optional[str] = Query(default=None, description="ISO date YYYY-MM-DD"),
    bucket: str = Query(default="week", regex="^(day|week|month)$"),
    trackerIds: Optional[List[int]] = Query(default=None),
    categoryIds: Optional[List[int]] = Query(default=None, description="Filter by Redmine issue categories"),
    statusFilter: str = Query(default="*", regex="^(\\*|open|closed)$", description="Restrict to open/closed/all tickets"),
    context: RequestContext = Depends(getRequestContext),
) -> RedmineStatsDto:
    """Aggregate ticket statistics for the given window and bucket size."""
    resolvedMandateId = _validateInstanceAccess(instanceId, context)
    try:
        return await serviceRedmineStats.getStats(
            context.user,
            resolvedMandateId,
            instanceId,
            dateFrom=dateFrom,
            dateTo=dateTo,
            bucket=bucket,
            trackerIds=trackerIds,
            categoryIds=categoryIds,
            statusFilter=statusFilter,
        )
    except RedmineNotConfiguredError as e:
        raise HTTPException(status_code=409, detail=str(e))
    except RedmineApiError as e:
        raise _handleRedmineError(e)

View file

@ -0,0 +1,617 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine service layer.
Sits between the FastAPI route and the connector. Responsibilities:
- Resolve the connector for an authenticated user / feature instance.
- Cache project meta (trackers, statuses, priorities, custom fields, users)
on the instance config.
- Resolve the configured ``rootTrackerName`` against the live tracker list.
No heuristic / no auto-detect.
- **Reads come from the local mirror** (``RedmineTicketMirror`` /
``RedmineRelationMirror`` in ``poweron_redmine``). The mirror is
populated by ``serviceRedmineSync`` (button or scheduler).
- **Writes go to Redmine, then immediately upsert the affected ticket
into the mirror** so the UI is consistent without waiting for a sync.
- Invalidate ``serviceRedmineStatsCache`` after every successful write.
All AI-tool-friendly entry points are pure async functions taking the
authenticated ``User`` plus the explicit ``featureInstanceId`` and
``mandateId`` so the same service can be called from REST and from the
workflow engine without context-magic.
"""
from __future__ import annotations
import logging
import time
from typing import Any, Dict, List, Optional, Tuple
from modules.connectors.connectorTicketsRedmine import (
ConnectorTicketsRedmine,
RedmineApiError,
)
from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
RedmineCustomFieldSchemaDto,
RedmineCustomFieldValueDto,
RedmineFieldChoiceDto,
RedmineFieldSchemaDto,
RedmineRelationCreateRequest,
RedmineRelationDto,
RedmineTicketCreateRequest,
RedmineTicketDto,
RedmineTicketUpdateRequest,
)
from modules.features.redmine.interfaceFeatureRedmine import (
RedmineObjects,
getInterface,
)
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Resolution helpers
# ---------------------------------------------------------------------------
class RedmineNotConfiguredError(RuntimeError):
    """The given feature instance has no usable Redmine config.

    Raised by the service layer when ``resolveConnector`` yields nothing;
    the routes translate this into HTTP 409.
    """
def _resolveContext(
    currentUser: User, mandateId: Optional[str], featureInstanceId: str
) -> Tuple[RedmineObjects, ConnectorTicketsRedmine]:
    """Resolve the DB interface and a live connector for the instance.

    Raises:
        RedmineNotConfiguredError: when no active connector can be built.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if connector:
        return iface, connector
    raise RedmineNotConfiguredError(
        f"Redmine instance {featureInstanceId} is not configured or inactive"
    )
# ---------------------------------------------------------------------------
# Project meta -- with TTL cache stored on the config record
# ---------------------------------------------------------------------------
async def getProjectMeta(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    forceRefresh: bool = False,
) -> RedmineFieldSchemaDto:
    """Return the field schema (trackers, statuses, priorities, users,
    categories, custom fields) for the instance's Redmine project.

    Served from the config's ``schemaCache`` while it is younger than
    ``schemaCacheTtlSeconds`` (default 24h); otherwise -- or when
    ``forceRefresh`` is set -- refetched live from Redmine and re-cached.

    Raises:
        RedmineNotConfiguredError: when the instance has no usable config.
    """
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    if cfg is None:
        raise RedmineNotConfiguredError("Config row vanished after connector resolve")
    # TTL default: 24 hours when the config does not pin one.
    ttl = cfg.schemaCacheTtlSeconds if cfg.schemaCacheTtlSeconds is not None else 24 * 60 * 60
    fresh_enough = (
        cfg.schemaCache
        and cfg.schemaCachedAt
        and (time.time() - cfg.schemaCachedAt) < ttl
    )
    if fresh_enough and not forceRefresh:
        schema = _schemaFromCache(cfg.projectId, cfg.schemaCache, cfg.rootTrackerName)
        if schema is not None:
            return schema
    # Cache miss / stale / forced: pull everything live from Redmine.
    project_info = await connector.getProjectInfo()
    trackers_raw = await connector.getTrackers()
    statuses_raw = await connector.getStatuses()
    priorities_raw = await connector.getPriorities()
    custom_fields_raw = await connector.getCustomFields()
    users_raw = await connector.getProjectUsers()
    categories_raw = await connector.getIssueCategories()
    # Normalize the raw Redmine payloads (snake_case keys) into the
    # camelCase cache shape consumed by _schemaFromCache.
    schema_cache: Dict[str, Any] = {
        "projectName": project_info.get("name", ""),
        "trackers": [{"id": t.get("id"), "name": t.get("name")} for t in trackers_raw],
        "statuses": [
            {
                "id": s.get("id"),
                "name": s.get("name"),
                "isClosed": bool(s.get("is_closed")),
            }
            for s in statuses_raw
        ],
        "priorities": [{"id": p.get("id"), "name": p.get("name")} for p in priorities_raw],
        "users": [{"id": u.get("id"), "name": u.get("name")} for u in users_raw],
        "categories": [{"id": c.get("id"), "name": c.get("name")} for c in categories_raw if c.get("id") is not None],
        "customFields": [
            {
                "id": cf.get("id"),
                "name": cf.get("name"),
                "fieldFormat": cf.get("field_format", "string"),
                "isRequired": bool(cf.get("is_required")),
                "possibleValues": [pv.get("value") for pv in (cf.get("possible_values") or []) if pv.get("value") is not None],
                "multiple": bool(cf.get("multiple")),
                "defaultValue": cf.get("default_value"),
            }
            for cf in custom_fields_raw
        ],
    }
    iface.updateSchemaCache(featureInstanceId, schema_cache)
    iface.markConfigConnected(featureInstanceId)
    # Minimal fallback DTO only if the freshly built cache cannot be parsed.
    return _schemaFromCache(cfg.projectId, schema_cache, cfg.rootTrackerName) or RedmineFieldSchemaDto(
        projectId=cfg.projectId,
        projectName=schema_cache["projectName"],
        rootTrackerName=cfg.rootTrackerName,
    )
def _resolveRootTrackerId(
rootTrackerName: str, trackers: List[Dict[str, Any]]
) -> Optional[int]:
"""Resolve the configured root tracker name to a tracker id.
Strict: case-insensitive exact match. Returns ``None`` if not found
(the UI must surface this as a config error).
"""
target = (rootTrackerName or "").strip().lower()
if not target:
return None
for t in trackers:
if str(t.get("name") or "").strip().lower() == target:
tid = t.get("id")
return int(tid) if tid is not None else None
return None
def _schemaFromCache(
    projectId: str, cache: Optional[Dict[str, Any]], rootTrackerName: str
) -> Optional[RedmineFieldSchemaDto]:
    """Rebuild the schema DTO from a cached dict; ``None`` when cache is empty."""
    if not cache:
        return None
    trackers = cache.get("trackers") or []

    def _choices(key: str) -> List[RedmineFieldChoiceDto]:
        # Each cached entry is a plain dict matching the choice DTO fields.
        return [RedmineFieldChoiceDto(**entry) for entry in cache.get(key) or []]

    customFieldDtos = [
        RedmineCustomFieldSchemaDto(
            id=cf.get("id"),
            name=cf.get("name", ""),
            fieldFormat=cf.get("fieldFormat", "string"),
            isRequired=bool(cf.get("isRequired")),
            possibleValues=list(cf.get("possibleValues") or []),
            multiple=bool(cf.get("multiple")),
            defaultValue=cf.get("defaultValue"),
        )
        for cf in cache.get("customFields") or []
        if cf.get("id") is not None
    ]
    return RedmineFieldSchemaDto(
        projectId=projectId,
        projectName=str(cache.get("projectName") or ""),
        trackers=[RedmineFieldChoiceDto(**t) for t in trackers],
        statuses=_choices("statuses"),
        priorities=_choices("priorities"),
        users=_choices("users"),
        categories=_choices("categories"),
        customFields=customFieldDtos,
        rootTrackerName=rootTrackerName,
        rootTrackerId=_resolveRootTrackerId(rootTrackerName, trackers),
    )
# ---------------------------------------------------------------------------
# Mirror -> RedmineTicketDto
# ---------------------------------------------------------------------------
def _mirroredRowToDto(
    row: Dict[str, Any], relations: List[Dict[str, Any]], includeRaw: bool = False
) -> RedmineTicketDto:
    """Convert a mirror DB row (plus its relation rows) into a ticket DTO."""
    customFieldDtos = [
        RedmineCustomFieldValueDto(
            id=int(cf.get("id")),
            name=str(cf.get("name") or ""),
            value=cf.get("value"),
        )
        for cf in (row.get("customFields") or [])
        if cf.get("id") is not None
    ]
    relationDtos = []
    for rel in relations:
        # Prefer the Redmine-side relation id; skip rows carrying neither id.
        relId = rel.get("redmineRelationId") or rel.get("id")
        if relId is None:
            continue
        relationDtos.append(
            RedmineRelationDto(
                id=int(relId),
                issueId=int(rel.get("issueId")),
                issueToId=int(rel.get("issueToId")),
                relationType=str(rel.get("relationType") or "relates"),
                delay=rel.get("delay"),
            )
        )
    return RedmineTicketDto(
        id=int(row.get("redmineId")),
        subject=str(row.get("subject") or ""),
        description=str(row.get("description") or ""),
        trackerId=row.get("trackerId"),
        trackerName=row.get("trackerName"),
        statusId=row.get("statusId"),
        statusName=row.get("statusName"),
        isClosed=bool(row.get("isClosed")),
        priorityId=row.get("priorityId"),
        priorityName=row.get("priorityName"),
        assignedToId=row.get("assignedToId"),
        assignedToName=row.get("assignedToName"),
        authorId=row.get("authorId"),
        authorName=row.get("authorName"),
        parentId=row.get("parentId"),
        fixedVersionId=row.get("fixedVersionId"),
        fixedVersionName=row.get("fixedVersionName"),
        categoryId=row.get("categoryId"),
        categoryName=row.get("categoryName"),
        createdOn=row.get("createdOn"),
        updatedOn=row.get("updatedOn"),
        customFields=customFieldDtos,
        relations=relationDtos,
        raw=row.get("raw") if includeRaw else None,
    )
def _isoToEpoch(value: Optional[str]) -> Optional[float]:
if not value:
return None
try:
from datetime import datetime
return datetime.fromisoformat(value.replace("Z", "+00:00")).timestamp()
except Exception:
return None
# ---------------------------------------------------------------------------
# Read API -- from mirror
# ---------------------------------------------------------------------------
def listTickets(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    trackerIds: Optional[List[int]] = None,
    statusFilter: str = "*",
    updatedOnFrom: Optional[str] = None,
    updatedOnTo: Optional[str] = None,
    assignedToId: Optional[int] = None,
) -> List[RedmineTicketDto]:
    """List tickets from the local mirror.

    ``statusFilter`` accepts ``"open"``, ``"closed"`` or ``"*"`` (any),
    matching the Redmine ``status_id`` semantics.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    rows = iface.listMirroredTickets(
        featureInstanceId,
        trackerIds=trackerIds,
        assigneeId=assignedToId,
        updatedFromTs=_isoToEpoch(updatedOnFrom),
        updatedToTs=_isoToEpoch(updatedOnTo),
    )
    if statusFilter and statusFilter != "*":
        wantClosed = statusFilter == "closed"
        rows = [row for row in rows if bool(row.get("isClosed")) == wantClosed]
    # Group the instance's relations by every issue id they touch so each
    # ticket DTO carries both its incoming and outgoing relations.
    mirroredIds = {int(row.get("redmineId")) for row in rows}
    relationsByIssue: Dict[int, List[Dict[str, Any]]] = {}
    for rel in iface.listMirroredRelations(featureInstanceId):
        for issueKey in (int(rel.get("issueId") or 0), int(rel.get("issueToId") or 0)):
            if issueKey in mirroredIds:
                relationsByIssue.setdefault(issueKey, []).append(rel)
    return [
        _mirroredRowToDto(row, relationsByIssue.get(int(row.get("redmineId")), []))
        for row in rows
    ]
def getTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    *,
    includeRaw: bool = True,
) -> Optional[RedmineTicketDto]:
    """Read a single ticket from the mirror. Returns ``None`` when not present."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    wantedId = int(issueId)
    row = None
    for candidate in iface.listMirroredTickets(featureInstanceId):
        if int(candidate.get("redmineId") or 0) == wantedId:
            row = candidate
            break
    if row is None:
        return None
    # Relations where the ticket appears on either end.
    relations = [
        rel
        for rel in iface.listMirroredRelations(featureInstanceId)
        if wantedId in (int(rel.get("issueId") or 0), int(rel.get("issueToId") or 0))
    ]
    return _mirroredRowToDto(row, relations, includeRaw=includeRaw)
# ---------------------------------------------------------------------------
# Write API -- idempotent + cache invalidation + mirror upsert
# ---------------------------------------------------------------------------
def _invalidateCache(featureInstanceId: str) -> None:
    """Drop cached stats for the instance; log-and-continue on failure."""
    try:
        _getStatsCache().invalidateInstance(featureInstanceId)
    except Exception as e:
        logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")
def _diffPayload(
    current: RedmineTicketDto, update: RedmineTicketUpdateRequest
) -> Dict[str, Any]:
    """Return the Redmine ``issue`` payload containing only changed fields."""
    payload: Dict[str, Any] = {}
    # (update attr, current attr, Redmine payload key, cast-to-int?) --
    # note the parentIssueId/parentId name asymmetry between the two DTOs.
    simpleFields = (
        ("subject", "subject", "subject", False),
        ("description", "description", "description", False),
        ("trackerId", "trackerId", "tracker_id", True),
        ("statusId", "statusId", "status_id", True),
        ("priorityId", "priorityId", "priority_id", True),
        ("assignedToId", "assignedToId", "assigned_to_id", True),
        ("parentIssueId", "parentId", "parent_issue_id", True),
        ("fixedVersionId", "fixedVersionId", "fixed_version_id", True),
    )
    for updAttr, curAttr, key, asInt in simpleFields:
        newValue = getattr(update, updAttr)
        if newValue is None or newValue == getattr(current, curAttr):
            continue
        payload[key] = int(newValue) if asInt else newValue
    if update.customFields:
        currentById = {cf.id: cf.value for cf in current.customFields}
        changed: List[Dict[str, Any]] = []
        for rawId, value in update.customFields.items():
            try:
                fieldId = int(rawId)
            except Exception:
                continue  # non-numeric custom-field key -- skip silently
            if currentById.get(fieldId) != value:
                changed.append({"id": fieldId, "value": value})
        if changed:
            payload["custom_fields"] = changed
    return payload
async def _refreshMirroredTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> None:
    """Pull one issue from Redmine into the mirror; best effort, never raises."""
    from modules.features.redmine.serviceRedmineSync import upsertSingleTicket

    try:
        await upsertSingleTicket(currentUser, mandateId, featureInstanceId, int(issueId))
    except Exception as e:
        logger.warning(f"Mirror upsert for issue {issueId} failed: {e}")
async def updateTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    update: RedmineTicketUpdateRequest,
) -> RedmineTicketDto:
    """Idempotent: fetch the issue from Redmine (live, for diff accuracy),
    only PUT if non-empty, then upsert the mirror."""
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    issueNumber = int(issueId)
    live = await connector.getIssue(issueNumber, includeRelations=False)
    before = _liveIssueToDto(live, schema)
    changes = _diffPayload(before, update)
    if not changes and not update.notes:
        # Nothing changed and no journal note -- skip the PUT (idempotency).
        return before
    await connector.updateIssue(issueNumber, changes, notes=update.notes)
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, issueNumber)
    _invalidateCache(featureInstanceId)
    mirrored = getTicket(currentUser, mandateId, featureInstanceId, issueNumber, includeRaw=True)
    return mirrored or before
async def createTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    payload: RedmineTicketCreateRequest,
) -> RedmineTicketDto:
    """Create an issue in Redmine, mirror it, and return the fresh DTO."""
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    fields: Dict[str, Any] = {
        "subject": payload.subject,
        "tracker_id": int(payload.trackerId),
        "description": payload.description or "",
    }
    # Optional numeric fields, only sent when set.
    optionalIds = (
        ("status_id", payload.statusId),
        ("priority_id", payload.priorityId),
        ("assigned_to_id", payload.assignedToId),
        ("parent_issue_id", payload.parentIssueId),
        ("fixed_version_id", payload.fixedVersionId),
    )
    for key, value in optionalIds:
        if value is not None:
            fields[key] = int(value)
    if payload.customFields:
        fields["custom_fields"] = [
            {"id": int(k), "value": v} for k, v in payload.customFields.items()
        ]
    created = await connector.createIssue(fields)
    newId = created.get("id")
    if not newId:
        # Redmine returned no issue id -- fall back to the live payload.
        return _liveIssueToDto(created, schema, includeRaw=True)
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, int(newId))
    _invalidateCache(featureInstanceId)
    fresh = getTicket(currentUser, mandateId, featureInstanceId, int(newId), includeRaw=True)
    return fresh or _liveIssueToDto(created, schema, includeRaw=True)
async def deleteTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    *,
    fallbackStatusId: Optional[int] = None,
) -> Dict[str, Any]:
    """Try DELETE; on Redmine's 403/401 silently fall back to a closed
    status if ``fallbackStatusId`` is provided.
    Returns ``{deleted: bool, archived: bool, statusId: int|None}``.
    """
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    issue_no = int(issueId)
    if await connector.deleteIssue(issue_no):
        # Hard delete succeeded -- drop the mirror rows as well.
        from modules.features.redmine.serviceRedmineSync import deleteMirroredTicket
        deleteMirroredTicket(currentUser, mandateId, featureInstanceId, issue_no)
        _invalidateCache(featureInstanceId)
        return {"deleted": True, "archived": False, "statusId": None}
    if fallbackStatusId is None:
        # Delete refused and no archival status configured: report failure.
        return {"deleted": False, "archived": False, "statusId": None}
    # Archive instead: move the issue to the configured closed status.
    await connector.updateIssue(
        issue_no,
        {"status_id": int(fallbackStatusId)},
        notes="Archived via Porta -- delete forbidden by Redmine",
    )
    await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, issue_no)
    _invalidateCache(featureInstanceId)
    return {"deleted": False, "archived": True, "statusId": int(fallbackStatusId)}
async def addRelation(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
    payload: RedmineRelationCreateRequest,
) -> Dict[str, Any]:
    """Create a relation between two issues and refresh both mirror rows."""
    _, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    source_id = int(issueId)
    target_id = int(payload.issueToId)
    relation = await connector.addRelation(
        source_id,
        target_id,
        relationType=payload.relationType,
        delay=payload.delay,
    )
    # A relation shows up on both endpoints, so refresh both mirrored tickets.
    for endpoint_id in (source_id, target_id):
        await _refreshMirroredTicket(currentUser, mandateId, featureInstanceId, endpoint_id)
    _invalidateCache(featureInstanceId)
    return relation
async def deleteRelation(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    relationId: int,
) -> bool:
    """Delete one relation in Redmine; mirrors the removal on success."""
    iface, connector = _resolveContext(currentUser, mandateId, featureInstanceId)
    removed = await connector.deleteRelation(int(relationId))
    if not removed:
        return removed
    # Keep the mirror and the cached stats in step with Redmine.
    iface.deleteMirroredRelationByRedmineId(featureInstanceId, int(relationId))
    _invalidateCache(featureInstanceId)
    return removed
# ---------------------------------------------------------------------------
# Live (Redmine) -> RedmineTicketDto -- only used by the write paths to
# compute idempotent diffs against the current Redmine state.
# ---------------------------------------------------------------------------
def _statusIsClosedFromSchema(statusId: Optional[int], schema: Optional[RedmineFieldSchemaDto]) -> bool:
if statusId is None or schema is None:
return False
for s in schema.statuses:
if s.id == statusId:
return bool(s.isClosed)
return False
def _liveIssueToDto(
    issue: Dict[str, Any], schema: Optional[RedmineFieldSchemaDto] = None, *, includeRaw: bool = False
) -> RedmineTicketDto:
    """Map a raw Redmine issue dict onto :class:`RedmineTicketDto`.

    ``schema`` -- when given -- is only used to derive ``isClosed`` from
    the status id; without it the flag defaults to ``False``.  With
    ``includeRaw`` the untouched Redmine payload is attached as ``raw``.
    """
    # Redmine nests reference objects (tracker, status, ...); normalise
    # missing ones to {} so the ``.get`` calls below stay safe.
    tracker = issue.get("tracker") or {}
    status = issue.get("status") or {}
    priority = issue.get("priority") or {}
    assigned = issue.get("assigned_to") or {}
    author = issue.get("author") or {}
    fixed_version = issue.get("fixed_version") or {}
    category = issue.get("category") or {}
    status_id = status.get("id")
    return RedmineTicketDto(
        id=int(issue.get("id")),
        subject=str(issue.get("subject") or ""),
        description=str(issue.get("description") or ""),
        trackerId=tracker.get("id"),
        trackerName=tracker.get("name"),
        statusId=status_id,
        statusName=status.get("name"),
        isClosed=_statusIsClosedFromSchema(status_id, schema),
        priorityId=priority.get("id"),
        priorityName=priority.get("name"),
        assignedToId=assigned.get("id"),
        assignedToName=assigned.get("name"),
        authorId=author.get("id"),
        authorName=author.get("name"),
        parentId=(issue.get("parent") or {}).get("id"),
        fixedVersionId=fixed_version.get("id"),
        fixedVersionName=fixed_version.get("name"),
        categoryId=category.get("id"),
        categoryName=category.get("name"),
        createdOn=issue.get("created_on"),
        updatedOn=issue.get("updated_on"),
        # Custom fields / relations without an id are skipped defensively.
        customFields=[
            RedmineCustomFieldValueDto(
                id=int(cf.get("id")),
                name=str(cf.get("name") or ""),
                value=cf.get("value"),
            )
            for cf in issue.get("custom_fields") or []
            if cf.get("id") is not None
        ],
        relations=[
            RedmineRelationDto(
                id=int(r.get("id")),
                issueId=int(r.get("issue_id")),
                issueToId=int(r.get("issue_to_id")),
                relationType=str(r.get("relation_type") or "relates"),
                delay=r.get("delay"),
            )
            for r in issue.get("relations") or []
            if r.get("id") is not None
        ],
        raw=issue if includeRaw else None,
    )
# ---------------------------------------------------------------------------
# Connection self-test (used by the Settings page button)
# ---------------------------------------------------------------------------
async def testConnection(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
) -> Dict[str, Any]:
    """Calls ``whoAmI`` and a minimal project fetch. Updates the
    ``lastConnectedAt`` timestamp on success. Never raises -- returns a
    structured dict for the UI."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        # No (valid) configuration for this instance -- nothing to test.
        return {"ok": False, "reason": "notConfigured", "message": "Keine gueltige Redmine-Konfiguration."}
    try:
        user = await connector.whoAmI()
        project = await connector.getProjectInfo()
        # Both calls succeeded -> record the successful connection.
        iface.markConfigConnected(featureInstanceId)
        return {
            "ok": True,
            "user": {"id": user.get("id"), "name": (user.get("firstname") or "") + " " + (user.get("lastname") or "")},
            "project": {"id": project.get("id"), "name": project.get("name")},
        }
    except RedmineApiError as e:
        # HTTP-level failure: surface status + a truncated body for diagnosis.
        return {"ok": False, "reason": "httpError", "status": e.status, "message": (e.body or "")[:300]}
    except Exception as e:
        # Network / parsing / unexpected errors; message capped at 300 chars.
        return {"ok": False, "reason": "exception", "message": str(e)[:300]}

View file

@ -0,0 +1,521 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine statistics aggregator.
Returns raw buckets in :class:`RedmineStatsDto`. The frontend
(``RedmineStatsPage.tsx``) maps these onto ``ReportSection`` for
``FormGeneratorReport``. Decision 2026-04-21.
Sections produced:
- KPIs: total / open / closed / closedInPeriod / createdInPeriod / orphans
- statusByTracker (stacked bar)
- throughput (line chart, created vs closed per bucket)
- topAssignees (top-10 horizontal bar)
- relationDistribution (pie)
- backlogAging (open issues by age since last update)
The whole result is cached in :mod:`serviceRedmineStatsCache` keyed by
``(instanceId, dateFrom, dateTo, bucket, trackerIds)`` with a 90 s TTL.
"""
from __future__ import annotations
import bisect
import datetime as _dt
import logging
from collections import Counter, defaultdict
from typing import Any, Dict, Iterable, List, Optional, Tuple
from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
RedmineAgingBucket,
RedmineAssigneeBucket,
RedmineFieldSchemaDto,
RedmineRelationDistributionEntry,
RedmineStatsDto,
RedmineStatsKpis,
RedmineStatusByTrackerEntry,
RedmineThroughputBucket,
RedmineTicketDto,
)
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Public entry
# ---------------------------------------------------------------------------
async def getStats(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    dateFrom: Optional[str] = None,
    dateTo: Optional[str] = None,
    bucket: str = "week",
    trackerIds: Optional[List[int]] = None,
    categoryIds: Optional[List[int]] = None,
    statusFilter: str = "*",
) -> RedmineStatsDto:
    """Compute (or fetch from cache) the full statistics payload.

    All filter parameters are normalised defensively: ``bucket`` falls
    back to ``"week"``, ``statusFilter`` to ``"*"``, and the id lists are
    de-duplicated and sorted -- so equivalent queries share one cache entry.
    """
    bucket_norm = (bucket or "week").lower()
    if bucket_norm not in {"day", "week", "month"}:
        bucket_norm = "week"
    tracker_ids_norm: List[int] = sorted({int(t) for t in trackerIds or []})
    category_ids_norm: List[int] = sorted({int(c) for c in categoryIds or []})
    status_norm = (statusFilter or "*").lower()
    if status_norm not in {"*", "open", "closed"}:
        status_norm = "*"
    cache = _getStatsCache()
    # Cache key now includes the new dimensions so different filter combos
    # don't collide. ``_freeze`` (in the cache module) hashes lists/sets
    # for us, so we can pass them directly as extra dimensions.
    cache_key = cache.buildKey(
        featureInstanceId, dateFrom, dateTo, bucket_norm, tracker_ids_norm,
        category_ids_norm, status_norm,
    )
    cached = cache.get(cache_key)
    if cached is not None:
        return cached
    # Lazy import: keeps the pure aggregation helpers below importable
    # without dragging in aiohttp / DB connector at module load.
    from modules.features.redmine.serviceRedmine import (
        getProjectMeta,
        listTickets,
    )
    schema = await getProjectMeta(currentUser, mandateId, featureInstanceId)
    root_tracker_id = schema.rootTrackerId
    tickets = listTickets(
        currentUser,
        mandateId,
        featureInstanceId,
        trackerIds=tracker_ids_norm or None,
        statusFilter=status_norm,
    )
    # Category filtering is applied here in Python, after listTickets.
    if category_ids_norm:
        cat_set = set(category_ids_norm)
        tickets = [t for t in tickets if t.categoryId in cat_set]
    stats = _aggregate(
        tickets,
        schema=schema,
        rootTrackerId=root_tracker_id,
        dateFrom=dateFrom,
        dateTo=dateTo,
        bucket=bucket_norm,
        trackerIdsFilter=tracker_ids_norm,
        categoryIdsFilter=category_ids_norm,
        statusFilter=status_norm,
        instanceId=featureInstanceId,
    )
    cache.set(cache_key, stats)
    return stats
# ---------------------------------------------------------------------------
# Pure aggregation (testable without I/O)
# ---------------------------------------------------------------------------
def _aggregate(
    tickets: List[RedmineTicketDto],
    *,
    schema: Optional[RedmineFieldSchemaDto],
    rootTrackerId: Optional[int],
    dateFrom: Optional[str],
    dateTo: Optional[str],
    bucket: str,
    trackerIdsFilter: List[int],
    categoryIdsFilter: List[int],
    statusFilter: str,
    instanceId: str,
) -> RedmineStatsDto:
    """Assemble every stats section from the ticket list (pure, no I/O)."""
    window_start = _parseIsoDate(dateFrom)
    window_end = _parseIsoDate(dateTo)
    # Each section builder is pure; they are evaluated in declaration order.
    return RedmineStatsDto(
        instanceId=instanceId,
        dateFrom=dateFrom,
        dateTo=dateTo,
        bucket=bucket,
        trackerIds=trackerIdsFilter,
        categoryIds=categoryIdsFilter,
        statusFilter=statusFilter,
        kpis=_kpis(tickets, rootTrackerId, window_start, window_end),
        statusByTracker=_statusByTracker(tickets, schema),
        throughput=_throughput(tickets, window_start, window_end, bucket),
        topAssignees=_topAssignees(tickets, limit=10),
        relationDistribution=_relationDistribution(tickets),
        backlogAging=_backlogAging(tickets, now=_utcNow()),
    )
# ---------------------------------------------------------------------------
# Section builders
# ---------------------------------------------------------------------------
def _kpis(
    tickets: List[RedmineTicketDto],
    rootTrackerId: Optional[int],
    periodFrom: Optional[_dt.datetime],
    periodTo: Optional[_dt.datetime],
) -> RedmineStatsKpis:
    """Headline KPI numbers, including created/closed counts in the period."""
    open_total = sum(1 for t in tickets if not t.isClosed)
    closed_total = sum(1 for t in tickets if t.isClosed)
    created_in_window = 0
    closed_in_window = 0
    for ticket in tickets:
        created_at = _parseIsoDate(ticket.createdOn)
        updated_at = _parseIsoDate(ticket.updatedOn)
        if created_at and _inPeriod(created_at, periodFrom, periodTo):
            created_in_window += 1
        # The close time is approximated by the last update of a closed ticket.
        if ticket.isClosed and updated_at and _inPeriod(updated_at, periodFrom, periodTo):
            closed_in_window += 1
    return RedmineStatsKpis(
        total=len(tickets),
        open=open_total,
        closed=closed_total,
        closedInPeriod=closed_in_window,
        createdInPeriod=created_in_window,
        orphans=_countOrphans(tickets, rootTrackerId),
    )
def _countOrphans(
tickets: List[RedmineTicketDto], rootTrackerId: Optional[int]
) -> int:
"""A ticket is an orphan if it is not a root user-story AND not
reachable (via parent or any relation, in either direction) to any
root user-story within the same loaded set."""
if not tickets:
return 0
by_id: Dict[int, RedmineTicketDto] = {t.id: t for t in tickets}
roots: set[int] = {
t.id for t in tickets if rootTrackerId and t.trackerId == rootTrackerId
}
if not roots:
return sum(1 for t in tickets if not (rootTrackerId and t.trackerId == rootTrackerId))
adjacency: Dict[int, set[int]] = defaultdict(set)
for t in tickets:
if t.parentId is not None and t.parentId in by_id:
adjacency[t.id].add(t.parentId)
adjacency[t.parentId].add(t.id)
for r in t.relations:
for a, b in ((r.issueId, r.issueToId), (r.issueToId, r.issueId)):
if a in by_id and b in by_id and a != b:
adjacency[a].add(b)
reached: set[int] = set(roots)
frontier: List[int] = list(roots)
while frontier:
nxt: List[int] = []
for tid in frontier:
for neighbour in adjacency.get(tid, ()): # type: ignore[arg-type]
if neighbour not in reached:
reached.add(neighbour)
nxt.append(neighbour)
frontier = nxt
return sum(1 for t in tickets if t.id not in reached)
def _statusByTracker(
    tickets: List[RedmineTicketDto], schema: Optional[RedmineFieldSchemaDto]
) -> List[RedmineStatusByTrackerEntry]:
    """Per-tracker status histogram, largest trackers first."""
    histogram: Dict[Tuple[Optional[int], str], Counter] = defaultdict(Counter)
    for ticket in tickets:
        tracker_key = (ticket.trackerId, ticket.trackerName or "(unbekannt)")
        histogram[tracker_key][ticket.statusName or "(unbekannt)"] += 1
    entries = [
        RedmineStatusByTrackerEntry(
            trackerId=tracker_id,
            trackerName=tracker_name,
            countsByStatus=dict(status_counts),
            total=sum(status_counts.values()),
        )
        for (tracker_id, tracker_name), status_counts in histogram.items()
    ]
    return sorted(entries, key=lambda e: e.total, reverse=True)
def _throughput(
    tickets: List[RedmineTicketDto],
    periodFrom: Optional[_dt.datetime],
    periodTo: Optional[_dt.datetime],
    bucket: str,
) -> List[RedmineThroughputBucket]:
    """Build per-bucket snapshots: how many tickets exist at the END of
    each bucket, and how many of those are still open at that point.
    ``created`` / ``closed`` keep the raw delta numbers so callers (and
    AI tools) that want the flow can still see them. The UI line chart
    plots ``cumTotal`` and ``cumOpen``.
    """
    if not tickets:
        return []
    # If no period is set, span the lifetime of the data.
    if periodFrom is None or periodTo is None:
        all_dates: List[_dt.datetime] = []
        for t in tickets:
            for s in (t.createdOn, t.updatedOn):
                d = _parseIsoDate(s)
                if d:
                    all_dates.append(d)
        if not all_dates:
            return []
        periodFrom = periodFrom or min(all_dates)
        periodTo = periodTo or max(all_dates)
    # 1) Per-bucket flow counters (created / closed) within the period.
    #    "closed" uses updatedOn as a proxy for the close time.
    created_counter: Counter = Counter()
    closed_counter: Counter = Counter()
    for t in tickets:
        c = _parseIsoDate(t.createdOn)
        if c and _inPeriod(c, periodFrom, periodTo):
            created_counter[_bucketKey(c, bucket)] += 1
        if t.isClosed:
            u = _parseIsoDate(t.updatedOn)
            if u and _inPeriod(u, periodFrom, periodTo):
                closed_counter[_bucketKey(u, bucket)] += 1
    # 2) Build the contiguous list of bucket keys spanning [from, to] so
    # the line chart has a stable x-axis even for empty intervals.
    bucket_keys = _bucketKeysBetween(periodFrom, periodTo, bucket)
    if not bucket_keys:
        return []
    # 3) Snapshot counts: total = #created with createdOn <= bucket end;
    # open = total - #closed with closedTs <= bucket end. We compute
    # against ALL tickets (not just the period-windowed counters) so
    # pre-period tickets are correctly counted in the snapshot.
    created_dates: List[_dt.datetime] = []
    closed_dates: List[_dt.datetime] = []
    for t in tickets:
        c = _parseIsoDate(t.createdOn)
        if c:
            created_dates.append(c)
        if t.isClosed:
            u = _parseIsoDate(t.updatedOn)
            if u:
                closed_dates.append(u)
    # Sorted ascending so _countLE can binary-search per bucket edge.
    created_dates.sort()
    closed_dates.sort()
    out: List[RedmineThroughputBucket] = []
    for key in bucket_keys:
        edge = _bucketEnd(key, bucket)
        cum_total = _countLE(created_dates, edge)
        cum_closed = _countLE(closed_dates, edge)
        # Clamp at 0 in case closes slightly outrun creates (updatedOn proxy).
        cum_open = max(0, cum_total - cum_closed)
        out.append(
            RedmineThroughputBucket(
                bucketKey=key,
                label=_bucketLabel(key, bucket),
                created=int(created_counter.get(key, 0)),
                closed=int(closed_counter.get(key, 0)),
                cumTotal=int(cum_total),
                cumOpen=int(cum_open),
            )
        )
    return out
def _countLE(sortedDates: List[_dt.datetime], edge: _dt.datetime) -> int:
"""Binary search: how many entries in ``sortedDates`` are <= ``edge``."""
return bisect.bisect_right(sortedDates, edge)
def _bucketKeysBetween(
    fromD: _dt.datetime, toD: _dt.datetime, bucket: str
) -> List[str]:
    """Inclusive, sorted list of bucket keys covering ``[fromD, toD]``."""
    if toD < fromD:
        return []
    # month: walk in <28d steps so no month is ever skipped.
    step_days = {"day": 1, "week": 7}.get(bucket, 27)
    step = _dt.timedelta(days=step_days)
    collected: List[str] = []
    seen: set[str] = set()
    cursor = fromD
    iterations = 0
    while cursor <= toD and iterations < 5000:
        key = _bucketKey(cursor, bucket)
        if key not in seen:
            seen.add(key)
            collected.append(key)
        cursor += step
        iterations += 1
    # The stepping may jump past toD without emitting its bucket (the step
    # doesn't necessarily divide the interval cleanly, esp. for months).
    final_key = _bucketKey(toD, bucket)
    if final_key not in seen:
        collected.append(final_key)
    collected.sort()
    return collected
def _bucketEnd(key: str, bucket: str) -> _dt.datetime:
"""Last-instant timestamp covered by the given bucket key."""
if bucket == "day":
d = _dt.datetime.strptime(key, "%Y-%m-%d")
return d.replace(hour=23, minute=59, second=59)
if bucket == "month":
d = _dt.datetime.strptime(key, "%Y-%m")
# First of next month minus one second.
if d.month == 12:
nxt = d.replace(year=d.year + 1, month=1)
else:
nxt = d.replace(month=d.month + 1)
return nxt - _dt.timedelta(seconds=1)
# week: ISO format ``YYYY-Www``. End = Sunday 23:59:59 of that week.
try:
year_str, week_str = key.split("-W")
year = int(year_str)
week = int(week_str)
# ``%G-%V-%u`` parses ISO year/week/day; %u=1 is Monday.
monday = _dt.datetime.strptime(f"{year}-{week:02d}-1", "%G-%V-%u")
return monday + _dt.timedelta(days=6, hours=23, minutes=59, seconds=59)
except Exception:
return _utcNow()
def _topAssignees(
    tickets: List[RedmineTicketDto], *, limit: int = 10
) -> List[RedmineAssigneeBucket]:
    """Open-ticket counts per assignee, highest first, capped at ``limit``."""
    open_counts: Dict[Tuple[Optional[int], str], int] = defaultdict(int)
    for ticket in tickets:
        if ticket.isClosed:
            continue  # only open work counts towards the assignee load
        assignee = (ticket.assignedToId, ticket.assignedToName or "(nicht zugewiesen)")
        open_counts[assignee] += 1
    ranked = sorted(open_counts.items(), key=lambda item: item[1], reverse=True)
    return [
        RedmineAssigneeBucket(assignedToId=assignee_id, name=assignee_name, open=count)
        for (assignee_id, assignee_name), count in ranked[:limit]
    ]
def _relationDistribution(
    tickets: List[RedmineTicketDto],
) -> List[RedmineRelationDistributionEntry]:
    """Relation-type frequencies, deduplicated by relation id, biggest first."""
    counted_ids: set[int] = set()
    frequencies: Counter = Counter()
    for ticket in tickets:
        for relation in ticket.relations:
            if relation.id in counted_ids:
                continue  # each relation appears on both endpoint tickets
            counted_ids.add(relation.id)
            frequencies[relation.relationType or "relates"] += 1
    ranked = sorted(frequencies.items(), key=lambda item: item[1], reverse=True)
    return [
        RedmineRelationDistributionEntry(relationType=rel_type, count=total)
        for rel_type, total in ranked
    ]
def _backlogAging(
    tickets: List[RedmineTicketDto], *, now: Optional[_dt.datetime] = None
) -> List[RedmineAgingBucket]:
    """Histogram of OPEN tickets by age in days since their last update
    (falling back to the creation date).  ``now`` is injectable for tests."""
    if now is None:
        now = _utcNow()
    # NOTE(review): assumes RedmineAgingBucket.count defaults to 0 -- the
    # loop below only increments it.  Verify against the datamodel.
    buckets = [
        RedmineAgingBucket(bucketKey="lt7", label="< 7 Tage", minDays=0, maxDays=7),
        RedmineAgingBucket(bucketKey="7-30", label="7-30 Tage", minDays=7, maxDays=30),
        RedmineAgingBucket(bucketKey="30-90", label="30-90 Tage", minDays=30, maxDays=90),
        RedmineAgingBucket(bucketKey="90-180", label="90-180 Tage", minDays=90, maxDays=180),
        RedmineAgingBucket(bucketKey="gt180", label="> 180 Tage", minDays=180, maxDays=None),
    ]
    for t in tickets:
        if t.isClosed:
            continue
        ref = _parseIsoDate(t.updatedOn) or _parseIsoDate(t.createdOn)
        if ref is None:
            continue
        age_days = max(0, (now - ref).days)
        # First matching bucket wins; ``maxDays is None`` marks the open-ended tail.
        for b in buckets:
            if (b.maxDays is None and age_days >= b.minDays) or (
                b.maxDays is not None and b.minDays <= age_days < b.maxDays
            ):
                b.count += 1
                break
    return buckets
# ---------------------------------------------------------------------------
# Date helpers (no external deps)
# ---------------------------------------------------------------------------
def _utcNow() -> _dt.datetime:
"""Naive UTC ``datetime`` -- the rest of the helpers compare naive
objects, so we strip tz info on purpose."""
return _dt.datetime.now(_dt.timezone.utc).replace(tzinfo=None)
def _parseIsoDate(value: Optional[str]) -> Optional[_dt.datetime]:
if not value:
return None
try:
s = value.replace("Z", "+00:00") if isinstance(value, str) else value
if isinstance(s, str) and "T" not in s and len(s) == 10:
return _dt.datetime.strptime(s, "%Y-%m-%d")
return _dt.datetime.fromisoformat(s).replace(tzinfo=None)
except Exception:
try:
return _dt.datetime.strptime(str(value)[:10], "%Y-%m-%d")
except Exception:
return None
def _inPeriod(
when: _dt.datetime,
fromDate: Optional[_dt.datetime],
toDate: Optional[_dt.datetime],
) -> bool:
if fromDate and when < fromDate:
return False
if toDate and when > toDate + _dt.timedelta(days=1):
return False
return True
def _bucketKey(when: _dt.datetime, bucket: str) -> str:
if bucket == "day":
return when.strftime("%Y-%m-%d")
if bucket == "month":
return when.strftime("%Y-%m")
iso_year, iso_week, _ = when.isocalendar()
return f"{iso_year}-W{iso_week:02d}"
def _bucketLabel(key: str, bucket: str) -> str:
if bucket == "day":
return key
if bucket == "month":
try:
d = _dt.datetime.strptime(key, "%Y-%m")
return d.strftime("%b %Y")
except Exception:
return key
return key

View file

@ -0,0 +1,131 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""TTL-based in-memory cache for ``serviceRedmineStats`` results.
The cache key is ``(featureInstanceId, dateFrom, dateTo, bucket, sorted(trackerIds))``.
Any write through ``serviceRedmine`` (createIssue, updateIssue, deleteIssue,
addRelation, deleteRelation) MUST call :func:`invalidateInstance` to drop
all cached entries for that feature instance.
Default TTL: 90 seconds. Override at construction or via ``setTtl``.
"""
from __future__ import annotations
import threading
import time
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional, Tuple
_DEFAULT_TTL_SECONDS = 90.0
def _freeze(value: Any) -> Any:
"""Make ``value`` hashable so it can live in a tuple cache key.
Lists / sets become sorted tuples; dicts become sorted item tuples;
everything else is returned untouched.
"""
if isinstance(value, (list, set, tuple)):
try:
return tuple(sorted(value))
except TypeError:
return tuple(value)
if isinstance(value, dict):
return tuple(sorted(value.items()))
return value
@dataclass
class _CacheEntry:
    """One cached value plus its absolute expiry on the ``time.monotonic`` scale."""
    value: Any        # cached stats payload (opaque to the cache itself)
    expiresAt: float  # monotonic timestamp after which the entry is stale
# Key layout produced by ``RedmineStatsCache.buildKey``:
# (instanceId, dateFrom, dateTo, bucket, sorted trackerIds, frozen extra dims).
CacheKey = Tuple[str, Optional[str], Optional[str], str, Tuple[int, ...], Tuple[Any, ...]]
class RedmineStatsCache:
    """Thread-safe TTL cache for stats query results."""
    def __init__(self, ttlSeconds: float = _DEFAULT_TTL_SECONDS) -> None:
        self._ttlSeconds = float(ttlSeconds)
        self._store: Dict[CacheKey, _CacheEntry] = {}
        self._lock = threading.Lock()

    def setTtl(self, ttlSeconds: float) -> None:
        """Change the default TTL used by subsequent ``set`` calls."""
        self._ttlSeconds = float(ttlSeconds)

    @staticmethod
    def buildKey(
        featureInstanceId: str,
        dateFrom: Optional[str],
        dateTo: Optional[str],
        bucket: str,
        trackerIds: Iterable[int],
        *extraDims: Any,
    ) -> CacheKey:
        """Canonical cache key for the given query.

        ``extraDims`` is an open-ended tail so callers can add more filter
        dimensions (e.g. ``categoryIds``, ``statusFilter``) without a
        signature break here.  Pass them already canonicalised (sorted
        lists, normalised strings, ...) so the same query always produces
        the same key.
        """
        normalized_trackers = tuple(sorted(int(t) for t in trackerIds or []))
        frozen_extras = tuple(_freeze(dim) for dim in extraDims)
        return (
            str(featureInstanceId),
            dateFrom or None,
            dateTo or None,
            (bucket or "week").lower(),
            normalized_trackers,
            frozen_extras,
        )

    def get(self, key: CacheKey) -> Optional[Any]:
        """Return the cached value, or ``None`` when absent or expired."""
        moment = time.monotonic()
        with self._lock:
            entry = self._store.get(key)
            if entry is None:
                return None
            if entry.expiresAt < moment:
                # Expired entries are evicted lazily, on read.
                del self._store[key]
                return None
            return entry.value

    def set(self, key: CacheKey, value: Any, *, ttlSeconds: Optional[float] = None) -> None:
        """Store ``value`` under ``key`` with the given (or default) TTL."""
        effective_ttl = self._ttlSeconds if ttlSeconds is None else float(ttlSeconds)
        with self._lock:
            self._store[key] = _CacheEntry(value=value, expiresAt=time.monotonic() + effective_ttl)

    def invalidateInstance(self, featureInstanceId: str) -> int:
        """Drop every entry whose key starts with ``featureInstanceId``.
        Returns the number of entries dropped.
        """
        wanted = str(featureInstanceId)
        with self._lock:
            doomed = [key for key in self._store if key[0] == wanted]
            for key in doomed:
                del self._store[key]
        return len(doomed)

    def clear(self) -> None:
        """Drop every cached entry."""
        with self._lock:
            self._store.clear()

    def size(self) -> int:
        """Number of stored entries (expired-but-unevicted ones included)."""
        with self._lock:
            return len(self._store)
# Lazily-created module-level singleton; access only via _getStatsCache().
_globalCache: Optional[RedmineStatsCache] = None
def _getStatsCache() -> RedmineStatsCache:
    """Process-wide singleton.

    The check-then-assign below is not locked: concurrent first calls may
    each construct a cache, and the last assignment wins.
    """
    global _globalCache
    if _globalCache is None:
        _globalCache = RedmineStatsCache()
    return _globalCache

View file

@ -0,0 +1,323 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Incremental Redmine -> ``poweron_redmine`` mirror sync.
Strategy:
- **Full sync** when ``RedmineInstanceConfig.lastSyncAt`` is None or
``force=True`` is requested. Pulls every issue with ``status_id=*``
(open + closed) for the configured project, paginated.
- **Incremental sync** otherwise. Pulls only issues whose ``updated_on``
is greater than ``lastSyncAt - overlapSeconds`` (default 1h overlap to
catch clock skew and missed updates).
- Each issue is upserted into ``RedmineTicketMirror`` (looked up by
``(featureInstanceId, redmineId)``).
- The full set of relations attached to each issue replaces any existing
relation rows for that issue in ``RedmineRelationMirror``.
Concurrency: a per-instance ``asyncio.Lock`` prevents two concurrent
syncs for the same feature instance.
After every successful sync the in-memory stats cache is invalidated for
the instance.
"""
from __future__ import annotations
import asyncio
import logging
import time
from typing import Any, Dict, List, Optional
from modules.connectors.connectorTicketsRedmine import RedmineApiError
from modules.datamodels.datamodelUam import User
from modules.features.redmine.datamodelRedmine import (
RedmineInstanceConfig,
RedmineRelationMirror,
RedmineSyncResultDto,
RedmineSyncStatusDto,
RedmineTicketMirror,
)
from modules.features.redmine.interfaceFeatureRedmine import getInterface
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
logger = logging.getLogger(__name__)
_INCREMENTAL_OVERLAP_SECONDS = 60 * 60 # 1h overlap on incremental syncs
_DEFAULT_PAGE_SIZE = 100
_MAX_PAGES_SAFETY = 5000 # 500k tickets safety cap
_locks: Dict[str, asyncio.Lock] = {}
def _lockFor(featureInstanceId: str) -> asyncio.Lock:
if featureInstanceId not in _locks:
_locks[featureInstanceId] = asyncio.Lock()
return _locks[featureInstanceId]
# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------
async def runSync(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    *,
    force: bool = False,
    pageSize: int = _DEFAULT_PAGE_SIZE,
) -> RedmineSyncResultDto:
    """Run a (full or incremental) sync for the given feature instance.

    Raises ``RuntimeError`` when the instance has no usable connector or
    config; re-raises ``RedmineApiError`` after recording the failure.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    if not connector or not cfg:
        raise RuntimeError(
            f"Redmine instance {featureInstanceId} is not configured or inactive"
        )
    # Serialise syncs per instance -- overlapping syncs would race on the
    # mirror rows and the lastSyncAt cursor.
    async with _lockFor(featureInstanceId):
        started = time.monotonic()
        # Full sync when forced or when this instance was never synced.
        full = force or cfg.lastSyncAt is None
        updated_from_iso: Optional[str] = None
        if not full and cfg.lastSyncAt is not None:
            # Incremental: re-read an overlap window behind the cursor to
            # tolerate clock skew and updates missed around the last sync.
            cursor_epoch = max(0.0, cfg.lastSyncAt - _INCREMENTAL_OVERLAP_SECONDS)
            updated_from_iso = time.strftime(
                "%Y-%m-%dT%H:%M:%SZ", time.gmtime(cursor_epoch)
            )
        try:
            issues = await connector.listAllIssues(
                statusId="*",
                updatedOnFrom=updated_from_iso,
                pageSize=pageSize,
                maxPages=_MAX_PAGES_SAFETY,
                include=["relations"],
            )
        except RedmineApiError as e:
            # Record the failure for the status endpoint, then bubble up.
            iface.recordSyncFailure(featureInstanceId, str(e))
            raise
        tickets_upserted = 0
        relations_upserted = 0
        now_epoch = time.time()
        for issue in issues:
            tickets_upserted += _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
            relations_upserted += _replaceRelations(iface, featureInstanceId, issue, now_epoch)
        duration_ms = int((time.monotonic() - started) * 1000)
        iface.recordSyncSuccess(
            featureInstanceId,
            full=full,
            ticketsUpserted=tickets_upserted,
            durationMs=duration_ms,
            lastSyncAt=now_epoch,
        )
        # Mirror changed -> cached stats for this instance are stale.
        _getStatsCache().invalidateInstance(featureInstanceId)
        return RedmineSyncResultDto(
            instanceId=featureInstanceId,
            full=full,
            ticketsUpserted=tickets_upserted,
            relationsUpserted=relations_upserted,
            durationMs=duration_ms,
            lastSyncAt=now_epoch,
        )
def getSyncStatus(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
) -> RedmineSyncStatusDto:
    """Snapshot of the sync bookkeeping fields plus current mirror row counts."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    cfg = iface.getConfig(featureInstanceId)
    # Config may be absent for unconfigured instances -> every field is None.
    def _fromCfg(attr: str):
        return getattr(cfg, attr) if cfg else None
    return RedmineSyncStatusDto(
        instanceId=featureInstanceId,
        lastSyncAt=_fromCfg("lastSyncAt"),
        lastFullSyncAt=_fromCfg("lastFullSyncAt"),
        lastSyncDurationMs=_fromCfg("lastSyncDurationMs"),
        lastSyncTicketCount=_fromCfg("lastSyncTicketCount"),
        lastSyncErrorAt=_fromCfg("lastSyncErrorAt"),
        lastSyncErrorMessage=_fromCfg("lastSyncErrorMessage"),
        mirroredTicketCount=iface.countMirroredTickets(featureInstanceId),
        mirroredRelationCount=iface.countMirroredRelations(featureInstanceId),
    )
async def upsertSingleTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> int:
    """Re-fetch one issue from Redmine and upsert it into the mirror.
    Used by the write paths in ``serviceRedmine`` so the mirror stays
    consistent after every create / update without a full sync.
    Returns the number of relation rows replaced.
    """
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    connector = iface.resolveConnector(featureInstanceId)
    if not connector:
        raise RuntimeError("Redmine instance not configured")
    # Relations must be included so _replaceRelations sees the full set.
    issue = await connector.getIssue(int(issueId), includeRelations=True)
    now_epoch = time.time()
    _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
    relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
    # Any mirror write invalidates the cached stats for this instance.
    _getStatsCache().invalidateInstance(featureInstanceId)
    return relations_upserted
def deleteMirroredTicket(
    currentUser: User,
    mandateId: Optional[str],
    featureInstanceId: str,
    issueId: int,
) -> bool:
    """Drop a ticket and its relations from the mirror after a successful Redmine DELETE."""
    iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
    issue_no = int(issueId)
    removed = iface.deleteMirroredTicket(featureInstanceId, issue_no)
    # Relations are dropped regardless of whether the ticket row existed.
    iface.deleteMirroredRelationsForIssue(featureInstanceId, issue_no)
    _getStatsCache().invalidateInstance(featureInstanceId)
    return removed
# ---------------------------------------------------------------------------
# Per-issue upsert helpers (sync, run inside the per-instance lock)
# ---------------------------------------------------------------------------
def _upsertTicket(
    iface,
    featureInstanceId: str,
    mandateId: Optional[str],
    issue: Dict[str, Any],
    nowEpoch: float,
) -> int:
    """Upsert one raw Redmine issue into the ticket mirror.

    Returns 1 when a row was written, 0 when the issue carried no id.
    """
    redmine_id = issue.get("id")
    if redmine_id is None:
        return 0
    # NOTE(review): getConfig() is re-read on every call; during a full sync
    # that is one config lookup per issue -- consider hoisting in the caller.
    statuses_lookup = (iface.getConfig(featureInstanceId).schemaCache or {}).get("statuses") or []
    is_closed = _statusIsClosed(issue.get("status") or {}, statuses_lookup)
    record = _ticketRecordFromIssue(issue, featureInstanceId, mandateId, is_closed, nowEpoch)
    iface.upsertMirroredTicket(featureInstanceId, int(redmine_id), record)
    return 1
def _replaceRelations(
    iface,
    featureInstanceId: str,
    issue: Dict[str, Any],
    nowEpoch: float,
) -> int:
    """Replace the mirrored relation rows for one issue; returns rows written."""
    issue_id = issue.get("id")
    if issue_id is None:
        return 0
    # Delete-then-insert so the mirror exactly matches the issue's relations.
    iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issue_id))
    written = 0
    for relation in issue.get("relations") or []:
        relation_id = relation.get("id")
        if relation_id is None:
            continue
        row = {
            "featureInstanceId": featureInstanceId,
            "redmineRelationId": int(relation_id),
            "issueId": int(relation.get("issue_id") or 0),
            "issueToId": int(relation.get("issue_to_id") or 0),
            "relationType": str(relation.get("relation_type") or "relates"),
            "delay": relation.get("delay"),
            "syncedAt": nowEpoch,
        }
        iface.insertMirroredRelation(featureInstanceId, row)
        written += 1
    return written
# ---------------------------------------------------------------------------
# Pure helpers
# ---------------------------------------------------------------------------
def _statusIsClosed(status: Dict[str, Any], statusesLookup: List[Dict[str, Any]]) -> bool:
"""Best-effort: prefer the schemaCache; fall back to inspecting the
raw issue (Redmine sets ``is_closed`` on the status object only when
explicitly requested)."""
sid = status.get("id")
if sid is None:
return False
for s in statusesLookup:
if s.get("id") == sid:
return bool(s.get("isClosed"))
return bool(status.get("is_closed"))
def _parseRedmineDateToEpoch(value: Optional[str]) -> Optional[float]:
if not value:
return None
try:
from datetime import datetime
s = value.replace("Z", "+00:00")
return datetime.fromisoformat(s).timestamp()
except Exception:
return None
def _ticketRecordFromIssue(
    issue: Dict[str, Any],
    featureInstanceId: str,
    mandateId: Optional[str],
    isClosed: bool,
    nowEpoch: float,
) -> Dict[str, Any]:
    """Flatten a raw Redmine issue payload into a mirror-table record.

    Nested reference objects (tracker, status, priority, ...) are unpacked
    into flat id/name column pairs; the untouched payload is preserved under
    ``raw`` so no information is lost on round-trips.
    """
    def _ref(key: str) -> Dict[str, Any]:
        # Nested Redmine reference objects may be absent or null.
        return issue.get(key) or {}

    tracker = _ref("tracker")
    status = _ref("status")
    priority = _ref("priority")
    assigned = _ref("assigned_to")
    author = _ref("author")
    parent = _ref("parent")
    version = _ref("fixed_version")
    category = _ref("category")

    createdOn = issue.get("created_on")
    updatedOn = issue.get("updated_on")
    updatedTs = _parseRedmineDateToEpoch(updatedOn)
    closed = bool(isClosed)

    return {
        "featureInstanceId": featureInstanceId,
        "mandateId": mandateId,
        "redmineId": int(issue.get("id")),
        "subject": str(issue.get("subject") or ""),
        "description": str(issue.get("description") or ""),
        "trackerId": tracker.get("id"),
        "trackerName": tracker.get("name"),
        "statusId": status.get("id"),
        "statusName": status.get("name"),
        "isClosed": closed,
        "priorityId": priority.get("id"),
        "priorityName": priority.get("name"),
        "assignedToId": assigned.get("id"),
        "assignedToName": assigned.get("name"),
        "authorId": author.get("id"),
        "authorName": author.get("name"),
        "parentId": parent.get("id"),
        "fixedVersionId": version.get("id"),
        "fixedVersionName": version.get("name"),
        "categoryId": category.get("id"),
        "categoryName": category.get("name"),
        "createdOn": createdOn,
        "updatedOn": updatedOn,
        "createdOnTs": _parseRedmineDateToEpoch(createdOn),
        "updatedOnTs": updatedTs,
        # Redmine's issue endpoint exposes no dedicated "closed_on" field;
        # for closed tickets the last update time is the best stable proxy
        # without scanning journals.
        "closedOnTs": updatedTs if closed else None,
        "customFields": list(issue.get("custom_fields") or []),
        "raw": issue,
        "syncedAt": nowEpoch,
    }

View file

@ -8,28 +8,30 @@ Handles feature initialization and RBAC catalog registration.
import logging
from typing import Dict, List, Any
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
# Feature metadata
FEATURE_CODE = "teamsbot"
FEATURE_LABEL = "Teams Bot"
FEATURE_LABEL = t("Teams Bot", context="UI")
FEATURE_ICON = "mdi-headset"
# UI Objects for RBAC catalog
UI_OBJECTS = [
{
"objectKey": "ui.feature.teamsbot.dashboard",
"label": "Dashboard",
"label": t("Dashboard", context="UI"),
"meta": {"area": "dashboard"}
},
{
"objectKey": "ui.feature.teamsbot.sessions",
"label": "Sitzungen",
"label": t("Sitzungen", context="UI"),
"meta": {"area": "sessions"}
},
{
"objectKey": "ui.feature.teamsbot.settings",
"label": "Einstellungen",
"label": t("Einstellungen", context="UI"),
"meta": {"area": "settings", "admin_only": True}
},
]
@ -38,7 +40,7 @@ UI_OBJECTS = [
DATA_OBJECTS = [
{
"objectKey": "data.feature.teamsbot.TeamsbotSession",
"label": "Sitzung",
"label": t("Sitzung", context="UI"),
"meta": {
"table": "TeamsbotSession",
"fields": ["id", "meetingLink", "botName", "status", "startedAt", "endedAt"],
@ -48,7 +50,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.teamsbot.TeamsbotTranscript",
"label": "Transkript",
"label": t("Transkript", context="UI"),
"meta": {
"table": "TeamsbotTranscript",
"fields": ["id", "sessionId", "speaker", "text", "timestamp"],
@ -58,7 +60,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.teamsbot.TeamsbotBotResponse",
"label": "Bot-Antwort",
"label": t("Bot-Antwort", context="UI"),
"meta": {
"table": "TeamsbotBotResponse",
"fields": ["id", "sessionId", "responseText", "detectedIntent"],
@ -68,7 +70,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.teamsbot.*",
"label": "Alle Teams Bot Daten",
"label": t("Alle Teams Bot Daten", context="UI"),
"meta": {"wildcard": True, "description": "Wildcard for all teamsbot data tables"}
},
]
@ -77,22 +79,22 @@ DATA_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.teamsbot.session.start",
"label": "Sitzung starten",
"label": t("Sitzung starten", context="UI"),
"meta": {"endpoint": "/api/teamsbot/{instanceId}/sessions", "method": "POST"}
},
{
"objectKey": "resource.feature.teamsbot.session.stop",
"label": "Sitzung beenden",
"label": t("Sitzung beenden", context="UI"),
"meta": {"endpoint": "/api/teamsbot/{instanceId}/sessions/{sessionId}/stop", "method": "POST"}
},
{
"objectKey": "resource.feature.teamsbot.session.delete",
"label": "Sitzung löschen",
"label": t("Sitzung löschen", context="UI"),
"meta": {"endpoint": "/api/teamsbot/{instanceId}/sessions/{sessionId}", "method": "DELETE"}
},
{
"objectKey": "resource.feature.teamsbot.config.edit",
"label": "Konfiguration bearbeiten",
"label": t("Konfiguration bearbeiten", context="UI"),
"meta": {"endpoint": "/api/teamsbot/{instanceId}/config", "method": "PUT", "admin_only": True}
},
]

View file

@ -526,7 +526,8 @@ class TrusteePosition(PowerOnModel):
"label": "Buchungsbetrag",
"frontend_type": "number",
"frontend_readonly": False,
"frontend_required": True
"frontend_required": True,
"frontend_format": "R:#'###.00",
}
)
originalCurrency: str = Field(
@ -551,7 +552,8 @@ class TrusteePosition(PowerOnModel):
"label": "Originalbetrag",
"frontend_type": "number",
"frontend_readonly": False,
"frontend_required": True
"frontend_required": True,
"frontend_format": "R:#'###.00",
}
)
vatPercentage: float = Field(
@ -561,7 +563,8 @@ class TrusteePosition(PowerOnModel):
"label": "MwSt-Prozentsatz",
"frontend_type": "number",
"frontend_readonly": False,
"frontend_required": False
"frontend_required": False,
"frontend_format": "R:0.00",
}
)
vatAmount: float = Field(
@ -571,7 +574,8 @@ class TrusteePosition(PowerOnModel):
"label": "MwSt-Betrag",
"frontend_type": "number",
"frontend_readonly": False,
"frontend_required": False
"frontend_required": False,
"frontend_format": "R:#'###.00",
}
)
debitAccountNumber: Optional[str] = Field(
@ -750,7 +754,15 @@ class TrusteeDataJournalEntry(PowerOnModel):
reference: Optional[str] = Field(default=None, description="Booking reference / voucher number", json_schema_extra={"label": "Referenz"})
description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
totalAmount: float = Field(default=0.0, description="Total amount of entry", json_schema_extra={"label": "Betrag"})
totalAmount: float = Field(
default=0.0,
description="Total amount of entry",
json_schema_extra={
"label": "Betrag",
# Right-aligned amount with Swiss thousands separator and 2 decimals.
"frontend_format": "R:#'###.00",
},
)
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
@ -760,8 +772,8 @@ class TrusteeDataJournalLine(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}})
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll"})
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben"})
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll", "frontend_format": "R:#'###.00"})
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben", "frontend_format": "R:#'###.00"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})
@ -794,10 +806,10 @@ class TrusteeDataAccountBalance(PowerOnModel):
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
periodYear: int = Field(description="Fiscal year", json_schema_extra={"label": "Jahr"})
periodMonth: int = Field(default=0, description="Month (1-12); 0 = annual total", json_schema_extra={"label": "Monat"})
openingBalance: float = Field(default=0.0, json_schema_extra={"label": "Eröffnungssaldo"})
debitTotal: float = Field(default=0.0, json_schema_extra={"label": "Soll-Umsatz"})
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz"})
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo"})
openingBalance: float = Field(default=0.0, json_schema_extra={"label": "Eröffnungssaldo", "frontend_format": "R:#'###.00"})
debitTotal: float = Field(default=0.0, json_schema_extra={"label": "Soll-Umsatz", "frontend_format": "R:#'###.00"})
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz", "frontend_format": "R:#'###.00"})
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo", "frontend_format": "R:#'###.00"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})

View file

@ -8,11 +8,13 @@ Handles feature initialization and RBAC catalog registration.
import logging
from typing import Dict, List, Any
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
# Feature metadata
FEATURE_CODE = "trustee"
FEATURE_LABEL = "Treuhand"
FEATURE_LABEL = t("Treuhand", context="UI")
FEATURE_ICON = "mdi-briefcase"
# UI Objects for RBAC catalog
@ -20,7 +22,7 @@ FEATURE_ICON = "mdi-briefcase"
UI_OBJECTS = [
{
"objectKey": "ui.feature.trustee.dashboard",
"label": "Dashboard",
"label": t("Dashboard", context="UI"),
"meta": {"area": "dashboard"}
},
# Note: ui.feature.trustee.positions and .documents removed.
@ -30,32 +32,32 @@ UI_OBJECTS = [
# remains and continues to gate per-row access.
{
"objectKey": "ui.feature.trustee.data-tables",
"label": "Daten-Tabellen",
"label": t("Daten-Tabellen", context="UI"),
"meta": {"area": "data-tables"}
},
{
"objectKey": "ui.feature.trustee.import-process",
"label": "Import & Verarbeitung",
"label": t("Import & Verarbeitung", context="UI"),
"meta": {"area": "import-process"}
},
{
"objectKey": "ui.feature.trustee.analyse",
"label": "Analyse & Reporting",
"label": t("Analyse & Reporting", context="UI"),
"meta": {"area": "analyse"}
},
{
"objectKey": "ui.feature.trustee.abschluss",
"label": "Abschluss & Prüfung",
"label": t("Abschluss & Prüfung", context="UI"),
"meta": {"area": "abschluss"}
},
{
"objectKey": "ui.feature.trustee.settings",
"label": "Buchhaltungs-Einstellungen",
"label": t("Buchhaltungs-Einstellungen", context="UI"),
"meta": {"area": "settings", "admin_only": True}
},
{
"objectKey": "ui.feature.trustee.instance-roles",
"label": "Instanz-Rollen & Berechtigungen",
"label": t("Instanz-Rollen & Berechtigungen", context="UI"),
"meta": {"area": "admin", "admin_only": True}
},
]
@ -69,23 +71,23 @@ DATA_OBJECTS = [
# ── Categorical Groups (UDB folders) ─────────────────────────────────────
{
"objectKey": "data.feature.trustee.localData",
"label": "Lokale Daten",
"label": t("Lokale Daten", context="UI"),
"meta": {"isGroup": True}
},
{
"objectKey": "data.feature.trustee.config",
"label": "Konfiguration",
"label": t("Konfiguration", context="UI"),
"meta": {"isGroup": True}
},
{
"objectKey": "data.feature.trustee.accountingData",
"label": "Daten aus Buchhaltungssystem",
"label": t("Daten aus Buchhaltungssystem", context="UI"),
"meta": {"isGroup": True}
},
# ── Lokale Daten ─────────────────────────────────────────────────────────
{
"objectKey": "data.feature.trustee.TrusteePosition",
"label": "Position",
"label": t("Position", context="UI"),
"meta": {
"table": "TrusteePosition",
"group": "data.feature.trustee.localData",
@ -94,7 +96,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.TrusteeDocument",
"label": "Dokument",
"label": t("Dokument", context="UI"),
"meta": {
"table": "TrusteeDocument",
"group": "data.feature.trustee.localData",
@ -104,7 +106,7 @@ DATA_OBJECTS = [
# ── Konfiguration ────────────────────────────────────────────────────────
{
"objectKey": "data.feature.trustee.TrusteeAccountingConfig",
"label": "Buchhaltungs-Verbindung",
"label": t("Buchhaltungs-Verbindung", context="UI"),
"meta": {
"table": "TrusteeAccountingConfig",
"group": "data.feature.trustee.config",
@ -113,7 +115,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.TrusteeAccountingSync",
"label": "Sync-Protokoll",
"label": t("Sync-Protokoll", context="UI"),
"meta": {
"table": "TrusteeAccountingSync",
"group": "data.feature.trustee.config",
@ -123,7 +125,7 @@ DATA_OBJECTS = [
# ── Daten aus Buchhaltungssystem ─────────────────────────────────────────
{
"objectKey": "data.feature.trustee.TrusteeDataAccount",
"label": "Kontenplan",
"label": t("Kontenplan", context="UI"),
"meta": {
"table": "TrusteeDataAccount",
"group": "data.feature.trustee.accountingData",
@ -132,7 +134,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.TrusteeDataJournalEntry",
"label": "Buchungen",
"label": t("Buchungen", context="UI"),
"meta": {
"table": "TrusteeDataJournalEntry",
"group": "data.feature.trustee.accountingData",
@ -141,7 +143,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.TrusteeDataJournalLine",
"label": "Buchungszeilen",
"label": t("Buchungszeilen", context="UI"),
"meta": {
"table": "TrusteeDataJournalLine",
"group": "data.feature.trustee.accountingData",
@ -150,7 +152,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.TrusteeDataContact",
"label": "Kontakte",
"label": t("Kontakte", context="UI"),
"meta": {
"table": "TrusteeDataContact",
"group": "data.feature.trustee.accountingData",
@ -159,7 +161,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.TrusteeDataAccountBalance",
"label": "Kontosalden",
"label": t("Kontosalden", context="UI"),
"meta": {
"table": "TrusteeDataAccountBalance",
"group": "data.feature.trustee.accountingData",
@ -168,7 +170,7 @@ DATA_OBJECTS = [
},
{
"objectKey": "data.feature.trustee.*",
"label": "Alle Treuhand-Daten",
"label": t("Alle Treuhand-Daten", context="UI"),
"meta": {"wildcard": True, "description": "Wildcard for all trustee data tables"}
},
]
@ -178,67 +180,67 @@ DATA_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.trustee.documents.create",
"label": "Dokument hochladen",
"label": t("Dokument hochladen", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/documents", "method": "POST"}
},
{
"objectKey": "resource.feature.trustee.documents.update",
"label": "Dokument aktualisieren",
"label": t("Dokument aktualisieren", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/documents/{documentId}", "method": "PUT"}
},
{
"objectKey": "resource.feature.trustee.documents.delete",
"label": "Dokument löschen",
"label": t("Dokument löschen", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/documents/{documentId}", "method": "DELETE"}
},
{
"objectKey": "resource.feature.trustee.positions.create",
"label": "Position erstellen",
"label": t("Position erstellen", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/positions", "method": "POST"}
},
{
"objectKey": "resource.feature.trustee.positions.update",
"label": "Position aktualisieren",
"label": t("Position aktualisieren", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/positions/{positionId}", "method": "PUT"}
},
{
"objectKey": "resource.feature.trustee.positions.delete",
"label": "Position löschen",
"label": t("Position löschen", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/positions/{positionId}", "method": "DELETE"}
},
{
"objectKey": "resource.feature.trustee.instance-roles.manage",
"label": "Instanz-Rollen verwalten",
"label": t("Instanz-Rollen verwalten", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/instance-roles", "method": "ALL", "admin_only": True}
},
{
"objectKey": "resource.feature.trustee.accounting.manage",
"label": "Buchhaltungs-Integration verwalten",
"label": t("Buchhaltungs-Integration verwalten", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/accounting/config", "method": "ALL", "admin_only": True}
},
{
"objectKey": "resource.feature.trustee.accounting.sync",
"label": "Buchhaltung synchronisieren",
"label": t("Buchhaltung synchronisieren", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/accounting/sync", "method": "POST"}
},
{
"objectKey": "resource.feature.trustee.accounting.view",
"label": "Sync-Status einsehen",
"label": t("Sync-Status einsehen", context="UI"),
"meta": {"endpoint": "/api/trustee/{instanceId}/accounting/sync-status", "method": "GET"}
},
{
"objectKey": "resource.feature.trustee.workflows.view",
"label": "Workflows einsehen",
"label": t("Workflows einsehen", context="UI"),
"meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "GET"}
},
{
"objectKey": "resource.feature.trustee.workflows.execute",
"label": "Workflows ausführen",
"label": t("Workflows ausführen", context="UI"),
"meta": {"endpoint": "/api/workflows/{instanceId}/execute", "method": "POST"}
},
{
"objectKey": "resource.feature.trustee.workflows.manage",
"label": "Workflows verwalten",
"label": t("Workflows verwalten", context="UI"),
"meta": {"endpoint": "/api/workflows/{instanceId}/workflows", "method": "ALL", "admin_only": True}
},
]
@ -256,7 +258,7 @@ QUICK_ACTION_CATEGORIES = [
QUICK_ACTIONS = [
{
"id": "trustee-process-receipts",
"label": "Belege verarbeiten",
"label": t("Belege verarbeiten", context="UI"),
"description": "Belege aus SharePoint importieren, klassifizieren und verbuchen",
"icon": "mdi-file-document-check-outline",
"color": "#4CAF50",
@ -268,7 +270,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-upload-receipt",
"label": "Beleg hochladen",
"label": t("Beleg hochladen", context="UI"),
"description": "Beleg scannen oder als Datei hochladen",
"icon": "mdi-camera-document-outline",
"color": "#607D8B",
@ -280,7 +282,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-sync-accounting",
"label": "Daten einlesen",
"label": t("Daten einlesen", context="UI"),
"description": "Buchhaltungsdaten aus dem externen System aktualisieren",
"icon": "mdi-sync",
"color": "#FF9800",
@ -292,7 +294,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-budget-comparison",
"label": "Budget-Vergleich",
"label": t("Budget-Vergleich", context="UI"),
"description": "Soll/Ist-Vergleich der Buchhaltung mit Budget-Excel",
"icon": "mdi-chart-bar",
"color": "#2196F3",
@ -304,7 +306,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-kpi-dashboard",
"label": "KPI-Dashboard",
"label": t("KPI-Dashboard", context="UI"),
"description": "Kennzahlen berechnen und visualisieren",
"icon": "mdi-view-dashboard-outline",
"color": "#9C27B0",
@ -316,7 +318,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-cashflow",
"label": "Cashflow-Rechnung",
"label": t("Cashflow-Rechnung", context="UI"),
"description": "Cashflow berechnen und analysieren",
"icon": "mdi-cash-multiple",
"color": "#009688",
@ -328,7 +330,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-forecast",
"label": "Prognose erstellen",
"label": t("Prognose erstellen", context="UI"),
"description": "Trend-Analyse und Prognose der nächsten Monate",
"icon": "mdi-chart-timeline-variant",
"color": "#E91E63",
@ -340,7 +342,7 @@ QUICK_ACTIONS = [
},
{
"id": "trustee-year-end-check",
"label": "Jahresabschluss prüfen",
"label": t("Jahresabschluss prüfen", context="UI"),
"description": "Automatische Prüfungen für den Jahresabschluss",
"icon": "mdi-clipboard-check-outline",
"color": "#795548",
@ -383,7 +385,7 @@ def _buildAnalysisWorkflowGraph(prompt: str) -> Dict[str, Any]:
TEMPLATE_WORKFLOWS = [
{
"id": "trustee-receipt-import",
"label": "Beleg-Import Pipeline",
"label": t("Beleg-Import Pipeline", context="UI"),
"description": "Belege extrahieren, verarbeiten und in Buchhaltung synchronisieren",
"tags": ["feature:trustee", "template:trustee-receipt-import"],
"graph": {
@ -405,7 +407,7 @@ TEMPLATE_WORKFLOWS = [
},
{
"id": "trustee-sync-accounting",
"label": "Buchhaltung synchronisieren",
"label": t("Buchhaltung synchronisieren", context="UI"),
"description": "Buchhaltungsdaten aus dem externen System aktualisieren",
"tags": ["feature:trustee", "template:trustee-sync-accounting"],
"graph": {
@ -421,7 +423,7 @@ TEMPLATE_WORKFLOWS = [
},
{
"id": "trustee-budget-comparison",
"label": "Budget-Vergleich",
"label": t("Budget-Vergleich", context="UI"),
"description": "Soll/Ist-Vergleich der Buchhaltung mit Budget-Excel",
"tags": ["feature:trustee", "template:trustee-budget-comparison"],
"graph": {
@ -454,7 +456,7 @@ TEMPLATE_WORKFLOWS = [
},
{
"id": "trustee-kpi-dashboard",
"label": "KPI-Dashboard",
"label": t("KPI-Dashboard", context="UI"),
"description": "Kennzahlen berechnen und visualisieren",
"tags": ["feature:trustee", "template:trustee-kpi-dashboard"],
"graph": _buildAnalysisWorkflowGraph(
@ -471,7 +473,7 @@ TEMPLATE_WORKFLOWS = [
},
{
"id": "trustee-cashflow",
"label": "Cashflow-Rechnung",
"label": t("Cashflow-Rechnung", context="UI"),
"description": "Cashflow berechnen und analysieren",
"tags": ["feature:trustee", "template:trustee-cashflow"],
"graph": _buildAnalysisWorkflowGraph(
@ -485,7 +487,7 @@ TEMPLATE_WORKFLOWS = [
},
{
"id": "trustee-forecast",
"label": "Prognose erstellen",
"label": t("Prognose erstellen", context="UI"),
"description": "Trend-Analyse und Prognose der nächsten Monate",
"tags": ["feature:trustee", "template:trustee-forecast"],
"graph": _buildAnalysisWorkflowGraph(
@ -500,7 +502,7 @@ TEMPLATE_WORKFLOWS = [
},
{
"id": "trustee-year-end-check",
"label": "Jahresabschluss prüfen",
"label": t("Jahresabschluss prüfen", context="UI"),
"description": "Automatische Prüfungen für den Jahresabschluss",
"tags": ["feature:trustee", "template:trustee-year-end-check"],
"graph": _buildAnalysisWorkflowGraph(

View file

@ -9,31 +9,33 @@ Unified AI Workspace feature.
import logging
from typing import Dict, List, Any
from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
FEATURE_CODE = "workspace"
FEATURE_LABEL = "AI Workspace"
FEATURE_LABEL = t("AI Workspace", context="UI")
FEATURE_ICON = "mdi-brain"
UI_OBJECTS = [
{
"objectKey": "ui.feature.workspace.dashboard",
"label": "Dashboard",
"label": t("Dashboard", context="UI"),
"meta": {"area": "dashboard"}
},
{
"objectKey": "ui.feature.workspace.editor",
"label": "Editor",
"label": t("Editor", context="UI"),
"meta": {"area": "editor"}
},
{
"objectKey": "ui.feature.workspace.settings",
"label": "Einstellungen",
"label": t("Einstellungen", context="UI"),
"meta": {"area": "settings"}
},
{
"objectKey": "ui.feature.workspace.rag-insights",
"label": "Wissens-Insights",
"label": t("Wissens-Insights", context="UI"),
"meta": {"area": "rag-insights"},
},
]
@ -41,37 +43,37 @@ UI_OBJECTS = [
RESOURCE_OBJECTS = [
{
"objectKey": "resource.feature.workspace.start",
"label": "Agent starten",
"label": t("Agent starten", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/start/stream", "method": "POST"}
},
{
"objectKey": "resource.feature.workspace.stop",
"label": "Agent stoppen",
"label": t("Agent stoppen", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/{workflowId}/stop", "method": "POST"}
},
{
"objectKey": "resource.feature.workspace.files",
"label": "Dateien verwalten",
"label": t("Dateien verwalten", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/files", "method": "GET"}
},
{
"objectKey": "resource.feature.workspace.folders",
"label": "Ordner verwalten",
"label": t("Ordner verwalten", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/folders", "method": "GET"}
},
{
"objectKey": "resource.feature.workspace.datasources",
"label": "Datenquellen",
"label": t("Datenquellen", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/datasources", "method": "GET"}
},
{
"objectKey": "resource.feature.workspace.voice",
"label": "Spracheingabe/-ausgabe",
"label": t("Spracheingabe/-ausgabe", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/voice/*", "method": "POST"}
},
{
"objectKey": "resource.feature.workspace.edits",
"label": "Datei-Aenderungen pruefen",
"label": t("Datei-Aenderungen pruefen", context="UI"),
"meta": {"endpoint": "/api/workspace/{instanceId}/edit/*", "method": "POST"}
},
]

View file

@ -1906,6 +1906,7 @@ def _createStoreResourceRules(db: DatabaseConnector) -> None:
"resource.store.workspace",
"resource.store.commcoach",
"resource.store.trustee",
"resource.store.graphicalEditor",
]
storeRules = []

View file

@ -123,6 +123,9 @@ def _getFeatureUiObjects(featureCode: str) -> List[Dict[str, Any]]:
elif featureCode == "workspace":
from modules.features.workspace.mainWorkspace import UI_OBJECTS
return UI_OBJECTS
elif featureCode == "redmine":
from modules.features.redmine.mainRedmine import UI_OBJECTS
return UI_OBJECTS
else:
logger.debug(f"Skipping removed feature code: {featureCode}")
return []

View file

@ -7,6 +7,7 @@ import logging
from typing import List, Dict, Any, Optional
from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolDefinition
from modules.shared.timeUtils import getRequestNow, getRequestTimezone
logger = logging.getLogger(__name__)
@ -322,6 +323,27 @@ def _buildSummaryPrompt(
return prompt
def _buildTemporalContext() -> str:
    """Render a "current date & time" section for the agent system prompt.

    LLMs have no innate notion of "now" and otherwise hallucinate from their
    training cutoff. The user's browser timezone arrives via the
    ``X-User-Timezone`` request header (see the ``api.ts`` axios interceptor
    and ``_requestContextMiddleware`` in ``app.py``); outside an HTTP
    context ``getRequestNow()`` falls back to UTC.
    """
    tz = getRequestTimezone()
    now = getRequestNow()
    lines = [
        "## Current Date & Time",
        f"- Today: {now.strftime('%Y-%m-%d (%A)')}",
        f"- Now: {now.strftime('%H:%M')} ({tz})",
        '- Use this for any relative time references such as "today", '
        '"yesterday", "last week", "this month", "Q1", etc.',
        "- Do NOT rely on your training cutoff for the current date.",
        "",
    ]
    return "\n".join(lines) + "\n"
def buildSystemPrompt(
tools: List[ToolDefinition],
toolsFormatted: str = None,
@ -342,8 +364,9 @@ def buildSystemPrompt(
)
prompt = (
f"{langInstruction}"
"You are an AI agent with access to tools. "
_buildTemporalContext()
+ f"{langInstruction}"
+ "You are an AI agent with access to tools. "
"Use the provided tools to accomplish the user's task. "
"Think step by step. Call tools when you need information or need to perform actions. "
"When you have enough information to answer, respond directly without calling tools.\n\n"

View file

@ -235,11 +235,15 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
registry.register(
"browseDataSource", _browseDataSource,
description=(
"Browse files and folders in a data source. Accepts either:\n"
"Browse files, folders, or emails in a data source. Accepts either:\n"
"- dataSourceId (for attached data sources shown in the prompt), OR\n"
"- connectionId + service (for direct connection access via listConnections).\n"
"Default page size is connector-specific (~100 entries). Use the `limit` parameter "
"to request more (e.g. when the user explicitly asks for ALL items in a folder)."
"\n"
"DEFAULT BEHAVIOUR: omit `limit` to get the connector's full default page. "
"For mail folders (Outlook/Gmail) the default returns up to 100 newest "
"messages -- DO NOT pass a smaller limit just to be safe; users almost "
"always want the full default page or explicitly more. Only set `limit` "
"when the user asks for a specific number (e.g. 'show me the latest 5 mails')."
),
parameters={
"type": "object",
@ -253,8 +257,11 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
"limit": {
"type": "integer",
"description": (
"Maximum number of entries to return (max 1000 for mail, "
"connector-specific elsewhere). Omit for the connector's default."
"OPTIONAL. Maximum number of entries to return. OMIT this "
"parameter to get the connector default (100 for mail, "
"connector-specific elsewhere). Only set this when the user "
"explicitly asks for a specific count, OR when the previous "
"browse hit the default and you need MORE (then set up to 1000)."
),
"minimum": 1,
"maximum": 1000,

View file

@ -0,0 +1,401 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Email management agent tools (reply, forward, move, delete, flag, folder ops).
Complements ``_connectionTools.sendMail`` -- which only knows how to compose a
brand-new email -- with the full Outlook mail-management surface the agent
needs to actually answer existing threads and curate the inbox.
All tools resolve their target Outlook adapter via
``ConnectorResolver.resolveService(connectionId, "outlook")`` and require an
``msft`` UserConnection. They live in the ``email`` toolbox.
"""
from __future__ import annotations
import logging
from typing import Any, Dict, List, Optional
from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResult
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
_buildResolverDbFromServices,
)
logger = logging.getLogger(__name__)
async def _resolveOutlookAdapter(services, connectionId: str):
    """Resolve the Outlook adapter behind ``connectionId``.

    Every email tool needs exactly this boilerplate, so it is centralised
    here. Raises ``ValueError`` when the connection cannot be resolved so
    callers can surface a clean ToolResult error.
    """
    from modules.connectors.connectorResolver import ConnectorResolver

    security = services.getService("security")
    db = _buildResolverDbFromServices(services)
    adapter = await ConnectorResolver(security, db).resolveService(connectionId, "outlook")
    if adapter is None:
        raise ValueError(f"Could not resolve Outlook adapter for connection '{connectionId}'")
    return adapter
def _extractMessageId(args: Dict[str, Any]) -> str:
"""Pull a Graph message ID from args.
Accepts both ``messageId`` (preferred) and ``filePath`` -- the latter is
what ``browseDataSource`` returns in its ``path:`` field for mail entries
(``/<folderId>/<messageId>``). The LLM often passes that verbatim.
"""
messageId = (args.get("messageId") or "").strip()
if messageId:
return messageId
raw = (args.get("filePath") or args.get("path") or "").strip()
if raw:
return raw.strip("/").split("/")[-1]
return ""
def _registerEmailTools(registry: ToolRegistry, services):
    """Register Outlook reply/forward/move/delete/flag tools on ``registry``.

    Every handler follows the same pattern: validate args, resolve the Outlook
    adapter via ``_resolveOutlookAdapter``, call the adapter, and map an
    ``{"error": ...}`` payload in the adapter result onto a failed ToolResult.
    Exceptions never escape a handler -- they are converted to
    ``ToolResult(success=False)`` so the agent loop keeps running.
    """
    # ------------------------------------------------------------------
    # Reply / Reply-All / Forward
    # ------------------------------------------------------------------
    async def _replyToMail(args: Dict[str, Any], context: Dict[str, Any]):
        """Reply (or reply-all) to a mail; ``draft=True`` saves to Drafts instead of sending."""
        connectionId = (args.get("connectionId") or "").strip()
        # ``body`` accepted as alias for ``comment`` -- LLMs use both.
        comment = args.get("comment") or args.get("body") or ""
        replyAll = bool(args.get("replyAll", False))
        draft = bool(args.get("draft", False))
        messageId = _extractMessageId(args)
        if not connectionId or not messageId:
            return ToolResult(toolCallId="", toolName="replyToMail", success=False,
                              error="connectionId and messageId are required (messageId or filePath from browseDataSource)")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            if draft:
                # Guarded: not every adapter implementation supports drafts.
                if not hasattr(adapter, "createReplyDraft"):
                    return ToolResult(toolCallId="", toolName="replyToMail", success=False, error="Adapter does not support reply drafts")
                result = await adapter.createReplyDraft(messageId, comment=comment, replyAll=replyAll)
            else:
                if not hasattr(adapter, "replyToMail"):
                    return ToolResult(toolCallId="", toolName="replyToMail", success=False, error="Adapter does not support replies")
                result = await adapter.replyToMail(messageId, comment=comment, replyAll=replyAll)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="replyToMail", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="replyToMail", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="replyToMail", success=False, error=str(e))

    async def _forwardMail(args: Dict[str, Any], context: Dict[str, Any]):
        """Forward a mail to new recipients; ``draft=True`` allows omitting ``to``."""
        connectionId = (args.get("connectionId") or "").strip()
        comment = args.get("comment") or args.get("body") or ""
        to = args.get("to") or []
        draft = bool(args.get("draft", False))
        messageId = _extractMessageId(args)
        if not connectionId or not messageId:
            return ToolResult(toolCallId="", toolName="forwardMail", success=False,
                              error="connectionId and messageId are required")
        if not draft and not to:
            return ToolResult(toolCallId="", toolName="forwardMail", success=False,
                              error="`to` recipients are required when not creating a draft")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            # NOTE(review): unlike _replyToMail there is no hasattr guard here;
            # an adapter without forward support surfaces as an AttributeError
            # caught by the generic except below -- confirm this is intended.
            if draft:
                result = await adapter.createForwardDraft(messageId, to=to or None, comment=comment)
            else:
                result = await adapter.forwardMail(messageId, to=to, comment=comment)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="forwardMail", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="forwardMail", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="forwardMail", success=False, error=str(e))

    # ------------------------------------------------------------------
    # Move / Copy / Delete / Archive
    # ------------------------------------------------------------------
    async def _moveMail(args: Dict[str, Any], context: Dict[str, Any]):
        """Move -- or, with ``copy=True``, copy -- a mail into another folder."""
        connectionId = (args.get("connectionId") or "").strip()
        destination = (args.get("destinationFolder") or args.get("destination") or "").strip()
        copyMode = bool(args.get("copy", False))
        messageId = _extractMessageId(args)
        if not connectionId or not messageId or not destination:
            return ToolResult(toolCallId="", toolName="moveMail", success=False,
                              error="connectionId, messageId, and destinationFolder are required")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            method = adapter.copyMail if copyMode else adapter.moveMail
            result = await method(messageId, destination)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="moveMail", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="moveMail", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="moveMail", success=False, error=str(e))

    async def _deleteMail(args: Dict[str, Any], context: Dict[str, Any]):
        """Delete a mail: soft (move to Deleted Items) by default, hard only when double-confirmed."""
        connectionId = (args.get("connectionId") or "").strip()
        hardDelete = bool(args.get("hardDelete", False))
        # Hard-delete is irreversible -- require an explicit confirmation flag
        # so a single misrouted LLM call cannot wipe a mailbox. The default
        # behaviour (move to Deleted Items) is recoverable from Outlook UI.
        confirmed = bool(args.get("confirmedHardDelete", False))
        messageId = _extractMessageId(args)
        if not connectionId or not messageId:
            return ToolResult(toolCallId="", toolName="deleteMail", success=False,
                              error="connectionId and messageId are required")
        if hardDelete and not confirmed:
            return ToolResult(toolCallId="", toolName="deleteMail", success=False,
                              error="Hard-delete requires confirmedHardDelete=true (irreversible). Default deleteMail (without hardDelete) moves the mail to Deleted Items where it can still be recovered.")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            result = await adapter.deleteMail(messageId, hardDelete=hardDelete)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="deleteMail", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="deleteMail", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="deleteMail", success=False, error=str(e))

    async def _archiveMail(args: Dict[str, Any], context: Dict[str, Any]):
        """Move a mail into the Archive folder via the adapter's archiveMail."""
        connectionId = (args.get("connectionId") or "").strip()
        messageId = _extractMessageId(args)
        if not connectionId or not messageId:
            return ToolResult(toolCallId="", toolName="archiveMail", success=False,
                              error="connectionId and messageId are required")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            result = await adapter.archiveMail(messageId)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="archiveMail", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="archiveMail", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="archiveMail", success=False, error=str(e))

    # ------------------------------------------------------------------
    # Read-state / Flag
    # ------------------------------------------------------------------
    async def _setMailReadState(args: Dict[str, Any], context: Dict[str, Any]):
        """Mark a mail as read (``isRead=True``, the default) or unread."""
        connectionId = (args.get("connectionId") or "").strip()
        isRead = bool(args.get("isRead", True))
        messageId = _extractMessageId(args)
        if not connectionId or not messageId:
            return ToolResult(toolCallId="", toolName="setMailReadState", success=False,
                              error="connectionId and messageId are required")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            method = adapter.markMailAsRead if isRead else adapter.markMailAsUnread
            result = await method(messageId)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="setMailReadState", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="setMailReadState", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="setMailReadState", success=False, error=str(e))

    async def _flagMail(args: Dict[str, Any], context: Dict[str, Any]):
        """Set or clear the follow-up flag; ``flagStatus`` defaults to 'flagged'."""
        connectionId = (args.get("connectionId") or "").strip()
        flagStatus = (args.get("flagStatus") or "flagged").strip()
        messageId = _extractMessageId(args)
        if not connectionId or not messageId:
            return ToolResult(toolCallId="", toolName="flagMail", success=False,
                              error="connectionId and messageId are required")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            result = await adapter.flagMail(messageId, flagStatus=flagStatus)
            if "error" in result:
                return ToolResult(toolCallId="", toolName="flagMail", success=False, error=str(result["error"]))
            return ToolResult(toolCallId="", toolName="flagMail", success=True, data=str(result))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="flagMail", success=False, error=str(e))

    # ------------------------------------------------------------------
    # Folder discovery
    # ------------------------------------------------------------------
    async def _listMailFolders(args: Dict[str, Any], context: Dict[str, Any]):
        """List mailbox folders as '- <name> id=... total=... unread=...' lines."""
        connectionId = (args.get("connectionId") or "").strip()
        if not connectionId:
            return ToolResult(toolCallId="", toolName="listMailFolders", success=False,
                              error="connectionId is required")
        try:
            adapter = await _resolveOutlookAdapter(services, connectionId)
            if not hasattr(adapter, "listMailFolders"):
                return ToolResult(toolCallId="", toolName="listMailFolders", success=False, error="Adapter does not support listMailFolders")
            folders = await adapter.listMailFolders()
            if not folders:
                return ToolResult(toolCallId="", toolName="listMailFolders", success=True, data="No mail folders found.")
            lines = [
                f"- {f['displayName']} id={f['id']} total={f['totalItemCount']} unread={f['unreadItemCount']}"
                for f in folders
            ]
            return ToolResult(toolCallId="", toolName="listMailFolders", success=True, data="\n".join(lines))
        except Exception as e:
            return ToolResult(toolCallId="", toolName="listMailFolders", success=False, error=str(e))

    # ------------------------------------------------------------------
    # Tool registration
    # ------------------------------------------------------------------
    # Shared parameter fragments spliced into every tool schema below.
    _baseConnParam = {
        "connectionId": {"type": "string", "description": "UserConnection UUID for the Outlook account (from listConnections)"},
    }
    _baseMessageParam = {
        "messageId": {
            "type": "string",
            "description": "Graph message ID of the target mail. Pass either this OR filePath -- both accepted.",
        },
        "filePath": {
            "type": "string",
            "description": "Mail path as returned by browseDataSource (e.g. '/<folderId>/<messageId>'). Convenience alias for messageId.",
        },
    }
    # NOTE: 'required' lists name connectionId (plus tool-specific fields) but
    # never messageId/filePath, because either of the two identifies the mail;
    # the handlers enforce that at runtime via _extractMessageId.
    registry.register(
        "replyToMail", _replyToMail,
        description=(
            "Reply (or reply-all) to an existing email. Preserves the conversation thread "
            "and the 'AW:' prefix in Outlook -- USE THIS instead of sendMail when the user "
            "asks you to answer an existing message. Set draft=true to create a draft in the "
            "Drafts folder for review instead of sending immediately."
        ),
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
                "comment": {"type": "string", "description": "Reply body text (HTML supported by Outlook)"},
                "replyAll": {"type": "boolean", "description": "If true, reply to ALL recipients of the original mail"},
                "draft": {"type": "boolean", "description": "If true, save as draft in Drafts folder instead of sending"},
            },
            "required": ["connectionId", "comment"],
        },
        readOnly=False,
    )
    registry.register(
        "forwardMail", _forwardMail,
        description=(
            "Forward an existing email to new recipients. Set draft=true to create a draft "
            "(in which case `to` may be omitted and filled in later by the user)."
        ),
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
                "to": {"type": "array", "items": {"type": "string"}, "description": "Recipient email addresses"},
                "comment": {"type": "string", "description": "Optional message to prepend to the forwarded mail"},
                "draft": {"type": "boolean", "description": "If true, save as draft instead of sending"},
            },
            "required": ["connectionId"],
        },
        readOnly=False,
    )
    registry.register(
        "moveMail", _moveMail,
        description=(
            "Move (or copy) an existing email into another mail folder. The destination can "
            "be a well-known folder name ('inbox', 'archive', 'deleteditems', 'sentitems', "
            "'drafts', 'junkemail'), a localized display name ('Posteingang', 'Archiv', "
            "'Papierkorb'), or a Graph folder ID from listMailFolders. Set copy=true to copy "
            "instead of moving."
        ),
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
                "destinationFolder": {"type": "string", "description": "Target folder: well-known name, displayName, or folder id"},
                "copy": {"type": "boolean", "description": "If true, copy instead of moving (original stays in place)"},
            },
            "required": ["connectionId", "destinationFolder"],
        },
        readOnly=False,
    )
    registry.register(
        "deleteMail", _deleteMail,
        description=(
            "Delete an email. By default the message is moved to 'Deleted Items' (the same "
            "as pressing Delete in Outlook -- recoverable). Set hardDelete=true together with "
            "confirmedHardDelete=true to permanently and irrecoverably remove it -- only do "
            "this when the user has explicitly asked for permanent deletion."
        ),
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
                "hardDelete": {"type": "boolean", "description": "Permanently delete instead of moving to Deleted Items"},
                "confirmedHardDelete": {"type": "boolean", "description": "Required confirmation flag when hardDelete=true"},
            },
            "required": ["connectionId"],
        },
        readOnly=False,
    )
    registry.register(
        "archiveMail", _archiveMail,
        description="Move an email to the Archive folder. Convenience wrapper around moveMail with destinationFolder='archive'.",
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
            },
            "required": ["connectionId"],
        },
        readOnly=False,
    )
    registry.register(
        "setMailReadState", _setMailReadState,
        description="Mark an email as read (isRead=true) or unread (isRead=false).",
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
                "isRead": {"type": "boolean", "description": "true => mark as read, false => mark as unread"},
            },
            "required": ["connectionId"],
        },
        readOnly=False,
    )
    registry.register(
        "flagMail", _flagMail,
        description="Set or clear the follow-up flag on an email. flagStatus = 'flagged' (default), 'complete', or 'notFlagged'.",
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
                **_baseMessageParam,
                "flagStatus": {"type": "string", "enum": ["flagged", "complete", "notFlagged"], "description": "Flag state"},
            },
            "required": ["connectionId"],
        },
        readOnly=False,
    )
    registry.register(
        "listMailFolders", _listMailFolders,
        description=(
            "List all mail folders in the connected Outlook mailbox with id, displayName, "
            "totalItemCount and unreadItemCount. Use this BEFORE moveMail when the user "
            "names a non-standard folder so you can resolve the correct folder ID."
        ),
        parameters={
            "type": "object",
            "properties": {
                **_baseConnParam,
            },
            "required": ["connectionId"],
        },
        readOnly=True,
    )

View file

@ -8,6 +8,7 @@ from modules.serviceCenter.services.serviceAgent.coreTools._workspaceTools impor
from modules.serviceCenter.services.serviceAgent.coreTools._connectionTools import _registerConnectionTools
from modules.serviceCenter.services.serviceAgent.coreTools._dataSourceTools import _registerDataSourceTools
from modules.serviceCenter.services.serviceAgent.coreTools._documentTools import _registerDocumentTools
from modules.serviceCenter.services.serviceAgent.coreTools._emailTools import _registerEmailTools
from modules.serviceCenter.services.serviceAgent.coreTools._mediaTools import _registerMediaTools
from modules.serviceCenter.services.serviceAgent.coreTools._featureSubAgentTools import _registerFeatureSubAgentTools
from modules.serviceCenter.services.serviceAgent.coreTools._crossWorkflowTools import _registerCrossWorkflowTools
@ -22,6 +23,7 @@ def registerCoreTools(registry: ToolRegistry, services):
_registerConnectionTools(registry, services)
_registerDataSourceTools(registry, services)
_registerDocumentTools(registry, services)
_registerEmailTools(registry, services)
_registerMediaTools(registry, services)
_registerFeatureSubAgentTools(registry, services)
_registerCrossWorkflowTools(registry, services)

View file

@ -22,6 +22,7 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import (
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.featureDataProvider import FeatureDataProvider
from modules.shared.i18nRegistry import resolveText
from modules.shared.timeUtils import getRequestNow, getRequestTimezone
logger = logging.getLogger(__name__)
@ -321,9 +322,21 @@ def _buildSchemaContext(
header += f' (instance: "{instanceLabel}")'
header += "."
tz = getRequestTimezone()
now = getRequestNow()
temporalLines = [
"CURRENT DATE & TIME (use this for relative time references in filters):",
f" Today: {now.strftime('%Y-%m-%d (%A)')}",
f" Now: {now.strftime('%H:%M')} ({tz})",
" Resolve phrases like 'today', 'last month', 'Q1', 'this year' against THIS date.",
" Do NOT use your training cutoff for date filters.",
]
parts = [
header,
"",
*temporalLines,
"",
"AVAILABLE TABLES (use EXACTLY these names as tableName parameter):",
*tableBlocks,
"",

View file

@ -148,11 +148,25 @@ def _registerDefaultToolboxes() -> None:
ToolboxDefinition(
id="email",
label="Email",
description="Send emails or save as draft via Outlook (supports HTML body and file attachments). Use sendMail with draft=true for drafts.",
description=(
"Outlook mail management: send/draft new mails, reply or forward existing "
"messages (preserves the conversation thread), move/copy/delete/archive "
"mails, mark as read/unread, set follow-up flags, and list mail folders. "
"Use replyToMail (NOT sendMail) when answering an existing message so the "
"Outlook thread stays intact."
),
requiresConnection="msft",
isDefault=False,
tools=[
"sendMail",
"replyToMail",
"forwardMail",
"moveMail",
"deleteMail",
"archiveMail",
"setMailReadState",
"flagMail",
"listMailFolders",
],
),
ToolboxDefinition(

View file

@ -36,6 +36,16 @@ class AttributeDefinition(BaseModel):
fkSource: Optional[str] = None
fkDisplayField: Optional[str] = None
fkModel: Optional[str] = None # DB table / Pydantic model name for server-side FK sort (JOIN)
# ------------------------------------------------------------------
# Render hints for the frontend FormGenerator / Tables.
# ``frontendFormat`` is an Excel-style format string the FE applies to numeric,
# int, binary or unit values (e.g. "R:#'###.00", "L:0.000", "M:b", "R:@CHF@ #'###.00").
# ``frontendFormatLabels`` carries i18n-resolved string tokens referenced by the
# format (e.g. boolean labels ["Ja", "-", "Nein"]). They are pre-translated server
# side so the FE can render them as-is without another i18n round-trip.
# ------------------------------------------------------------------
frontendFormat: Optional[str] = None
frontendFormatLabels: Optional[List[str]] = None
def _getModelLabelEntry(modelName: str) -> Dict[str, Any]:
@ -138,6 +148,11 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
frontend_fk_source = None # FK dropdown source (e.g., "/api/users/")
frontend_fk_display_field = None # Which field of the FK target to display (e.g., "username", "name")
fk_model = None # Same as fk_model in json_schema_extra — backend JOIN target table name
# Render hints (cf. AttributeDefinition.frontendFormat / frontendFormatLabels).
# Optional Excel-like format string ("R:#'###.00") plus translatable label tokens
# for boolean/categorical render (e.g. ["Ja","-","Nein"] resolved via @i18nModel).
frontend_format = None
frontend_format_labels = None
if field_info:
# Try direct attributes first (though these won't exist for custom kwargs)
@ -196,6 +211,18 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
frontend_fk_display_field = json_extra.get("frontend_fk_display_field")
if "fk_model" in json_extra:
fk_model = json_extra.get("fk_model")
if frontend_format is None and "frontend_format" in json_extra:
frontend_format = json_extra.get("frontend_format")
if frontend_format_labels is None and "frontend_format_labels" in json_extra:
frontend_format_labels = json_extra.get("frontend_format_labels")
# Render hints can also come via FieldInfo.extra (older Pydantic kwargs path)
if hasattr(field_info, "extra") and isinstance(field_info.extra, dict):
extra_dict = field_info.extra
if frontend_format is None and "frontend_format" in extra_dict:
frontend_format = extra_dict.get("frontend_format")
if frontend_format_labels is None and "frontend_format_labels" in extra_dict:
frontend_format_labels = extra_dict.get("frontend_format_labels")
# Use frontend type if available, otherwise detect from Python type
if frontend_type:
@ -274,6 +301,18 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
if fk_model:
attr_def["fkModel"] = fk_model
# Render hints (Excel-like format string + i18n-resolved label tokens).
# Labels are resolved server-side via resolveText() so the FE renders them
# verbatim (no double-translation, no missing-key brackets in the table).
if frontend_format:
attr_def["frontendFormat"] = frontend_format
if frontend_format_labels and isinstance(frontend_format_labels, list):
from modules.shared.i18nRegistry import resolveText
attr_def["frontendFormatLabels"] = [
resolveText(lbl) if isinstance(lbl, (str, dict)) else str(lbl)
for lbl in frontend_format_labels
]
attributes.append(attr_def)
return {"model": model_label, "attributes": attributes}

View file

@ -214,6 +214,18 @@ def i18nModel(modelLabel: str, aiContext: str = ""):
else:
attributes[fieldName] = fieldName
# Render-hint label tokens (frontend_format_labels) are user-visible
# strings that appear in tables/forms (e.g. boolean labels
# ["Ja","-","Nein"], unit suffixes ["KB","MB","GB",...]). Register
# each non-empty token under a per-field context so they appear in
# the xx base set and get AI-translated like every other UI string.
formatLabels = extra.get("frontend_format_labels")
if isinstance(formatLabels, list):
fmtCtx = f"table.{className}.{fieldName}.format"
for token in formatLabels:
if isinstance(token, str) and token.strip():
t(token, fmtCtx, "")
MODEL_LABELS[className] = {
"model": modelLabel,
"attributes": attributes,

View file

@ -5,14 +5,85 @@ Timezone utilities for consistent timestamp handling across the gateway.
Ensures all timestamps are properly handled as UTC.
"""
from contextvars import ContextVar
from datetime import datetime, timezone
from typing import Optional, Any
import time
import logging
# Configure logger
try:
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
except ImportError:
ZoneInfo = None
ZoneInfoNotFoundError = Exception
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Per-request user timezone (set by middleware from X-User-Timezone header)
#
# Mirrors the i18n language ContextVar pattern in modules.shared.i18nRegistry:
# the browser knows its IANA timezone (Intl.DateTimeFormat().resolvedOptions().timeZone),
# the frontend axios interceptor sends it as X-User-Timezone, and the gateway
# middleware writes it into _CURRENT_TIMEZONE for any handler/agent to read.
#
# Storage stays UTC everywhere (getUtcTimestamp / getIsoTimestamp). Only
# user-visible "what is now?" decisions (AI-agent prompts, formatted display
# strings) should consult getRequestTimezone() / getRequestNow().
# ---------------------------------------------------------------------------
_DEFAULT_REQUEST_TZ = "UTC"
_CURRENT_TIMEZONE: ContextVar[str] = ContextVar("user_tz", default=_DEFAULT_REQUEST_TZ)
def _setRequestTimezone(tzName: str) -> None:
    """Store the current request's user timezone (called by gateway middleware).

    The name is validated against zoneinfo before being stored; a missing,
    non-string, or unknown value falls back to UTC so a malicious or stale
    ``X-User-Timezone`` header can never break downstream code.
    """
    chosen = _DEFAULT_REQUEST_TZ
    if tzName and isinstance(tzName, str) and ZoneInfo is not None:
        try:
            ZoneInfo(tzName)
        except (ZoneInfoNotFoundError, ValueError, OSError) as exc:
            logger.warning(
                "Invalid timezone in X-User-Timezone header: %r (%s); falling back to %s",
                tzName, type(exc).__name__, _DEFAULT_REQUEST_TZ,
            )
        else:
            chosen = tzName
    _CURRENT_TIMEZONE.set(chosen)
def getRequestTimezone() -> str:
    """Return the IANA timezone name the browser reported for this request.

    Falls back to ``UTC`` outside an HTTP request context (e.g. scheduler
    jobs) or when the frontend did not send the ``X-User-Timezone`` header.
    """
    tzName: str = _CURRENT_TIMEZONE.get()
    return tzName
def getRequestNow() -> datetime:
    """Return "now" as a timezone-aware datetime in the request's user TZ.

    Intended for **user-visible** time values only (agent prompts, formatted
    display strings); storage and DB writes keep using ``getUtcNow()`` /
    ``getUtcTimestamp()``. Falls back to UTC when zoneinfo is unavailable or
    the stored timezone name cannot be loaded.
    """
    if ZoneInfo is not None:
        try:
            return datetime.now(ZoneInfo(getRequestTimezone()))
        except (ZoneInfoNotFoundError, ValueError, OSError):
            pass
    return datetime.now(timezone.utc)
def getUtcNow() -> datetime:
"""
Get current time in UTC with timezone info.

View file

@ -510,9 +510,14 @@ RESOURCE_OBJECTS = [
},
{
"objectKey": "resource.store.trustee",
"label": "Store: Trustee",
"label": t("Store: Trustee", context="UI"),
"meta": {"category": "store", "featureCode": "trustee"}
},
{
"objectKey": "resource.store.graphicalEditor",
"label": t("Store: Workflow-Automation", context="UI"),
"meta": {"category": "store", "featureCode": "graphicalEditor"}
},
{
"objectKey": "resource.system.api.auth",
"label": "Authentifizierungs-API",

View file

@ -0,0 +1,7 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine workflow method: read / list / create / update / stats / sync."""
from .methodRedmine import MethodRedmine
__all__ = ["MethodRedmine"]

View file

@ -0,0 +1,2 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.

View file

@ -0,0 +1,38 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Shared helpers for Redmine workflow actions.
Keeps each action file focused on the business logic -- parameter
resolution, services lookup and ActionResult shaping live here.
"""
from typing import Any, Dict, Optional, Tuple
def resolveInstanceContext(services, parameters: Dict[str, Any]) -> Tuple[Any, Optional[str], str]:
    """Resolve ``(user, mandateId, featureInstanceId)`` for a workflow action.

    The workflow runtime attaches ``user`` / ``mandateId`` / ``featureInstanceId``
    to ``services``. ``parameters['featureInstanceId']`` may override the
    instance explicitly so the same workflow template can be reused against
    different Redmine instances.

    Raises ``ValueError`` when no instance id is available or no user is bound.
    """
    instanceId = parameters.get("featureInstanceId") or getattr(services, "featureInstanceId", None)
    if not instanceId:
        raise ValueError("featureInstanceId is required")
    user = getattr(services, "user", None)
    if user is None:
        raise ValueError("services.user is not available")
    return user, getattr(services, "mandateId", None), str(instanceId)
def ticketToDict(ticket) -> Dict[str, Any]:
    """Serialize ``ticket`` into a compact dict for AI consumption.

    ``None`` maps to ``{}``; otherwise ``model_dump(exclude_none=True)`` is
    taken and the bulky ``raw`` payload is stripped out.
    """
    if ticket is None:
        return {}
    dumped = ticket.model_dump(exclude_none=True)
    return {key: val for key, val in dumped.items() if key != "raw"}

View file

@ -0,0 +1,65 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Workflow action: create a new Redmine ticket."""
import logging
from typing import Any, Dict
from modules.datamodels.datamodelChat import ActionResult
from modules.features.redmine.datamodelRedmine import RedmineTicketCreateRequest
from modules.features.redmine.serviceRedmine import createTicket
from ._shared import resolveInstanceContext, ticketToDict
logger = logging.getLogger(__name__)
async def createTicketAction(self, parameters: Dict[str, Any]) -> ActionResult:
    """Create a Redmine ticket; ``subject`` and ``trackerId`` are required.

    Optional int-ish fields (statusId, priorityId, assignedToId,
    parentIssueId, fixedVersionId) are coerced via ``_optInt`` and silently
    dropped when unparseable, so a sloppy LLM argument does not fail the
    whole action. All failure paths return ``ActionResult.isFailure``.
    """
    try:
        user, mandateId, featureInstanceId = resolveInstanceContext(self.services, parameters)
    except ValueError as exc:
        return ActionResult.isFailure(error=str(exc))

    subject = parameters.get("subject")
    if not subject:
        return ActionResult.isFailure(error="subject is required")

    trackerId = parameters.get("trackerId")
    try:
        trackerIdInt = int(trackerId) if trackerId is not None else None
    except (TypeError, ValueError):
        return ActionResult.isFailure(error=f"trackerId must be an int, got {trackerId!r}")
    if trackerIdInt is None:
        return ActionResult.isFailure(error="trackerId is required")

    try:
        payload = RedmineTicketCreateRequest(
            subject=subject,
            trackerId=trackerIdInt,
            description=parameters.get("description") or "",
            statusId=_optInt(parameters.get("statusId")),
            priorityId=_optInt(parameters.get("priorityId")),
            assignedToId=_optInt(parameters.get("assignedToId")),
            parentIssueId=_optInt(parameters.get("parentIssueId")),
            fixedVersionId=_optInt(parameters.get("fixedVersionId")),
            customFields=parameters.get("customFields") or None,
        )
    except Exception as exc:
        return ActionResult.isFailure(error=f"Invalid create body: {exc}")

    try:
        created = await createTicket(user, mandateId, featureInstanceId, payload)
    except Exception as exc:
        logger.exception("redmine.createTicket failed")
        return ActionResult.isFailure(error=f"Create ticket failed: {exc}")
    return ActionResult.isSuccess(data={"ticket": ticketToDict(created)})
def _optInt(value: Any):
if value is None or value == "":
return None
try:
return int(value)
except (TypeError, ValueError):
return None

View file

@ -0,0 +1,64 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Workflow action: fetch aggregated Redmine statistics from the mirror."""
import logging
from typing import Any, Dict, List, Optional
from modules.datamodels.datamodelChat import ActionResult
from modules.features.redmine.serviceRedmineStats import getStats
from ._shared import resolveInstanceContext
logger = logging.getLogger(__name__)
def _normalizeIntList(value: Any) -> Optional[List[int]]:
"""Accept ``None | int | "1,2,3" | [..]`` and return a list of ints."""
if value is None or value == "":
return None
if isinstance(value, int):
return [value]
if isinstance(value, str):
value = [v.strip() for v in value.split(",") if v.strip()]
if isinstance(value, list):
ids: List[int] = []
for v in value:
try:
ids.append(int(v))
except (TypeError, ValueError):
continue
return ids or None
return None
async def getStatsAction(self, parameters: Dict[str, Any]) -> ActionResult:
    """Return the same DTO as the ``/stats`` endpoint, cached per filter set.

    ``bucket`` is clamped to day/week/month (default: week) and
    ``statusFilter`` to ``*``/open/closed (default: ``*``); unknown values
    are silently normalized rather than rejected so the agent never stalls
    on a malformed filter argument.
    """
    try:
        user, mandateId, featureInstanceId = resolveInstanceContext(self.services, parameters)
    except ValueError as exc:
        return ActionResult.isFailure(error=str(exc))

    bucket = (parameters.get("bucket") or "week").lower()
    if bucket not in {"day", "week", "month"}:
        bucket = "week"
    statusFilter = (parameters.get("statusFilter") or "*").lower()
    if statusFilter not in {"*", "open", "closed"}:
        statusFilter = "*"

    try:
        stats = await getStats(
            user, mandateId, featureInstanceId,
            dateFrom=parameters.get("dateFrom") or None,
            dateTo=parameters.get("dateTo") or None,
            bucket=bucket,
            trackerIds=_normalizeIntList(parameters.get("trackerIds")),
            categoryIds=_normalizeIntList(parameters.get("categoryIds")),
            statusFilter=statusFilter,
        )
    except Exception as exc:
        logger.exception("redmine.getStats failed")
        return ActionResult.isFailure(error=f"Stats failed: {exc}")
    return ActionResult.isSuccess(data={"stats": stats.model_dump(exclude_none=True)})

View file

@ -0,0 +1,82 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Workflow action: list Redmine tickets from the mirror with filters."""
import logging
from typing import Any, Dict, List, Optional
from modules.datamodels.datamodelChat import ActionResult
from modules.features.redmine.serviceRedmine import listTickets
from ._shared import resolveInstanceContext, ticketToDict
logger = logging.getLogger(__name__)
def _normalizeTrackerIds(value: Any) -> Optional[List[int]]:
if value is None or value == "":
return None
if isinstance(value, int):
return [value]
if isinstance(value, str):
value = [v.strip() for v in value.split(",") if v.strip()]
if isinstance(value, list):
ids: List[int] = []
for v in value:
try:
ids.append(int(v))
except (TypeError, ValueError):
continue
return ids or None
return None
async def listTicketsAction(self, parameters: Dict[str, Any]) -> ActionResult:
    """List Redmine tickets from the local mirror."""
    # Resolve user / mandate / feature instance from the workflow parameters.
    try:
        user, mandateId, featureInstanceId = resolveInstanceContext(self.services, parameters)
    except ValueError as exc:
        return ActionResult.isFailure(error=str(exc))

    # Normalize the optional filters; unrecognized values mean "no filter".
    trackerFilter = _normalizeTrackerIds(parameters.get("trackerIds"))
    statusValue = (parameters.get("status") or "*").lower()
    if statusValue not in {"*", "open", "closed"}:
        statusValue = "*"
    fromDate = parameters.get("dateFrom") or None
    toDate = parameters.get("dateTo") or None

    assigneeId: Optional[int] = None
    rawAssignee = parameters.get("assignedToId")
    if rawAssignee not in (None, ""):
        try:
            assigneeId = int(rawAssignee)
        except (TypeError, ValueError):
            return ActionResult.isFailure(error="assignedToId must be an int")

    try:
        tickets = listTickets(
            user, mandateId, featureInstanceId,
            trackerIds=trackerFilter,
            statusFilter=statusValue,
            updatedOnFrom=fromDate,
            updatedOnTo=toDate,
            assignedToId=assigneeId,
        )
    except Exception as exc:
        logger.exception("redmine.listTickets failed")
        return ActionResult.isFailure(error=f"List tickets failed: {exc}")

    # AI-friendly pagination: always capped so we don't accidentally feed a
    # 20k-ticket dump into a context window. Callers that need more must
    # paginate via filters.
    try:
        pageSize = max(1, min(500, int(parameters.get("limit") or 100)))
    except (TypeError, ValueError):
        pageSize = 100
    page = tickets[:pageSize]
    return ActionResult.isSuccess(data={
        "count": len(page),
        "totalMatched": len(tickets),
        "truncated": len(tickets) > pageSize,
        "tickets": [ticketToDict(t) for t in page],
    })

View file

@ -0,0 +1,46 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Workflow action: read a single Redmine ticket from the mirror.
Returns ``ActionResult.data`` with a single ``ticket`` key so downstream
nodes (e.g. ``ai.prompt``) can reference the ticket fields through
``DataRef``s.
"""
import logging
from typing import Any, Dict
from modules.datamodels.datamodelChat import ActionResult
from modules.features.redmine.serviceRedmine import getTicket
from ._shared import resolveInstanceContext, ticketToDict
logger = logging.getLogger(__name__)
async def readTicket(self, parameters: Dict[str, Any]) -> ActionResult:
    """Read ``parameters['ticketId']`` from the local Redmine mirror."""
    try:
        user, mandateId, featureInstanceId = resolveInstanceContext(self.services, parameters)
    except ValueError as exc:
        return ActionResult.isFailure(error=str(exc))

    # 'issueId' is accepted as an alias for 'ticketId'.
    requestedId = parameters.get("ticketId") or parameters.get("issueId")
    if requestedId is None:
        return ActionResult.isFailure(error="ticketId is required")
    try:
        ticketId = int(requestedId)
    except (TypeError, ValueError):
        return ActionResult.isFailure(error=f"ticketId must be an int, got {requestedId!r}")

    try:
        ticket = getTicket(user, mandateId, featureInstanceId, ticketId, includeRaw=False)
    except Exception as exc:
        logger.exception("redmine.readTicket failed for ticket %s", ticketId)
        return ActionResult.isFailure(error=f"Read ticket failed: {exc}")
    if ticket is None:
        return ActionResult.isFailure(
            error=f"Ticket #{ticketId} not found in mirror. Run redmine.runSync first?",
        )
    return ActionResult.isSuccess(data={"ticket": ticketToDict(ticket)})

View file

@ -0,0 +1,35 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Workflow action: trigger an incremental (or full) Redmine mirror sync."""
import logging
from typing import Any, Dict
from modules.datamodels.datamodelChat import ActionResult
from modules.features.redmine.serviceRedmineSync import runSync as runMirrorSync
from ._shared import resolveInstanceContext
logger = logging.getLogger(__name__)
async def runSyncAction(self, parameters: Dict[str, Any]) -> ActionResult:
    """Pull ticket and relation updates into the local mirror.

    Set ``force=True`` to ignore ``lastSyncAt`` and re-sync every issue
    (expensive -- only use for initial seed or recovery).
    """
    try:
        user, mandateId, featureInstanceId = resolveInstanceContext(self.services, parameters)
    except ValueError as exc:
        return ActionResult.isFailure(error=str(exc))
    # bool(x) already maps a missing/None/falsy parameter to False; the
    # previous `bool(parameters.get("force") or False)` double-defaulted.
    force = bool(parameters.get("force"))
    try:
        result = await runMirrorSync(user, mandateId, featureInstanceId, force=force)
    except Exception as exc:
        logger.exception("redmine.runSync failed")
        return ActionResult.isFailure(error=f"Sync failed: {exc}")
    return ActionResult.isSuccess(data={"sync": result.model_dump(exclude_none=True)})

View file

@ -0,0 +1,68 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Workflow action: update a single Redmine ticket and refresh the mirror."""
import logging
from typing import Any, Dict
from modules.datamodels.datamodelChat import ActionResult
from modules.features.redmine.datamodelRedmine import RedmineTicketUpdateRequest
from modules.features.redmine.serviceRedmine import updateTicket
from ._shared import resolveInstanceContext, ticketToDict
logger = logging.getLogger(__name__)
async def updateTicketAction(self, parameters: Dict[str, Any]) -> ActionResult:
    """Update ``parameters['ticketId']`` with the given fields.

    Only fields that are not ``None`` (and different from the current
    Redmine state, enforced by the service) are sent to Redmine. An
    optional ``notes`` string is appended as a journal entry.
    """
    try:
        user, mandateId, featureInstanceId = resolveInstanceContext(self.services, parameters)
    except ValueError as exc:
        return ActionResult.isFailure(error=str(exc))

    # 'issueId' is accepted as an alias for 'ticketId'.
    rawTicketId = parameters.get("ticketId") or parameters.get("issueId")
    if rawTicketId is None:
        return ActionResult.isFailure(error="ticketId is required")
    try:
        ticketId = int(rawTicketId)
    except (TypeError, ValueError):
        return ActionResult.isFailure(error=f"ticketId must be an int, got {rawTicketId!r}")

    # Build the update DTO; model validation errors become a failure result.
    try:
        update = RedmineTicketUpdateRequest(
            subject=parameters.get("subject"),
            description=parameters.get("description"),
            trackerId=_optInt(parameters.get("trackerId")),
            statusId=_optInt(parameters.get("statusId")),
            priorityId=_optInt(parameters.get("priorityId")),
            assignedToId=_optInt(parameters.get("assignedToId")),
            parentIssueId=_optInt(parameters.get("parentIssueId")),
            fixedVersionId=_optInt(parameters.get("fixedVersionId")),
            notes=parameters.get("notes"),
            customFields=parameters.get("customFields") or None,
        )
    except Exception as exc:
        return ActionResult.isFailure(error=f"Invalid update body: {exc}")

    try:
        updated = await updateTicket(user, mandateId, featureInstanceId, ticketId, update)
    except Exception as exc:
        logger.exception("redmine.updateTicket failed for ticket %s", ticketId)
        return ActionResult.isFailure(error=f"Update ticket failed: {exc}")
    return ActionResult.isSuccess(data={"ticket": ticketToDict(updated)})
def _optInt(value: Any):
if value is None or value == "":
return None
try:
return int(value)
except (TypeError, ValueError):
return None

View file

@ -0,0 +1,253 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Redmine workflow method.
Exposes read/write/stats/sync actions against a configured Redmine
feature instance. All reads go through the local mirror; writes update
Redmine and then the mirror (see ``serviceRedmine``).
This module is auto-discovered by ``methodDiscovery.py`` (any package
under ``modules.workflows.methods.method*`` with a ``MethodBase``
subclass is picked up). No manual registration needed.
"""
import logging
from modules.datamodels.datamodelWorkflowActions import (
WorkflowActionDefinition,
WorkflowActionParameter,
)
from modules.shared.frontendTypes import FrontendType
from modules.workflows.methods.methodBase import MethodBase
from .actions.createTicket import createTicketAction
from .actions.getStats import getStatsAction
from .actions.listTickets import listTicketsAction
from .actions.readTicket import readTicket
from .actions.runSync import runSyncAction
from .actions.updateTicket import updateTicketAction
logger = logging.getLogger(__name__)
class MethodRedmine(MethodBase):
    """Redmine read/write/stats/sync actions for the workflow runtime.

    Reads are served from the local mirror; writes go to Redmine first and
    then refresh the mirror (see ``serviceRedmine``). The action callables
    live in ``.actions.*`` and are bound to this instance via ``__get__``.
    """

    def __init__(self, services):
        super().__init__(services)
        self.name = "redmine"
        self.description = "Redmine ticketing: read, list, create, update, stats, sync."
        # One WorkflowActionDefinition per action; ``parameters`` is the
        # schema a workflow builder uses to render / validate inputs.
        self._actions = {
            "readTicket": WorkflowActionDefinition(
                actionId="redmine.readTicket",
                description="Read a single Redmine ticket from the local mirror by ticketId.",
                dynamicMode=False,
                parameters={
                    "featureInstanceId": WorkflowActionParameter(
                        name="featureInstanceId", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine feature instance ID",
                    ),
                    "ticketId": WorkflowActionParameter(
                        name="ticketId", type="int", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine issue id to read",
                    ),
                },
                execute=readTicket.__get__(self, self.__class__),
            ),
            "listTickets": WorkflowActionDefinition(
                actionId="redmine.listTickets",
                description="List tickets from the mirror with optional filters (tracker, status, period, assignee).",
                dynamicMode=False,
                parameters={
                    "featureInstanceId": WorkflowActionParameter(
                        name="featureInstanceId", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine feature instance ID",
                    ),
                    "trackerIds": WorkflowActionParameter(
                        name="trackerIds", type="list", frontendType=FrontendType.JSON,
                        required=False, description="Restrict to these tracker ids (list of int or comma-separated string).",
                    ),
                    "status": WorkflowActionParameter(
                        name="status", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="'open' | 'closed' | '*' (default '*').",
                    ),
                    "dateFrom": WorkflowActionParameter(
                        name="dateFrom", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="ISO date -- filter by 'updated_on >= dateFrom'.",
                    ),
                    "dateTo": WorkflowActionParameter(
                        name="dateTo", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="ISO date -- filter by 'updated_on <= dateTo'.",
                    ),
                    "assignedToId": WorkflowActionParameter(
                        name="assignedToId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Only tickets assigned to this Redmine user id.",
                    ),
                    "limit": WorkflowActionParameter(
                        name="limit", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Max tickets in the result (1-500, default 100).",
                    ),
                },
                execute=listTicketsAction.__get__(self, self.__class__),
            ),
            "createTicket": WorkflowActionDefinition(
                actionId="redmine.createTicket",
                description="Create a new Redmine ticket. Requires subject and trackerId.",
                dynamicMode=False,
                parameters={
                    "featureInstanceId": WorkflowActionParameter(
                        name="featureInstanceId", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine feature instance ID",
                    ),
                    "subject": WorkflowActionParameter(
                        name="subject", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Ticket title.",
                    ),
                    "trackerId": WorkflowActionParameter(
                        name="trackerId", type="int", frontendType=FrontendType.TEXT,
                        required=True, description="Tracker id (Userstory, Feature, Task ...).",
                    ),
                    "description": WorkflowActionParameter(
                        name="description", type="str", frontendType=FrontendType.TEXTAREA,
                        required=False, description="Markdown/Textile description body.",
                    ),
                    "statusId": WorkflowActionParameter(
                        name="statusId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Status id (optional, Redmine default otherwise).",
                    ),
                    "priorityId": WorkflowActionParameter(
                        name="priorityId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Priority id.",
                    ),
                    "assignedToId": WorkflowActionParameter(
                        name="assignedToId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Assignee user id.",
                    ),
                    "parentIssueId": WorkflowActionParameter(
                        name="parentIssueId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Parent issue id (tree parent, not relation).",
                    ),
                    "fixedVersionId": WorkflowActionParameter(
                        name="fixedVersionId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Target/fixed version id.",
                    ),
                    "customFields": WorkflowActionParameter(
                        name="customFields", type="dict", frontendType=FrontendType.JSON,
                        required=False, description="Custom fields as {customFieldId: value}.",
                    ),
                },
                execute=createTicketAction.__get__(self, self.__class__),
            ),
            "updateTicket": WorkflowActionDefinition(
                actionId="redmine.updateTicket",
                description="Update a Redmine ticket. Only provided fields are sent.",
                dynamicMode=False,
                parameters={
                    "featureInstanceId": WorkflowActionParameter(
                        name="featureInstanceId", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine feature instance ID",
                    ),
                    "ticketId": WorkflowActionParameter(
                        name="ticketId", type="int", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine issue id to update",
                    ),
                    "subject": WorkflowActionParameter(
                        name="subject", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="New title.",
                    ),
                    "description": WorkflowActionParameter(
                        name="description", type="str", frontendType=FrontendType.TEXTAREA,
                        required=False, description="New description.",
                    ),
                    "trackerId": WorkflowActionParameter(
                        name="trackerId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Change tracker.",
                    ),
                    "statusId": WorkflowActionParameter(
                        name="statusId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Change status.",
                    ),
                    "priorityId": WorkflowActionParameter(
                        name="priorityId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Change priority.",
                    ),
                    "assignedToId": WorkflowActionParameter(
                        name="assignedToId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Change assignee.",
                    ),
                    "parentIssueId": WorkflowActionParameter(
                        name="parentIssueId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Change parent issue.",
                    ),
                    "fixedVersionId": WorkflowActionParameter(
                        name="fixedVersionId", type="int", frontendType=FrontendType.TEXT,
                        required=False, description="Change fixed version.",
                    ),
                    "notes": WorkflowActionParameter(
                        name="notes", type="str", frontendType=FrontendType.TEXTAREA,
                        required=False, description="Journal entry (comment) added to the ticket.",
                    ),
                    "customFields": WorkflowActionParameter(
                        name="customFields", type="dict", frontendType=FrontendType.JSON,
                        required=False, description="Custom fields as {customFieldId: value}.",
                    ),
                },
                execute=updateTicketAction.__get__(self, self.__class__),
            ),
            "getStats": WorkflowActionDefinition(
                actionId="redmine.getStats",
                description="Aggregated stats (KPIs, throughput, status distribution, backlog) from the mirror.",
                dynamicMode=False,
                parameters={
                    "featureInstanceId": WorkflowActionParameter(
                        name="featureInstanceId", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine feature instance ID",
                    ),
                    "dateFrom": WorkflowActionParameter(
                        name="dateFrom", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="ISO date -- lower bound for 'created_in_period' / 'closed_in_period'.",
                    ),
                    "dateTo": WorkflowActionParameter(
                        name="dateTo", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="ISO date -- upper bound.",
                    ),
                    "bucket": WorkflowActionParameter(
                        name="bucket", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="'day' | 'week' | 'month' (default 'week').",
                    ),
                    "trackerIds": WorkflowActionParameter(
                        name="trackerIds", type="list", frontendType=FrontendType.JSON,
                        required=False, description="Restrict to these tracker ids.",
                    ),
                    # FIX: getStatsAction also reads these two parameters; they
                    # were missing from the definition, so workflow builders
                    # could neither discover nor set them.
                    "categoryIds": WorkflowActionParameter(
                        name="categoryIds", type="list", frontendType=FrontendType.JSON,
                        required=False, description="Restrict to these category ids.",
                    ),
                    "statusFilter": WorkflowActionParameter(
                        name="statusFilter", type="str", frontendType=FrontendType.TEXT,
                        required=False, description="'open' | 'closed' | '*' (default '*').",
                    ),
                },
                execute=getStatsAction.__get__(self, self.__class__),
            ),
            "runSync": WorkflowActionDefinition(
                actionId="redmine.runSync",
                description="Sync Redmine tickets and relations into the local mirror (incremental by default).",
                dynamicMode=False,
                parameters={
                    "featureInstanceId": WorkflowActionParameter(
                        name="featureInstanceId", type="str", frontendType=FrontendType.TEXT,
                        required=True, description="Redmine feature instance ID",
                    ),
                    "force": WorkflowActionParameter(
                        name="force", type="bool", frontendType=FrontendType.CHECKBOX,
                        required=False, description="True -> ignore lastSyncAt and pull every issue.",
                    ),
                },
                execute=runSyncAction.__get__(self, self.__class__),
            ),
        }
        self._validateActions()
        # Expose the callables directly on the instance too so workflow
        # engines that resolve by attribute (``method.actionName(...)``)
        # rather than through the action dict also work.
        self.readTicket = readTicket.__get__(self, self.__class__)
        self.listTickets = listTicketsAction.__get__(self, self.__class__)
        self.createTicket = createTicketAction.__get__(self, self.__class__)
        self.updateTicket = updateTicketAction.__get__(self, self.__class__)
        self.getStats = getStatsAction.__get__(self, self.__class__)
        self.runSync = runSyncAction.__get__(self, self.__class__)

View file

@ -8,9 +8,13 @@ log_file = logs/test_logs.log
log_file_level = INFO
log_file_format = %(asctime)s %(levelname)s %(message)s
log_file_date_format = %Y-%m-%d %H:%M:%S
# Only run non-expensive tests by default, verbose log, short traceback
# Only run non-expensive and non-live tests by default, verbose log, short traceback
# Use 'pytest -m ""' to run ALL tests.
addopts = -v --tb=short -m 'not expensive'
addopts = -v --tb=short -m 'not expensive and not live'
markers =
expensive: tests that take longer than a few seconds (e.g. heavy DB or AI)
live: integration tests that hit a live external service (e.g. Redmine SSS sandbox)
# Suppress deprecation warnings from third-party libraries
filterwarnings =

0
tests/fixtures/__init__.py vendored Normal file
View file

73
tests/fixtures/loadRedmineSnapshot.py vendored Normal file
View file

@ -0,0 +1,73 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Load ``redmineSnapshot.json`` into in-memory ``RedmineTicketDto`` objects.
Used by all stats / orphan unit tests so they do not require any DB,
HTTP or live Redmine access.
"""
from __future__ import annotations
import json
from pathlib import Path
from typing import List, Optional, Tuple
from modules.features.redmine.datamodelRedmine import (
RedmineFieldChoiceDto,
RedmineFieldSchemaDto,
RedmineRelationDto,
RedmineTicketDto,
)
_SNAPSHOT_PATH = Path(__file__).parent / "redmineSnapshot.json"
def loadSnapshot() -> Tuple[RedmineFieldSchemaDto, List[RedmineTicketDto]]:
    """Return ``(schema, tickets)`` parsed from the JSON fixture."""
    raw = json.loads(_SNAPSHOT_PATH.read_text(encoding="utf-8"))

    schemaRaw = raw.get("schema") or {}
    trackersRaw = schemaRaw.get("trackers") or []
    schema = RedmineFieldSchemaDto(
        projectId=str(schemaRaw.get("projectId") or ""),
        projectName=str(schemaRaw.get("projectName") or ""),
        trackers=[RedmineFieldChoiceDto(**t) for t in trackersRaw],
        statuses=[RedmineFieldChoiceDto(**s) for s in schemaRaw.get("statuses") or []],
        priorities=[RedmineFieldChoiceDto(**p) for p in schemaRaw.get("priorities") or []],
        users=[RedmineFieldChoiceDto(**u) for u in schemaRaw.get("users") or []],
        customFields=[],
        rootTrackerName="Userstory",
        rootTrackerId=_findRootTrackerId(trackersRaw),
    )

    tickets: List[RedmineTicketDto] = [
        RedmineTicketDto(
            id=int(issue["id"]),
            subject=str(issue.get("subject") or ""),
            trackerId=issue.get("trackerId"),
            trackerName=issue.get("trackerName"),
            statusId=issue.get("statusId"),
            statusName=issue.get("statusName"),
            isClosed=bool(issue.get("isClosed")),
            priorityId=issue.get("priorityId"),
            priorityName=issue.get("priorityName"),
            assignedToId=issue.get("assignedToId"),
            assignedToName=issue.get("assignedToName"),
            parentId=issue.get("parentId"),
            createdOn=issue.get("createdOn"),
            updatedOn=issue.get("updatedOn"),
            relations=[RedmineRelationDto(**r) for r in issue.get("relations") or []],
        )
        for issue in raw.get("issues") or []
    ]
    return schema, tickets
def _findRootTrackerId(trackers) -> Optional[int]:
for t in trackers:
name = str(t.get("name") or "").strip().lower()
if name in ("userstory", "user story", "user-story"):
return int(t.get("id"))
return None

98
tests/fixtures/redmineSnapshot.json vendored Normal file
View file

@ -0,0 +1,98 @@
{
"_doc": "Synthetic Redmine snapshot for unit tests. Replace with real data via captureRedmineSnapshot.py against the SSS sandbox once the live tests are green.",
"schema": {
"projectId": "demo-project",
"projectName": "Demo Project",
"trackers": [
{"id": 1, "name": "Userstory"},
{"id": 2, "name": "Feature"},
{"id": 3, "name": "Acc.Crit"},
{"id": 4, "name": "Bug"},
{"id": 5, "name": "Task"}
],
"statuses": [
{"id": 1, "name": "Neu", "isClosed": false},
{"id": 2, "name": "In Bearbeitung", "isClosed": false},
{"id": 3, "name": "Review", "isClosed": false},
{"id": 4, "name": "Erledigt", "isClosed": true},
{"id": 5, "name": "Geschlossen", "isClosed": true}
],
"priorities": [
{"id": 1, "name": "Niedrig"},
{"id": 2, "name": "Normal"},
{"id": 3, "name": "Hoch"}
],
"users": [
{"id": 11, "name": "Anna Beispiel"},
{"id": 12, "name": "Bruno Test"}
],
"customFields": []
},
"issues": [
{
"id": 1001,
"subject": "Mandanten-Setup automatisieren",
"trackerId": 1, "trackerName": "Userstory",
"statusId": 2, "statusName": "In Bearbeitung", "isClosed": false,
"priorityId": 2, "priorityName": "Normal",
"assignedToId": 11, "assignedToName": "Anna Beispiel",
"createdOn": "2026-02-01T10:00:00Z", "updatedOn": "2026-04-10T09:00:00Z",
"relations": []
},
{
"id": 2001,
"subject": "Onboarding-Wizard UX",
"trackerId": 2, "trackerName": "Feature",
"statusId": 1, "statusName": "Neu", "isClosed": false,
"priorityId": 2, "priorityName": "Normal",
"assignedToId": 12, "assignedToName": "Bruno Test",
"createdOn": "2026-02-05T12:00:00Z", "updatedOn": "2026-03-01T08:00:00Z",
"relations": [
{"id": 901, "issueId": 2001, "issueToId": 1001, "relationType": "relates", "delay": null}
]
},
{
"id": 3001,
"subject": "AC: Wizard-Schritt 1 muss Mandant erkennen",
"trackerId": 3, "trackerName": "Acc.Crit",
"statusId": 4, "statusName": "Erledigt", "isClosed": true,
"priorityId": 2, "priorityName": "Normal",
"assignedToId": 12, "assignedToName": "Bruno Test",
"parentId": 2001,
"createdOn": "2026-02-10T08:00:00Z", "updatedOn": "2026-04-08T15:30:00Z",
"relations": []
},
{
"id": 4001,
"subject": "Bug: Wizard friert ein bei leerem Mandanten",
"trackerId": 4, "trackerName": "Bug",
"statusId": 5, "statusName": "Geschlossen", "isClosed": true,
"priorityId": 3, "priorityName": "Hoch",
"assignedToId": 11, "assignedToName": "Anna Beispiel",
"createdOn": "2026-03-15T12:00:00Z", "updatedOn": "2026-04-12T11:00:00Z",
"relations": [
{"id": 902, "issueId": 4001, "issueToId": 2001, "relationType": "blocks", "delay": null}
]
},
{
"id": 5001,
"subject": "Orphan: Refactor altes Logging-Modul",
"trackerId": 5, "trackerName": "Task",
"statusId": 1, "statusName": "Neu", "isClosed": false,
"priorityId": 1, "priorityName": "Niedrig",
"assignedToId": null, "assignedToName": null,
"createdOn": "2025-09-15T08:00:00Z", "updatedOn": "2025-10-01T10:00:00Z",
"relations": []
},
{
"id": 5002,
"subject": "Orphan: Doku Schemamigration",
"trackerId": 5, "trackerName": "Task",
"statusId": 2, "statusName": "In Bearbeitung", "isClosed": false,
"priorityId": 2, "priorityName": "Normal",
"assignedToId": 11, "assignedToName": "Anna Beispiel",
"createdOn": "2026-01-10T08:00:00Z", "updatedOn": "2026-02-15T10:00:00Z",
"relations": []
}
]
}

View file

@ -0,0 +1,48 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Pure-Python unit tests for the orphan detection in
``serviceRedmineStats._countOrphans``.
Snapshot in ``tests/fixtures/redmineSnapshot.json`` contains:
- 1x Userstory (1001) -- root
- 1x Feature (2001) related to 1001 -> reachable
- 1x Acc.Crit (3001) parent=2001 -> reachable
- 1x Bug (4001) blocks 2001 -> reachable via relation
- 2x Task (5001, 5002) -> orphan (no link to any User Story)
"""
from __future__ import annotations
from modules.features.redmine.serviceRedmineStats import _countOrphans
from tests.fixtures.loadRedmineSnapshot import loadSnapshot
class TestCountOrphans:
    """Orphan detection against the static snapshot fixture (no I/O)."""

    def test_orphansFromSnapshot(self) -> None:
        schema, tickets = loadSnapshot()
        assert _countOrphans(tickets, schema.rootTrackerId) == 2, \
            "Two unrelated Tasks should be orphans"

    def test_emptyListReturnsZero(self) -> None:
        assert _countOrphans([], 1) == 0

    def test_noRootTrackerCountsAllAsOrphan(self) -> None:
        schema, tickets = loadSnapshot()
        # Pretend there is no User Story tracker at all -- every ticket is orphan.
        assert _countOrphans(tickets, None) == len(tickets)

    def test_relationDirectionAgnostic(self) -> None:
        """A ticket reachable via the *target* side of a relation must not
        be counted as orphan -- _countOrphans walks both directions."""
        from modules.features.redmine.datamodelRedmine import RedmineRelationDto

        _, tickets = loadSnapshot()
        byId = {t.id: t for t in tickets}
        # Bug 4001 -[blocks]-> 2001; it is the source. Reverse it: 2001 -[blocks]-> 4001
        byId[4001].relations = []
        byId[2001].relations.append(
            RedmineRelationDto(id=999, issueId=2001, issueToId=4001, relationType="blocks", delay=None)
        )
        assert _countOrphans(tickets, 1) == 2  # Tasks remain orphans, Bug is still reachable

View file

@ -0,0 +1,122 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for the pure aggregation in ``serviceRedmineStats._aggregate``.
These tests run the whole bucket / KPI / aging logic against the static
fixture, with no I/O and no service / connector / DB.
"""
from __future__ import annotations
import datetime as _dt
from modules.features.redmine.serviceRedmineStats import (
_aggregate,
_backlogAging,
_bucketKey,
_kpis,
_relationDistribution,
_statusByTracker,
_throughput,
_topAssignees,
)
from tests.fixtures.loadRedmineSnapshot import loadSnapshot
class TestKpis:
    """KPI aggregation against the static snapshot fixture."""

    def test_kpisCountTotalsCorrectly(self) -> None:
        schema, tickets = loadSnapshot()
        kpis = _kpis(tickets, schema.rootTrackerId, periodFrom=None, periodTo=None)
        assert (kpis.total, kpis.open, kpis.closed, kpis.orphans) == (6, 4, 2, 2)

    def test_periodFiltersClosedAndCreated(self) -> None:
        schema, tickets = loadSnapshot()
        aprilStart = _dt.datetime(2026, 4, 1)
        aprilEnd = _dt.datetime(2026, 4, 30)
        kpis = _kpis(tickets, schema.rootTrackerId, aprilStart, aprilEnd)
        assert kpis.closedInPeriod == 2  # 3001 + 4001 closed in April
        assert kpis.createdInPeriod == 0  # nothing was created in April
class TestStatusByTracker:
    def test_buildsOneEntryPerTracker(self) -> None:
        """One row per tracker; Task counts add up across statuses."""
        schema, tickets = loadSnapshot()
        rows = _statusByTracker(tickets, schema)
        assert {r.trackerName for r in rows} == {"Userstory", "Feature", "Acc.Crit", "Bug", "Task"}
        taskRow = next(r for r in rows if r.trackerName == "Task")
        assert taskRow.total == 2
        assert sum(taskRow.countsByStatus.values()) == 2
class TestThroughput:
    def test_bucketByMonthCountsClosed(self) -> None:
        _schema, tickets = loadSnapshot()
        buckets = _throughput(
            tickets, _dt.datetime(2026, 4, 1), _dt.datetime(2026, 4, 30), "month"
        )
        assert "2026-04" in [b.bucketKey for b in buckets]
        aprilBucket = next(b for b in buckets if b.bucketKey == "2026-04")
        assert (aprilBucket.closed, aprilBucket.created) == (2, 0)

    def test_bucketByWeekIsoFormat(self) -> None:
        # ISO week keys look like '2026-W<nn>'.
        assert _bucketKey(_dt.datetime(2026, 4, 15), "week").startswith("2026-W")
class TestTopAssignees:
    def test_excludesClosedTickets(self) -> None:
        _schema, tickets = loadSnapshot()
        names = {r.name for r in _topAssignees(tickets, limit=10)}
        # Anna has 1 open (1001), Bruno has 1 open (2001), unassigned has 1 (5001).
        assert "Anna Beispiel" in names
        assert "Bruno Test" in names
        assert "(nicht zugewiesen)" in names
class TestRelationDistribution:
    def test_dedupesByRelationId(self) -> None:
        """Both relation types from the fixture appear, each counted >= 1."""
        _schema, tickets = loadSnapshot()
        rows = _relationDistribution(tickets)
        seenTypes = {r.relationType for r in rows}
        assert "relates" in seenTypes
        assert "blocks" in seenTypes
        assert all(r.count >= 1 for r in rows)
class TestBacklogAging:
    def test_oldOrphansLandInOlderBuckets(self) -> None:
        _schema, tickets = loadSnapshot()
        # Fixture ticket 5001 was last updated 2025-10 -> older than 180 days.
        buckets = _backlogAging(tickets, now=_dt.datetime(2026, 5, 1))
        oldest = next(b for b in buckets if b.bucketKey == "gt180")
        assert oldest.count >= 1
class TestAggregateEndToEnd:
    def test_aggregateProducesAllSections(self) -> None:
        """Full _aggregate run: every DTO section is populated as expected."""
        schema, tickets = loadSnapshot()
        dto = _aggregate(
            tickets,
            schema=schema,
            rootTrackerId=schema.rootTrackerId,
            dateFrom="2026-04-01",
            dateTo="2026-04-30",
            bucket="month",
            trackerIdsFilter=[],
            instanceId="test-instance",
        )
        assert dto.instanceId == "test-instance"
        assert (dto.kpis.total, dto.kpis.orphans) == (6, 2)
        assert len(dto.statusByTracker) == 5
        assert any(b.bucketKey == "2026-04" for b in dto.throughput)
        assert dto.backlogAging[-1].bucketKey == "gt180"

View file

@ -0,0 +1,57 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for ``RedmineStatsCache``.
Verifies TTL expiry, key composition, instance invalidation and process-wide
singleton behaviour.
"""
from __future__ import annotations
import time
from modules.features.redmine.serviceRedmineStatsCache import (
RedmineStatsCache,
_getStatsCache,
)
class TestRedmineStatsCache:
    """TTL expiry, key composition, invalidation and singleton behaviour."""

    def test_getReturnsNoneOnMiss(self) -> None:
        cache = RedmineStatsCache(ttlSeconds=60)
        missKey = cache.buildKey("inst-a", "2026-01-01", "2026-01-31", "week", [1, 2])
        assert cache.get(missKey) is None

    def test_setAndGetRoundTrip(self) -> None:
        cache = RedmineStatsCache(ttlSeconds=60)
        key = cache.buildKey("inst-a", None, None, "week", [])
        cache.set(key, {"answer": 42})
        assert cache.get(key) == {"answer": 42}

    def test_keyIsOrderInsensitiveForTrackerIds(self) -> None:
        cache = RedmineStatsCache()
        assert (
            cache.buildKey("inst-a", None, None, "week", [3, 1, 2])
            == cache.buildKey("inst-a", None, None, "week", [1, 2, 3])
        )

    def test_ttlExpiry(self) -> None:
        cache = RedmineStatsCache(ttlSeconds=0.05)
        key = cache.buildKey("inst-a", None, None, "week", [])
        cache.set(key, "value")
        time.sleep(0.06)  # let the entry outlive its TTL
        assert cache.get(key) is None

    def test_invalidateInstanceDropsAllKeysForThatInstance(self) -> None:
        cache = RedmineStatsCache(ttlSeconds=60)
        cache.set(cache.buildKey("inst-a", None, None, "week", []), "v1")
        cache.set(cache.buildKey("inst-a", "2026-01-01", "2026-01-31", "month", [1]), "v2")
        cache.set(cache.buildKey("inst-b", None, None, "week", []), "v3")
        assert cache.invalidateInstance("inst-a") == 2
        assert cache.get(cache.buildKey("inst-a", None, None, "week", [])) is None
        assert cache.get(cache.buildKey("inst-b", None, None, "week", [])) == "v3"

    def test_singletonIsStable(self) -> None:
        assert _getStatsCache() is _getStatsCache()

View file

@ -0,0 +1,58 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Validation: every label in feature ``main*.py`` catalog lists must be wrapped in ``t(...)``.
Background:
``UI_OBJECTS``, ``DATA_OBJECTS``, ``RESOURCE_OBJECTS``, ``WORKFLOW_DEFINITIONS`` etc.
in ``gateway/modules/features/<x>/main*.py`` define labels that the UDB and other
UI surfaces display. Bare-string labels (``"label": "Konfiguration"``) are not
registered with the i18n catalog at module-import time, so non-DE renders show
them as ``[Konfiguration]`` (the missing-translation marker from
``modules.shared.i18nRegistry.t()``).
This test scans every ``main*.py`` under ``gateway/modules/features`` and fails
if it finds bare-string labels, blocking the regression in CI.
Allowed exceptions:
- Labels that are NOT user-visible (e.g. internal demo seed data, fixtures).
Add their file to ``_ALLOWED_FILES_WITH_BARE_LABELS`` with a justification.
"""
from __future__ import annotations
import re
from pathlib import Path
import pytest
# Root of all feature packages whose main*.py catalog files get scanned.
_FEATURES_DIR = Path(__file__).resolve().parents[2] / "modules" / "features"
# Matches a line-initial `"label": "<bare string>"` entry (MULTILINE anchors ^
# at each line start); a t(...)-wrapped label does not match this shape.
_BARE_LABEL_PATTERN = re.compile(r'^\s*"label"\s*:\s*"[^"]+"', re.MULTILINE)
# mainRealEstate.py contains "label": "AA1704" inside a multi-line f-string
# that is used as a JSON example in an AI prompt -- not a real catalog entry.
_ALLOWED_FILES_WITH_BARE_LABELS: set[str] = {
    "mainRealEstate.py",
}
def _findFeatureMainFiles() -> list[Path]:
    """All ``main*.py`` catalog files under the features directory, sorted."""
    catalogFiles = _FEATURES_DIR.glob("*/main*.py")
    return sorted(catalogFiles)
@pytest.mark.parametrize("mainFile", _findFeatureMainFiles(), ids=lambda p: p.name)
def test_noBareLabelsInFeatureCatalog(mainFile: Path) -> None:
    """Fail when a feature catalog file defines labels outside t(...)."""
    if mainFile.name in _ALLOWED_FILES_WITH_BARE_LABELS:
        pytest.skip(f"{mainFile.name} explicitly allowed (legacy seed data)")
    content = mainFile.read_text(encoding="utf-8")
    offenders = _BARE_LABEL_PATTERN.findall(content)
    assert not offenders, (
        f"\n{mainFile.relative_to(_FEATURES_DIR.parent.parent)} contains "
        f"{len(offenders)} bare-string labels that are NOT registered with i18n.\n"
        f"Wrap each label with t(\"...\", context=\"UI\") so non-DE locales\n"
        f"don't render them as [missing-key].\n\n"
        f"Sample offending lines:\n "
        + "\n ".join(offenders[:5])
    )