gateway/modules/features/codeeditor/promptAssembly.py
2026-02-23 18:35:36 +01:00

105 lines
3.3 KiB
Python

# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Prompt assembly for the CodeEditor feature.
Builds Cursor-style system prompts with file context and format instructions."""
import logging
from typing import List, Optional, Dict, Any
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum
from modules.features.codeeditor.datamodelCodeeditor import FileContext
logger = logging.getLogger(__name__)
# Cursor-style system prompt. Instructs the model to emit ```file_edit``` blocks
# (fileName / oldContent / newContent) that downstream code applies as exact-match
# search/replace edits — hence the emphasis on oldContent matching byte-for-byte.
# NOTE: this is a runtime string sent to the model; do not reformat casually.
SYSTEM_PROMPT = """You are an AI assistant for text and code file editing. You receive files as context and can suggest changes.
## Rules for file edits
- Use ```file_edit``` blocks for file changes
- Each file_edit block must contain: fileName, oldContent (exact text to replace), newContent (replacement text)
- Explain changes in normal text before or after the block
- oldContent must EXACTLY match existing content (including whitespace and indentation)
- You may propose edits to multiple files in one response
## Response format
Normal text is displayed as explanation.
File changes must use this format:
```file_edit
fileName: <filename>
oldContent: |
<exact existing content to replace>
newContent: |
<new replacement content>
```
Code examples (without edits) use standard markdown code blocks:
```language
code here
```
## Important
- Only edit files that are provided in context
- Make minimal, targeted changes
- Preserve existing formatting and style
- If a task is unclear, ask for clarification instead of guessing"""
def buildRequest(
    userPrompt: str,
    fileContexts: List[FileContext],
    chatHistory: Optional[List[Dict[str, Any]]] = None
) -> AiCallRequest:
    """Assemble an AiCallRequest for the CodeEditor feature.

    The prompt is built from three sections separated by blank lines:
    the fixed SYSTEM_PROMPT, an optional condensed chat history, and the
    user's request. File contents travel separately in the request's
    ``context`` field (None when no non-empty files were provided).
    """
    promptSections = [SYSTEM_PROMPT]
    # Only include a history section when there is actual history text.
    if chatHistory:
        historyText = _buildChatHistory(chatHistory)
        if historyText:
            promptSections.append(f"## Previous conversation\n{historyText}")
    promptSections.append(f"## User request\n{userPrompt}")
    contextText = _buildFileContext(fileContexts)
    return AiCallRequest(
        prompt="\n\n".join(promptSections),
        context=contextText or None,
        options=AiCallOptions(
            # Deterministic, uncompressed call so file edits stay exact.
            operationType=OperationTypeEnum.DATA_ANALYSE,
            temperature=0.0,
            compressPrompt=False,
            compressContext=False,
            resultFormat="txt"
        )
    )
def _buildFileContext(fileContexts: List[FileContext]) -> str:
"""Build the file context string with line numbers."""
if not fileContexts:
return ""
parts = []
for fc in fileContexts:
if not fc.content:
continue
lines = fc.content.split("\n")
numberedLines = [f"{i + 1}|{line}" for i, line in enumerate(lines)]
numbered = "\n".join(numberedLines)
parts.append(f"--- FILE: {fc.fileName} ---\n{numbered}\n--- END FILE ---")
return "\n\n".join(parts)
def _buildChatHistory(chatHistory: List[Dict[str, Any]]) -> str:
"""Build a condensed chat history string for multi-turn context."""
if not chatHistory:
return ""
parts = []
for msg in chatHistory[-10:]:
role = msg.get("role", "unknown")
content = msg.get("content", "")
if len(content) > 500:
content = content[:500] + "..."
parts.append(f"[{role}]: {content}")
return "\n".join(parts)