# NOTE(review): removed a duplicated file-metadata artifact ("152 lines / 6.1 KiB / Python")
# that was prepended to this module by a copy/extraction tool; it was not valid Python.
import logging
|
|
from typing import Dict, Any, List, Union, Optional
|
|
from modules.connectors.connectorAiOpenai import AiOpenai, ContextLengthExceededException
|
|
from modules.connectors.connectorAiAnthropic import AiAnthropic
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
class AiCalls:
    """Interface for AI service interactions.

    Routes "basic" calls to OpenAI and "advanced" calls to Anthropic, with
    mutual fallback between the two text endpoints:

    * basic -> advanced when OpenAI raises ContextLengthExceededException
    * advanced -> basic on any Anthropic failure (overload, rate limit, ...)

    The internal ``_is_fallback`` flag stops the two text methods from
    bouncing a failing request back and forth forever.
    """

    def __init__(self):
        # Concrete service connectors (project-local classes).
        self.openaiService = AiOpenai()
        self.anthropicService = AiAnthropic()

    def _languageInstruction(self) -> Optional[str]:
        """Return the "respond in language X" sentence, or None.

        ``userLanguage`` is an optional attribute that callers/subclasses may
        set on the instance; it is read with getattr so its absence is fine.
        """
        userLanguage = getattr(self, 'userLanguage', None)
        if userLanguage:
            return f"Please respond in '{userLanguage}' language."
        return None

    async def callAiTextBasic(self, prompt: str, context: Optional[str] = None) -> str:
        """
        Basic text processing using OpenAI.

        Falls back to callAiTextAdvanced (Anthropic) when OpenAI reports a
        context-length overflow; any other error is returned as an
        "Error: ..." string rather than raised.

        Args:
            prompt: The user prompt to process
            context: Optional system context/prompt

        Returns:
            The AI response as text, or an "Error: ..." string on failure
        """
        # Prepare messages in OpenAI chat format.
        messages: List[Dict[str, str]] = []
        if context:
            messages.append({"role": "system", "content": context})
        messages.append({"role": "user", "content": prompt})

        # Inject the user's preferred response language — but only if the
        # existing system prompt does not already mention "language" itself.
        ltext = self._languageInstruction()
        if ltext:
            if messages and messages[0]["role"] == "system":
                if "language" not in messages[0]["content"].lower():
                    messages[0]["content"] = f"{ltext} {messages[0]['content']}"
            else:
                messages.insert(0, {"role": "system", "content": ltext})

        try:
            return await self.openaiService.callAiBasic(messages)
        except ContextLengthExceededException as e:
            # Lazy %s args: the message is only formatted if the record is emitted.
            logger.warning("OpenAI context length exceeded, falling back to Anthropic: %s", e)
            # _is_fallback=True so a second failure cannot recurse back here.
            return await self.callAiTextAdvanced(prompt, context, _is_fallback=True)
        except Exception as e:
            logger.error("Error in OpenAI call: %s", e)
            return f"Error: {str(e)}"

    async def callAiTextAdvanced(self, prompt: str, context: Optional[str] = None, _is_fallback: bool = False) -> str:
        """
        Advanced text processing using Anthropic.
        Fallback to OpenAI if Anthropic is overloaded or rate-limited.

        Args:
            prompt: The user prompt to process
            context: Optional system context/prompt
            _is_fallback: Internal flag to prevent infinite recursion

        Returns:
            The AI response as text, or an "Error: ..." string when both
            services fail
        """
        try:
            # Anthropic expects system content in a top-level parameter rather
            # than a "system" message role, so this path folds any context
            # (plus the language instruction) directly into the user prompt.
            ltext = self._languageInstruction()
            if ltext:
                full_context = f"{ltext}\n\n{context}" if context else ltext
            else:
                full_context = context

            if full_context:
                # Send context as part of the user message for Anthropic.
                enhanced_prompt = f"Context:\n{full_context}\n\nUser Request:\n{prompt}"
                payload = [{"role": "user", "content": enhanced_prompt}]
            else:
                payload = [{"role": "user", "content": prompt}]

            response = await self.anthropicService.callAiBasic(payload)
            # Connector returns an OpenAI-style completion dict — TODO confirm
            # this matches AiAnthropic.callAiBasic's actual return shape.
            return response["choices"][0]["message"]["content"]
        except Exception as e:
            err_str = str(e)
            logger.warning("[UI NOTICE] Advanced AI failed, falling back to Basic AI (OpenAI). Reason: %s", err_str)
            if not _is_fallback:
                # Fallback to OpenAI basic, but only if we're not already in a fallback.
                return await self.callAiTextBasic(prompt, context)
            # Already in a fallback: stop here to prevent infinite recursion.
            logger.error("Both AI services failed, cannot provide fallback")
            return f"Error: Both AI services failed. Anthropic error: {err_str}"

    async def callAiImageBasic(self, prompt: str, imageData: Union[str, bytes], mimeType: Optional[str] = None) -> str:
        """
        Basic image processing using OpenAI.

        Args:
            prompt: The prompt for image analysis
            imageData: The image data (file path or bytes)
            mimeType: Optional MIME type of the image

        Returns:
            The AI response as text, or an "Error: ..." string on failure
        """
        try:
            return await self.openaiService.callAiImage(prompt, imageData, mimeType)
        except Exception as e:
            logger.error("Error in OpenAI image call: %s", e)
            return f"Error: {str(e)}"

    async def callAiImageAdvanced(self, prompt: str, imageData: Union[str, bytes], mimeType: Optional[str] = None) -> str:
        """
        Advanced image processing using Anthropic.

        Args:
            prompt: The prompt for image analysis
            imageData: The image data (file path or bytes)
            mimeType: Optional MIME type of the image

        Returns:
            The AI response as text, or an "Error: ..." string on failure
        """
        try:
            return await self.anthropicService.callAiImage(prompt, imageData, mimeType)
        except Exception as e:
            logger.error("Error in Anthropic image call: %s", e)
            return f"Error: {str(e)}"
|
|
|