"""
|
|
Base connector interface for AI connectors.
|
|
All AI connectors should inherit from this class.
|
|
"""
|
|
|
|
from abc import ABC, abstractmethod
|
|
from typing import List, Dict, Any, Optional
|
|
from modules.datamodels.datamodelAi import AiModel
|
|
|
|
|
|
class BaseConnectorAi(ABC):
    """Base class for all AI connectors."""

    def __init__(self):
        self._models_cache: Optional[List[AiModel]] = None
        self._last_cache_update: Optional[float] = None
        self._cache_ttl: float = 300.0  # 5 minutes cache TTL

    @abstractmethod
    def getModels(self) -> List[AiModel]:
        """
        Get all available models for this connector.

        Should be implemented by each connector.
        """
        pass

    @abstractmethod
    def getConnectorType(self) -> str:
        """
        Get the connector type identifier.

        Should return one of: openai, anthropic, perplexity, tavily.
        """
        pass
    def getCachedModels(self) -> List[AiModel]:
        """
        Get cached models with a TTL check.

        Returns cached models if still valid, otherwise refreshes the cache.
        """
        import time

        current_time = time.time()

        # Return the cached list if it exists and has not expired
        if (self._models_cache is not None and
                self._last_cache_update is not None and
                current_time - self._last_cache_update < self._cache_ttl):
            return self._models_cache

        # Refresh the cache
        self._models_cache = self.getModels()
        self._last_cache_update = current_time

        return self._models_cache
    def clearCache(self):
        """Clear the models cache."""
        self._models_cache = None
        self._last_cache_update = None

    def getModelByName(self, name: str) -> Optional[AiModel]:
        """Get a specific model by name."""
        models = self.getCachedModels()
        for model in models:
            if model.name == name:
                return model
        return None

    def getModelsByCapability(self, capability: str) -> List[AiModel]:
        """Get models that support a specific capability."""
        models = self.getCachedModels()
        return [model for model in models if capability in model.capabilities]

    def getModelsByTag(self, tag: str) -> List[AiModel]:
        """Get models that have a specific tag."""
        models = self.getCachedModels()
        return [model for model in models if tag in model.tags]

    def getAvailableModels(self) -> List[AiModel]:
        """Get only available models."""
        models = self.getCachedModels()
        return [model for model in models if model.isAvailable]
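
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of this module's API): a concrete connector
# only needs to implement the two abstract methods; the caching and filtering
# helpers are inherited from BaseConnectorAi. The ExampleConnectorAi class and
# its return values below are hypothetical, since constructing AiModel
# instances is provider-specific.
#
#   class ExampleConnectorAi(BaseConnectorAi):
#       def getModels(self) -> List[AiModel]:
#           # A real connector would query its provider here and map the
#           # response to AiModel objects.
#           return []
#
#       def getConnectorType(self) -> str:
#           return "openai"
#
#   connector = ExampleConnectorAi()
#   connector.getCachedModels()               # first call populates the cache
#   connector.getModelsByCapability("chat")   # served from cache until the TTL expires
# ---------------------------------------------------------------------------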