"""
|
|
AI Settings - Centralized model configurations and limits
|
|
Uses IntegrationSettings only - no hardcoded defaults or fallbacks.
|
|
"""
|
|
from typing import Dict, Any
import logging
# Module-level logger using the standard `__name__` pattern so records are
# attributed to this module in the logging hierarchy.
logger = logging.getLogger(__name__)
# Legacy AI-function names mapped to their canonical equivalents so that
# older call sites keep working (backward compatibility).
# Built by inverting a canonical-name -> aliases table for readability.
FUNCTION_ALIASES = {
    alias: canonical
    for canonical, aliases in {
        "auto_cluster": ("cluster_keywords", "auto_cluster_keywords"),
        "generate_ideas": ("auto_generate_ideas",),
        "generate_content": ("auto_generate_content",),
        "generate_images": ("auto_generate_images",),
    }.items()
    for alias in aliases
}
def _openai_settings_for(account):
    """Return the active OpenAI IntegrationSettings for *account*, or None.

    Best effort: any failure (including an import error for the project
    models) is logged as a warning and treated as "not configured".
    """
    try:
        from igny8_core.modules.system.models import IntegrationSettings
        return IntegrationSettings.objects.filter(
            integration_type='openai',
            account=account,
            is_active=True,
        ).first()
    except Exception as e:
        logger.warning(
            "Could not load OpenAI settings for account %s: %s", account.id, e
        )
        return None


def _system_openai_settings():
    """Return OpenAI settings from the first configured system account, or None.

    Tries the well-known system-account slugs in priority order. Best effort:
    failures are logged and None is returned.
    """
    try:
        from igny8_core.auth.models import Account
        from igny8_core.modules.system.models import IntegrationSettings
        for slug in ('aws-admin', 'default-account', 'default'):
            system_account = Account.objects.filter(slug=slug).first()
            if not system_account:
                continue
            settings = IntegrationSettings.objects.filter(
                integration_type='openai',
                account=system_account,
                is_active=True,
            ).first()
            if settings:
                logger.info("Using OpenAI settings from system account: %s", slug)
                return settings
    except Exception as e:
        logger.warning("Could not load system account OpenAI settings: %s", e)
    return None


def _warn_if_unsupported(model, account):
    """Log a warning when *model* is not in the supported MODEL_RATES table.

    Validation is optional: if MODEL_RATES cannot be imported, skip silently.
    """
    try:
        from igny8_core.utils.ai_processor import MODEL_RATES
    except ImportError:
        return  # MODEL_RATES not available - skip validation
    if model not in MODEL_RATES:
        logger.warning(
            "Model '%s' for account %s is not in supported list. "
            "Supported models: %s",
            model, account.id, list(MODEL_RATES.keys()),
        )


def _response_format_for(model):
    """Return the OpenAI response_format dict for JSON-mode models, else None.

    If the JSON_MODE_MODELS constant cannot be imported, JSON mode is skipped.
    """
    try:
        from igny8_core.ai.constants import JSON_MODE_MODELS
    except ImportError:
        return None  # JSON_MODE_MODELS not available - skip
    if model in JSON_MODE_MODELS:
        return {"type": "json_object"}
    return None


def get_model_config(function_name: str, account) -> Dict[str, Any]:
    """
    Get model configuration from IntegrationSettings.

    Falls back to a system account ('aws-admin', 'default-account', or
    'default') if the user's account has no active OpenAI settings.

    Args:
        function_name: Name of the AI function. NOTE(review): previously this
            was resolved through FUNCTION_ALIASES into a local that was never
            used - the returned config is account-scoped, not function-scoped,
            so the dead lookup has been removed. The parameter is kept for
            interface compatibility.
        account: Account instance (required).

    Returns:
        dict: Model configuration with 'model', 'max_tokens', 'temperature'
        and 'response_format' (a JSON-mode dict or None).

    Raises:
        ValueError: If account not provided, IntegrationSettings not
            configured, or the settings have no 'model' entry.
    """
    if not account:
        raise ValueError("Account is required for model configuration")

    # Try the user's own account first, then fall back to a system account.
    integration_settings = _openai_settings_for(account)
    if not integration_settings:
        logger.info(
            "No OpenAI settings for account %s, falling back to system account",
            account.id,
        )
        integration_settings = _system_openai_settings()

    if not integration_settings:
        raise ValueError(
            f"OpenAI IntegrationSettings not configured for account {account.id} or system account. "
            f"Please configure OpenAI settings in the integration page."
        )

    config = integration_settings.config or {}

    model = config.get('model')
    if not model:
        raise ValueError(
            f"Model not configured in IntegrationSettings for account {account.id}. "
            f"Please set 'model' in OpenAI integration settings."
        )

    # Optional sanity check against the supported-model table (warn only).
    _warn_if_unsupported(model, account)

    # max_tokens default of 8192 is standardized across the entire codebase;
    # 0.7 is a reasonable default temperature.
    max_tokens = config.get('max_tokens', 8192)
    temperature = config.get('temperature', 0.7)

    return {
        'model': model,
        'max_tokens': max_tokens,
        'temperature': temperature,
        'response_format': _response_format_for(model),
    }