testing prompts
This commit is contained in:
@@ -113,7 +113,7 @@ class AICore:
|
||||
self,
|
||||
prompt: str,
|
||||
model: str,
|
||||
max_tokens: int = 4000,
|
||||
max_tokens: int = 8192,
|
||||
temperature: float = 0.7,
|
||||
response_format: Optional[Dict] = None,
|
||||
api_key: Optional[str] = None,
|
||||
|
||||
@@ -100,7 +100,7 @@ def get_model_config(function_name: str, account) -> Dict[str, Any]:
|
||||
pass
|
||||
|
||||
# Get max_tokens and temperature from config (with reasonable defaults for API)
|
||||
max_tokens = config.get('max_tokens', 4000) # Reasonable default for API limits
|
||||
max_tokens = config.get('max_tokens', 16384) # Maximum for long-form content generation (2000-3000 words)
|
||||
temperature = config.get('temperature', 0.7) # Reasonable default
|
||||
|
||||
# Build response format based on model (JSON mode for supported models)
|
||||
|
||||
Reference in New Issue
Block a user