automation and ai and some planning and fixes and docs reorg

This commit is contained in:
IGNY8 VPS (Salman)
2025-12-29 01:41:36 +00:00
parent 748de099dd
commit 53fdebf733
20 changed files with 927 additions and 4288 deletions

View File

@@ -121,14 +121,14 @@ class AIProcessor:
temperature: float = 0.7,
response_format: Optional[Dict] = None,
api_key: Optional[str] = None,
function_id: Optional[str] = None,
prompt_prefix: Optional[str] = None,
response_steps=None
) -> Dict[str, Any]:
"""
Internal method to call OpenAI API.
EXACT match to reference plugin's igny8_call_openai() function.
Endpoint: https://api.openai.com/v1/chat/completions
Returns:
Dict with 'content', 'input_tokens', 'output_tokens', 'total_tokens', 'model', 'cost', 'error', 'api_id'
"""
@@ -159,12 +159,12 @@ class AIProcessor:
'Content-Type': 'application/json',
}
# Add function_id to prompt if provided (for tracking)
# Add prompt_prefix to prompt if provided (for tracking)
# Format: ##GP01-Clustering or ##CP01-Clustering
final_prompt = prompt
if function_id:
function_id_prefix = f'function_id: "{function_id}"\n\n'
final_prompt = function_id_prefix + prompt
logger.info(f"Added function_id to prompt: {function_id}")
if prompt_prefix:
final_prompt = f'{prompt_prefix}\n\n{prompt}'
logger.info(f"Added prompt prefix: {prompt_prefix}")
# EXACT request format from reference plugin (openai-api.php line 402-404)
body_data = {
@@ -463,13 +463,15 @@ class AIProcessor:
Returns:
Dict with 'content', 'tokens_used', 'model', 'cost', 'error'
"""
# Generate function_id for tracking (ai-generate-content-03 for AIProcessor path)
function_id = "ai-generate-content-03"
# Generate prompt prefix for tracking (e.g., ##GP03-ContentGen or ##CP03-ContentGen)
from igny8_core.ai.prompts import get_prompt_prefix_for_function
prompt_prefix = get_prompt_prefix_for_function('generate_content', account=self.account)
# Get response_format from settings for generate_content
from igny8_core.ai.settings import get_model_config
model_config = get_model_config('generate_content')
model_config = get_model_config('generate_content', account=self.account)
response_format = model_config.get('response_format')
result = self._call_openai(prompt, model, max_tokens, temperature, response_format=response_format, function_id=function_id)
result = self._call_openai(prompt, model, max_tokens, temperature, response_format=response_format, prompt_prefix=prompt_prefix)
return {
'content': result.get('content', ''),