Implement V2 AI functions and enhance progress handling
- Added support for new V2 functions: `auto_cluster_v2` and `generate_ideas_v2`, including backend logic and API endpoints.
- Updated model configuration to ensure V2 functions validate the presence of models before execution.
- Enhanced progress modal to provide better feedback during asynchronous tasks, including task IDs for debugging.
- Updated frontend components to integrate the new V2 functionality and improve the clustering and idea-generation user experience.
This commit is contained in:
@@ -82,10 +82,8 @@ class AIEngine:
|
||||
ai_core = AICore(account=self.account)
|
||||
function_name = fn.get_name()
|
||||
|
||||
# Generate function_id for tracking (ai-{function_name}-01)
|
||||
# Normalize underscores to hyphens to match frontend tracking IDs
|
||||
function_id_base = function_name.replace('_', '-')
|
||||
function_id = f"ai-{function_id_base}-01-desktop"
|
||||
# Generate function_id for tracking (ai_{function_name})
|
||||
function_id = f"ai_{function_name}"
|
||||
|
||||
# Get model config from settings (Stage 4 requirement)
|
||||
# Pass account to read model from IntegrationSettings
|
||||
@@ -111,6 +109,18 @@ class AIEngine:
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
# For V2 functions: Validate model exists - no default, only execute if model is present
|
||||
if function_name.endswith('_v2'):
|
||||
if not model_from_integration or not model:
|
||||
error_msg = "AI model not configured. Please configure OpenAI model in Integration settings."
|
||||
self.console_tracker.error('ModelError', error_msg)
|
||||
self.step_tracker.add_request_step("PREP", "error", error_msg)
|
||||
self.tracker.error(error_msg, meta=self.step_tracker.get_meta())
|
||||
return {
|
||||
'success': False,
|
||||
'error': error_msg
|
||||
}
|
||||
|
||||
# Track configured model information so it shows in the progress modal
|
||||
self.step_tracker.add_request_step(
|
||||
"PREP",
|
||||
|
||||
@@ -0,0 +1,188 @@
|
||||
"""
|
||||
Auto Cluster Keywords V2 - Workflow Function
|
||||
Uses helpers folder imports and dynamic model loading
|
||||
Max 50 keywords for bulk actions
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, List, Any
|
||||
from django.db import transaction
|
||||
from igny8_core.ai.helpers.base import BaseAIFunction
|
||||
from igny8_core.modules.planner.models import Keywords, Clusters
|
||||
from igny8_core.ai.helpers.ai_core import AICore
|
||||
from igny8_core.ai.prompts import PromptRegistry
|
||||
from igny8_core.ai.helpers.settings import get_model_config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AutoClusterV2Function(BaseAIFunction):
    """Auto-cluster keywords using AI - V2 with dynamic model.

    Workflow: validate -> prepare -> build_prompt -> (AI call handled by the
    engine) -> parse_response -> save_output.  Accepts at most 50 keywords
    per run and creates one ``Clusters`` row per AI-suggested cluster.
    """

    def get_name(self) -> str:
        """Return the registry name used to dispatch this function."""
        return 'auto_cluster_v2'

    def get_metadata(self) -> Dict:
        """Return display metadata and per-phase progress messages."""
        return {
            'display_name': 'Keywords Clustering',
            'description': 'Group related keywords into semantic clusters',
            'phases': {
                'INIT': 'Validating keywords...',
                'PREP': 'Loading keyword data...',
                'AI_CALL': 'Analyzing relationships with AI...',
                'PARSE': 'Processing cluster results...',
                'SAVE': 'Creating clusters...',
                'DONE': 'Clustering completed successfully'
            }
        }

    def get_max_items(self) -> int:
        """Maximum number of keywords accepted per run."""
        return 50  # Max 50 keywords

    def validate(self, payload: dict, account=None) -> Dict:
        """Validate input with max 50 keywords.

        Returns ``{'valid': True}`` on success, otherwise
        ``{'valid': False, 'error': <message>}``.
        """
        ids = payload.get('ids', [])
        if not ids:
            return {'valid': False, 'error': 'No keywords selected'}

        if len(ids) > 50:
            return {'valid': False, 'error': 'Maximum 50 keywords allowed for clustering'}

        # Check keywords exist (scoped to the account when one is provided)
        queryset = Keywords.objects.filter(id__in=ids)
        if account:
            queryset = queryset.filter(account=account)

        if queryset.count() != len(ids):
            return {'valid': False, 'error': 'Some selected keywords not found'}

        return {'valid': True}

    def prepare(self, payload: dict, account=None) -> Dict:
        """Load keywords with relationships.

        Raises:
            ValueError: if no matching keywords are found.
        """
        ids = payload.get('ids', [])
        sector_id = payload.get('sector_id')

        queryset = Keywords.objects.filter(id__in=ids)
        if account:
            queryset = queryset.filter(account=account)
        if sector_id:
            queryset = queryset.filter(sector_id=sector_id)

        # select_related avoids per-row queries when building keyword_data
        keywords = list(queryset.select_related('seed_keyword', 'cluster', 'account', 'site', 'sector'))

        if not keywords:
            raise ValueError("No keywords found")

        keyword_data = [
            {
                'id': kw.id,
                'keyword': kw.keyword,
                'volume': kw.volume,
                'difficulty': kw.difficulty,
                'intent': kw.seed_keyword.intent if kw.seed_keyword else None,
            }
            for kw in keywords
        ]

        return {
            'keywords': keywords,  # Store original ORM objects for save_output
            'keyword_data': keyword_data,
            'sector_id': sector_id
        }

    def build_prompt(self, data: Dict, account=None) -> str:
        """Build the clustering prompt from the registry template."""
        keyword_data = data.get('keyword_data', [])
        sector_id = data.get('sector_id')

        # Format keywords, one bullet per keyword
        keywords_text = '\n'.join([
            f"- {kw['keyword']} (Volume: {kw['volume']}, Difficulty: {kw['difficulty']}, Intent: {kw.get('intent', 'N/A')})"
            for kw in keyword_data
        ])

        context = {'KEYWORDS': keywords_text}

        # Add sector context if available; best-effort, never fatal
        if sector_id:
            try:
                from igny8_core.auth.models import Sector
                sector = Sector.objects.get(id=sector_id)
                if sector:
                    context['SECTOR'] = sector.name
            except Exception:
                pass

        prompt = PromptRegistry.get_prompt(
            function_name='auto_cluster',
            account=account,
            context=context
        )

        # Ensure the prompt asks for JSON output; append an instruction if the
        # registry template does not already request a JSON-formatted response.
        prompt_lower = prompt.lower()
        has_json_request = (
            'json' in prompt_lower and
            ('format' in prompt_lower or 'respond' in prompt_lower or 'return' in prompt_lower or 'output' in prompt_lower)
        )

        if not has_json_request:
            prompt += "\n\nIMPORTANT: You must respond with valid JSON only. The response must be a JSON object with a 'clusters' array."

        return prompt

    def parse_response(self, response: str, step_tracker=None) -> List[Dict]:
        """Parse the AI response into a list of cluster dicts.

        Raises:
            ValueError: if the response is empty or lacks a 'clusters' array.
        """
        if not response or not response.strip():
            raise ValueError("Empty response from AI")

        ai_core = AICore(account=getattr(self, 'account', None))
        json_data = ai_core.extract_json(response)

        if not json_data or 'clusters' not in json_data:
            raise ValueError("Invalid response format: missing 'clusters' array")

        return json_data['clusters']

    def save_output(self, parsed: List[Dict], original_data: Any, account=None, step_tracker=None) -> Dict:
        """Save clusters and assign matched keywords to them.

        Matches the AI's keyword strings back to the loaded Keyword rows
        case-insensitively via a one-time lookup map (the previous
        implementation scanned the full keyword list for every string).
        """
        keywords = original_data.get('keywords', [])
        # First occurrence wins, mirroring the original first-match behavior
        keyword_by_text = {}
        for kw in keywords:
            keyword_by_text.setdefault(kw.keyword.lower(), kw)

        clusters_created = 0
        keywords_updated = 0

        with transaction.atomic():
            for cluster_data in parsed:
                cluster_name = cluster_data.get('name', 'Unnamed Cluster')
                cluster_keywords = cluster_data.get('keywords', [])

                if not cluster_keywords:
                    continue  # Skip empty clusters suggested by the AI

                cluster = Clusters.objects.create(
                    name=cluster_name,
                    description=f"Auto-clustered from {len(cluster_keywords)} keywords",
                    account=account,
                    status='active'
                )
                clusters_created += 1

                # Attach each matched keyword to the new cluster
                for keyword_text in cluster_keywords:
                    kw = keyword_by_text.get(keyword_text.lower())
                    if kw is not None:
                        kw.cluster = cluster
                        kw.save()
                        keywords_updated += 1

        return {
            'clusters_created': clusters_created,
            'keywords_updated': keywords_updated,
            'count': clusters_created
        }
|
||||
|
||||
@@ -0,0 +1,152 @@
|
||||
"""
|
||||
Generate Ideas V2 - Workflow Function
|
||||
Single cluster only, uses helpers folder imports
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, List, Any
|
||||
from django.db import transaction
|
||||
from igny8_core.ai.helpers.base import BaseAIFunction
|
||||
from igny8_core.modules.planner.models import Clusters, ContentIdeas, Keywords
|
||||
from igny8_core.ai.helpers.ai_core import AICore
|
||||
from igny8_core.ai.prompts import PromptRegistry
|
||||
from igny8_core.ai.helpers.settings import get_model_config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GenerateIdeasV2Function(BaseAIFunction):
    """Generate content ideas from cluster - V2 with dynamic model.

    Operates on exactly one cluster per run: loads its keywords, asks the
    AI for content ideas, and persists them as ContentIdeas rows.
    """

    def get_name(self) -> str:
        """Registry name used to dispatch this workflow function."""
        return 'generate_ideas_v2'

    def get_metadata(self) -> Dict:
        """Describe the function and its per-phase progress messages."""
        return {
            'display_name': 'Generate Ideas',
            'description': 'Generate SEO-optimized content ideas from keyword cluster',
            'phases': {
                'INIT': 'Validating cluster...',
                'PREP': 'Loading cluster data...',
                'AI_CALL': 'Generating ideas with AI...',
                'PARSE': 'Processing idea results...',
                'SAVE': 'Saving ideas...',
                'DONE': 'Ideas generated successfully',
            },
        }

    def get_max_items(self) -> int:
        """Only a single cluster may be processed per invocation."""
        return 1

    def validate(self, payload: dict, account=None) -> Dict:
        """Check that exactly one existing cluster was selected."""
        selected = payload.get('ids', [])
        if not selected:
            return {'valid': False, 'error': 'No cluster selected'}
        if len(selected) > 1:
            return {'valid': False, 'error': 'Only one cluster can be processed at a time'}

        clusters = Clusters.objects.filter(id=selected[0])
        if account:
            clusters = clusters.filter(account=account)

        if not clusters.first():
            return {'valid': False, 'error': 'Cluster not found'}

        return {'valid': True}

    def prepare(self, payload: dict, account=None) -> Dict:
        """Load the cluster plus a serializable view of its keywords.

        Raises:
            ValueError: if the cluster cannot be found.
        """
        target_id = payload.get('ids', [])[0]
        clusters = Clusters.objects.filter(id=target_id)
        if account:
            clusters = clusters.filter(account=account)

        cluster = clusters.prefetch_related('keywords__seed_keyword').first()
        if not cluster:
            raise ValueError("Cluster not found")

        # Prefer the seed keyword's text when one is linked
        keyword_rows = Keywords.objects.filter(cluster=cluster).select_related('seed_keyword')
        keywords = [
            {
                'keyword': row.seed_keyword.keyword if row.seed_keyword else row.keyword,
                'volume': row.volume,
                'difficulty': row.difficulty,
            }
            for row in keyword_rows
        ]

        return {
            'cluster': cluster,  # keep the ORM object for save_output
            'cluster_data': {
                'id': cluster.id,
                'name': cluster.name,
                'description': cluster.description or '',
                'keywords': keywords,
            },
        }

    def build_prompt(self, data: Dict, account=None) -> str:
        """Assemble the idea-generation prompt from the registry template."""
        info = data.get('cluster_data', {})
        names = [entry['keyword'] for entry in info.get('keywords', [])]

        cid = info.get('id', '')
        cname = info.get('name', '')
        # One-line summaries of the cluster and of its keyword list
        header = f"Cluster ID: {cid} | Name: {cname} | Description: {info.get('description', '')}"
        keyword_line = f"Cluster ID: {cid} | Name: {cname} | Keywords: {', '.join(names)}"

        return PromptRegistry.get_prompt(
            function_name='generate_ideas',
            account=account,
            context={
                'CLUSTERS': header,
                'CLUSTER_KEYWORDS': keyword_line,
            },
        )

    def parse_response(self, response: str, step_tracker=None) -> List[Dict]:
        """Extract the 'ideas' array from the AI's JSON response.

        Raises:
            ValueError: if the response is empty or lacks an 'ideas' array.
        """
        if not response or not response.strip():
            raise ValueError("Empty response from AI")

        extracted = AICore(account=getattr(self, 'account', None)).extract_json(response)

        if not extracted or 'ideas' not in extracted:
            raise ValueError("Invalid response format: missing 'ideas' array")

        return extracted.get('ideas', [])

    def save_output(self, parsed: List[Dict], original_data: Any, account=None, step_tracker=None) -> Dict:
        """Persist each parsed idea as a ContentIdeas row on the cluster."""
        cluster = original_data.get('cluster')
        if not cluster:
            raise ValueError("Cluster not found in original data")

        created = 0
        with transaction.atomic():
            for idea in parsed:
                ContentIdeas.objects.create(
                    cluster=cluster,
                    title=idea.get('title', 'Untitled Idea'),
                    description=idea.get('description', ''),
                    structure=idea.get('structure', 'article'),
                    account=account,
                    status='new',
                )
                created += 1

        return {
            'ideas_created': created,
            'count': created,
        }
|
||||
|
||||
@@ -34,6 +34,16 @@ MODEL_CONFIG = {
|
||||
"temperature": 0.7,
|
||||
"response_format": {"type": "json_object"},
|
||||
},
|
||||
"auto_cluster_v2": {
|
||||
"max_tokens": 3000,
|
||||
"temperature": 0.7,
|
||||
"response_format": {"type": "json_object"},
|
||||
},
|
||||
"generate_ideas_v2": {
|
||||
"max_tokens": 4000,
|
||||
"temperature": 0.7,
|
||||
"response_format": {"type": "json_object"},
|
||||
},
|
||||
}
|
||||
|
||||
# Function name aliases (for backward compatibility)
|
||||
@@ -86,7 +96,26 @@ def get_model_config(function_name: str, account=None) -> Dict[str, Any]:
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning(f"Could not load model from IntegrationSettings: {e}", exc_info=True)
|
||||
|
||||
# Merge with defaults
|
||||
# For V2 functions: Don't use defaults - only return config if model is present
|
||||
if function_name.endswith('_v2'):
|
||||
# V2 functions require model from IntegrationSettings - no defaults
|
||||
if not model_from_settings:
|
||||
# Return config without model (will be validated in engine)
|
||||
return {
|
||||
"model": None,
|
||||
"max_tokens": config.get('max_tokens', 4000),
|
||||
"temperature": config.get('temperature', 0.7),
|
||||
"response_format": config.get('response_format'),
|
||||
}
|
||||
# Model exists, return config with model
|
||||
return {
|
||||
"model": model_from_settings,
|
||||
"max_tokens": config.get('max_tokens', 4000),
|
||||
"temperature": config.get('temperature', 0.7),
|
||||
"response_format": config.get('response_format'),
|
||||
}
|
||||
|
||||
# For non-V2 functions: Merge with defaults (backward compatibility)
|
||||
default_config = {
|
||||
"model": "gpt-4.1",
|
||||
"max_tokens": 4000,
|
||||
|
||||
@@ -89,8 +89,20 @@ def _load_generate_images():
|
||||
from igny8_core.ai.functions.generate_images import GenerateImagesFunction
|
||||
return GenerateImagesFunction
|
||||
|
||||
def _load_auto_cluster_v2():
    """Lazy loader for auto_cluster_v2 function"""
    # Imported on first use so the function module (and its model imports)
    # is only loaded when the function is actually requested.
    from igny8_core.ai.functions.workflow_functions.auto_cluster_v2 import AutoClusterV2Function
    return AutoClusterV2Function
|
||||
|
||||
def _load_generate_ideas_v2():
    """Lazy loader for generate_ideas_v2 function"""
    # Imported on first use so the function module (and its model imports)
    # is only loaded when the function is actually requested.
    from igny8_core.ai.functions.workflow_functions.generate_ideas_v2 import GenerateIdeasV2Function
    return GenerateIdeasV2Function
|
||||
|
||||
# Register each public function name against its lazy loader; the backing
# class is only imported the first time the function is requested.
register_lazy_function('auto_cluster', _load_auto_cluster)
register_lazy_function('generate_ideas', _load_generate_ideas)
register_lazy_function('generate_content', _load_generate_content)
register_lazy_function('generate_images', _load_generate_images)
register_lazy_function('auto_cluster_v2', _load_auto_cluster_v2)
register_lazy_function('generate_ideas_v2', _load_generate_ideas_v2)
|
||||
|
||||
|
||||
@@ -571,6 +571,118 @@ class KeywordViewSet(SiteSectorModelViewSet):
|
||||
'error': f'Unexpected error: {str(e)}'
|
||||
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
@action(detail=False, methods=['post'], url_path='auto_cluster_v2', url_name='auto_cluster_v2')
def auto_cluster_v2(self, request):
    """Auto-cluster keywords V2 - New workflow function with max 50 keywords.

    Queues the 'auto_cluster_v2' AI task via Celery when a broker is
    available; otherwise (or on broker failure) falls back to synchronous
    execution.  Requires an active OpenAI model in IntegrationSettings —
    there is no default model for V2 functions.
    """
    import logging
    from igny8_core.ai.tasks import run_ai_task
    from kombu.exceptions import OperationalError as KombuOperationalError

    logger = logging.getLogger(__name__)

    try:
        # Get account (attached to the request by middleware, may be absent)
        account = getattr(request, 'account', None)
        account_id = account.id if account else None

        # Check model exists - no default, only execute if model is present
        if account:
            from igny8_core.modules.system.models import IntegrationSettings
            openai_settings = IntegrationSettings.objects.filter(
                integration_type='openai',
                account=account,
                is_active=True
            ).first()
            if not openai_settings or not openai_settings.config or not openai_settings.config.get('model'):
                return Response({
                    'success': False,
                    'error': 'AI model not configured. Please configure OpenAI model in Integration settings.'
                }, status=status.HTTP_400_BAD_REQUEST)

        # Prepare payload
        payload = {
            'ids': request.data.get('ids', []),
            'sector_id': request.data.get('sector_id')
        }

        logger.info(f"auto_cluster_v2 called with ids={payload['ids']}, sector_id={payload.get('sector_id')}")

        # Validate basic input
        if not payload['ids']:
            return Response({
                'success': False,
                'error': 'No keywords selected'
            }, status=status.HTTP_400_BAD_REQUEST)

        if len(payload['ids']) > 50:
            return Response({
                'success': False,
                'error': 'Maximum 50 keywords allowed for clustering'
            }, status=status.HTTP_400_BAD_REQUEST)

        def _run_sync():
            # Execute the task inline and translate its result into a
            # Response; shared by the no-Celery and broker-failure paths.
            result = run_ai_task(
                function_name='auto_cluster_v2',
                payload=payload,
                account_id=account_id
            )
            if result.get('success'):
                return Response({
                    'success': True,
                    **result
                }, status=status.HTTP_200_OK)
            return Response({
                'success': False,
                'error': result.get('error', 'Clustering failed')
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        # Try to queue Celery task
        try:
            if hasattr(run_ai_task, 'delay'):
                task = run_ai_task.delay(
                    function_name='auto_cluster_v2',
                    payload=payload,
                    account_id=account_id
                )
                logger.info(f"Task queued: {task.id}")
                return Response({
                    'success': True,
                    'task_id': str(task.id),
                    'message': 'Clustering started'
                }, status=status.HTTP_200_OK)

            # Celery not available - execute synchronously
            logger.warning("Celery not available, executing synchronously")
            return _run_sync()
        except (KombuOperationalError, ConnectionError) as e:
            # Broker connection failed - fall back to synchronous execution
            logger.warning(f"Celery broker unavailable, falling back to synchronous execution: {str(e)}")
            return _run_sync()
        except Exception as e:
            logger.error(f"Error in auto_cluster_v2: {str(e)}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        logger.error(f"Unexpected error in auto_cluster_v2: {str(e)}", exc_info=True)
        return Response({
            'success': False,
            'error': f'Unexpected error: {str(e)}'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
|
||||
class ClusterViewSet(SiteSectorModelViewSet):
|
||||
"""
|
||||
@@ -810,6 +922,117 @@ class ClusterViewSet(SiteSectorModelViewSet):
|
||||
'success': False,
|
||||
'error': f'Unexpected error: {str(e)}'
|
||||
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
@action(detail=False, methods=['post'], url_path='generate_ideas_v2', url_name='generate_ideas_v2')
def generate_ideas_v2(self, request):
    """Generate ideas V2 - Single cluster only.

    Queues the 'generate_ideas_v2' AI task via Celery when a broker is
    available; otherwise (or on broker failure) falls back to synchronous
    execution.  Requires an active OpenAI model in IntegrationSettings —
    there is no default model for V2 functions.
    """
    import logging
    from igny8_core.ai.tasks import run_ai_task
    from kombu.exceptions import OperationalError as KombuOperationalError

    logger = logging.getLogger(__name__)

    try:
        # Get account (attached to the request by middleware, may be absent)
        account = getattr(request, 'account', None)
        account_id = account.id if account else None

        # Check model exists - no default, only execute if model is present
        if account:
            from igny8_core.modules.system.models import IntegrationSettings
            openai_settings = IntegrationSettings.objects.filter(
                integration_type='openai',
                account=account,
                is_active=True
            ).first()
            if not openai_settings or not openai_settings.config or not openai_settings.config.get('model'):
                return Response({
                    'success': False,
                    'error': 'AI model not configured. Please configure OpenAI model in Integration settings.'
                }, status=status.HTTP_400_BAD_REQUEST)

        # Prepare payload
        payload = {
            'ids': request.data.get('ids', []),
        }

        logger.info(f"generate_ideas_v2 called with ids={payload['ids']}")

        # Validate basic input - exactly one cluster
        if not payload['ids']:
            return Response({
                'success': False,
                'error': 'No cluster selected'
            }, status=status.HTTP_400_BAD_REQUEST)

        if len(payload['ids']) > 1:
            return Response({
                'success': False,
                'error': 'Only one cluster can be processed at a time'
            }, status=status.HTTP_400_BAD_REQUEST)

        def _run_sync():
            # Execute the task inline and translate its result into a
            # Response; shared by the no-Celery and broker-failure paths.
            result = run_ai_task(
                function_name='generate_ideas_v2',
                payload=payload,
                account_id=account_id
            )
            if result.get('success'):
                return Response({
                    'success': True,
                    **result
                }, status=status.HTTP_200_OK)
            return Response({
                'success': False,
                'error': result.get('error', 'Idea generation failed')
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        # Try to queue Celery task
        try:
            if hasattr(run_ai_task, 'delay'):
                task = run_ai_task.delay(
                    function_name='generate_ideas_v2',
                    payload=payload,
                    account_id=account_id
                )
                logger.info(f"Task queued: {task.id}")
                return Response({
                    'success': True,
                    'task_id': str(task.id),
                    'message': 'Idea generation started'
                }, status=status.HTTP_200_OK)

            # Celery not available - execute synchronously
            logger.warning("Celery not available, executing synchronously")
            return _run_sync()
        except (KombuOperationalError, ConnectionError) as e:
            # Broker connection failed - fall back to synchronous execution
            logger.warning(f"Celery broker unavailable, falling back to synchronous execution: {str(e)}")
            return _run_sync()
        except Exception as e:
            logger.error(f"Error in generate_ideas_v2: {str(e)}", exc_info=True)
            return Response({
                'success': False,
                'error': str(e)
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        logger.error(f"Unexpected error in generate_ideas_v2: {str(e)}", exc_info=True)
        return Response({
            'success': False,
            'error': f'Unexpected error: {str(e)}'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
"""
|
||||
|
||||
Reference in New Issue
Block a user