Removed idea generation function

This commit is contained in:
alorig
2025-11-10 00:18:50 +05:00
parent 9fe6153c22
commit 71487575bd
3 changed files with 6 additions and 941 deletions

View File

@@ -1,303 +0,0 @@
"""
Generate Ideas AI Function
Extracted from modules/planner/tasks.py
"""
import logging
import json
from typing import Dict, List, Any
from django.db import transaction
from igny8_core.ai.base import BaseAIFunction
from igny8_core.modules.planner.models import Clusters, ContentIdeas
from igny8_core.ai.ai_core import AICore
from igny8_core.ai.validators import validate_cluster_exists
from igny8_core.ai.tracker import ConsoleStepTracker
from igny8_core.ai.prompts import PromptRegistry
from igny8_core.ai.settings import get_model_config
logger = logging.getLogger(__name__)
class GenerateIdeasFunction(BaseAIFunction):
    """AI function that turns keyword clusters into SEO-optimized content ideas."""

    def get_name(self) -> str:
        """Registry key identifying this AI function."""
        return 'generate_ideas'

    def get_metadata(self) -> Dict:
        """UI metadata: display name, description and per-phase status messages."""
        phase_messages = {
            'INIT': 'Initializing idea generation...',
            'PREP': 'Loading clusters...',
            'AI_CALL': 'Generating ideas with AI...',
            'PARSE': 'Parsing idea data...',
            'SAVE': 'Saving ideas...',
            'DONE': 'Ideas generated!'
        }
        return {
            'display_name': 'Generate Ideas',
            'description': 'Generate SEO-optimized content ideas from keyword clusters',
            'phases': phase_messages,
        }

    def get_max_items(self) -> int:
        """Item cap for this function; None means unlimited."""
        return None

    def validate(self, payload: dict, account=None) -> Dict:
        """Validate the payload; only the first cluster ID is checked (single-cluster flow)."""
        base_result = super().validate(payload, account)
        if not base_result['valid']:
            return base_result
        ids = payload.get('ids', [])
        if ids:
            # Single-cluster idea generation: verify only the first ID.
            existence = validate_cluster_exists(ids[0], account)
            if not existence['valid']:
                return existence
        # Plan-limit checks were intentionally removed.
        return {'valid': True}

    def prepare(self, payload: dict, account=None) -> Dict:
        """Load the target cluster (with its keywords) and shape it for the prompt.

        Raises:
            ValueError: if no IDs were supplied or the cluster cannot be found.
        """
        ids = payload.get('ids', [])
        if not ids:
            raise ValueError("No cluster IDs provided")
        target_id = ids[0]  # single-cluster flow for now
        qs = Clusters.objects.filter(id=target_id)
        if account:
            qs = qs.filter(account=account)  # enforce account isolation
        cluster = (
            qs.select_related('sector', 'account', 'site')
            .prefetch_related('keywords')
            .first()
        )
        if not cluster:
            raise ValueError("Cluster not found")
        # Local import mirrors the original module layout.
        from igny8_core.modules.planner.models import Keywords
        keyword_values = Keywords.objects.filter(cluster=cluster).values_list('keyword', flat=True)
        formatted = [{
            'id': cluster.id,
            'name': cluster.name,
            'description': cluster.description or '',
            'keywords': list(keyword_values),
        }]
        return {
            'cluster': cluster,
            'cluster_data': formatted,
            'account': account or cluster.account,
        }

    def build_prompt(self, data: Dict, account=None) -> str:
        """Render the generate_ideas prompt from the registry with cluster context."""
        clusters = data['cluster_data']
        account = account or data.get('account')
        summary_lines = []
        keyword_lines = []
        for c in clusters:
            summary_lines.append(
                f"Cluster ID: {c.get('id', '')} | Name: {c.get('name', '')} | Description: {c.get('description', '')}"
            )
            keyword_lines.append(
                f"Cluster ID: {c.get('id', '')} | Name: {c.get('name', '')} | Keywords: {', '.join(c.get('keywords', []))}"
            )
        return PromptRegistry.get_prompt(
            function_name='generate_ideas',
            account=account,
            context={
                'CLUSTERS': '\n'.join(summary_lines),
                'CLUSTER_KEYWORDS': '\n'.join(keyword_lines),
            }
        )

    def parse_response(self, response: str, step_tracker=None) -> List[Dict]:
        """Extract the `ideas` list from the raw AI response.

        Raises:
            ValueError: if the response contains no parseable `ideas` payload.
        """
        ai_core = AICore(account=self.account if hasattr(self, 'account') else None)
        parsed = ai_core.extract_json(response)
        if not parsed or 'ideas' not in parsed:
            error_msg = f"Failed to parse ideas response: {response[:200]}..."
            logger.error(error_msg)
            raise ValueError(error_msg)
        return parsed.get('ideas', [])

    def save_output(
        self,
        parsed: List[Dict],
        original_data: Dict,
        account=None,
        progress_tracker=None,
        step_tracker=None
    ) -> Dict:
        """Persist each parsed idea as a ContentIdeas row inside one transaction.

        Raises:
            ValueError: if no account is available for the new rows.
        """
        cluster = original_data['cluster']
        account = account or original_data.get('account')
        if not account:
            raise ValueError("Account is required for idea creation")
        created = 0
        with transaction.atomic():
            for item in parsed:
                # The model may return description as a dict or other non-string.
                description = item.get('description', '')
                if isinstance(description, dict):
                    description = json.dumps(description)
                elif not isinstance(description, str):
                    description = str(description)
                target_keywords = item.get('covered_keywords', '') or item.get('target_keywords', '')
                ContentIdeas.objects.create(
                    idea_title=item.get('title', 'Untitled Idea'),
                    description=description,
                    content_type=item.get('content_type', 'blog_post'),
                    content_structure=item.get('content_structure', 'supporting_page'),
                    target_keywords=target_keywords,
                    keyword_cluster=cluster,
                    estimated_word_count=item.get('estimated_word_count', 1500),
                    status='new',
                    account=account,
                    site=cluster.site,
                    sector=cluster.sector,
                )
                created += 1
        return {
            'count': created,
            'ideas_created': created
        }
def generate_ideas_core(cluster_id: int, account_id: int = None, progress_callback=None):
    """
    Core logic for generating ideas (legacy function signature for backward compatibility).
    Can be called with or without Celery.
    Args:
        cluster_id: Cluster ID to generate idea for
        account_id: Account ID for account isolation
        progress_callback: Optional function to call for progress updates
    Returns:
        Dict with 'success', 'idea_created', 'message', etc.
    """
    import sys

    def _console(line):
        # Mirror progress to stdout so worker logs always capture it.
        print(line, flush=True, file=sys.stdout)

    _console("=" * 80)
    _console("[GENERATE IDEAS CORE] Function started")
    _console(f"[GENERATE IDEAS CORE] cluster_id: {cluster_id}")
    _console(f"[GENERATE IDEAS CORE] account_id: {account_id}")
    _console("=" * 80)
    tracker = ConsoleStepTracker('generate_ideas')
    tracker.init("Task started")
    try:
        from igny8_core.auth.models import Account
        account = Account.objects.get(id=account_id) if account_id else None
        tracker.prep("Loading account and cluster data...")
        fn = GenerateIdeasFunction()
        fn.account = account  # parse_response reads this attribute
        payload = {'ids': [cluster_id]}
        tracker.prep("Validating input...")
        validated = fn.validate(payload, account)
        if not validated['valid']:
            tracker.error('ValidationError', validated['error'])
            return {'success': False, 'error': validated['error']}
        tracker.prep("Loading cluster with keywords...")
        data = fn.prepare(payload, account)
        tracker.prep("Building prompt...")
        prompt = fn.build_prompt(data, account)
        # Model parameters come from centralized AI settings.
        model_config = get_model_config('generate_ideas')
        ai_core = AICore(account=account)
        result = ai_core.run_ai_request(
            prompt=prompt,
            model=model_config.get('model'),
            max_tokens=model_config.get('max_tokens'),
            temperature=model_config.get('temperature'),
            response_format=model_config.get('response_format'),
            function_name='generate_ideas',
            tracker=tracker
        )
        if result.get('error'):
            return {'success': False, 'error': result['error']}
        tracker.parse("Parsing AI response...")
        ideas_data = fn.parse_response(result['content'])
        if not ideas_data:
            tracker.error('ParseError', 'No ideas generated by AI')
            return {'success': False, 'error': 'No ideas generated by AI'}
        tracker.parse(f"Parsed {len(ideas_data)} idea(s)")
        # First idea's title is used for the user-facing message;
        # all parsed ideas are still saved below.
        idea_data = ideas_data[0]
        tracker.save("Saving idea to database...")
        save_result = fn.save_output(ideas_data, data, account)
        tracker.save(f"Saved {save_result['ideas_created']} idea(s)")
        tracker.done(f"Idea '{idea_data.get('title', 'Untitled')}' created successfully")
        return {
            'success': True,
            'idea_created': save_result['ideas_created'],
            'message': f"Idea '{idea_data.get('title', 'Untitled')}' created"
        }
    except Exception as e:
        import traceback
        error_msg = str(e)
        error_type = type(e).__name__
        _console("=" * 80)
        _console(f"[GENERATE IDEAS CORE] ERROR: {error_type}: {error_msg}")
        _console("[GENERATE IDEAS CORE] Full traceback:")
        traceback.print_exc(file=sys.stdout)
        _console("=" * 80)
        tracker.error('Exception', error_msg, e)
        logger.error(f"Error in generate_ideas_core: {error_msg}", exc_info=True)
        return {'success': False, 'error': error_msg, 'error_type': error_type}

View File

@@ -7,7 +7,6 @@ from typing import List
from django.db import transaction
from igny8_core.modules.planner.models import Keywords, Clusters, ContentIdeas
from igny8_core.utils.ai_processor import ai_processor
from igny8_core.ai.functions.generate_ideas import generate_ideas_core
from igny8_core.ai.tracker import ConsoleStepTracker
logger = logging.getLogger(__name__)
@@ -729,456 +728,8 @@ def auto_cluster_keywords_task(self, keyword_ids: List[int], sector_id: int = No
return error_dict
@shared_task(bind=True, max_retries=3)
def auto_generate_ideas_task(self, cluster_ids: List[int], account_id: int = None):
    """
    Celery task to generate content ideas for clusters using AI.

    Delegates per-cluster work to generate_ideas_core (which saves the ideas);
    this task only loads/validates clusters and reports progress via
    self.update_state.

    Args:
        cluster_ids: List of cluster IDs
        account_id: Account ID for account isolation
    """
    import sys
    print("=" * 80, flush=True, file=sys.stdout)
    print("[CELERY TASK] auto_generate_ideas_task STARTED", flush=True, file=sys.stdout)
    print(f"[CELERY TASK] Task ID: {self.request.id}", flush=True, file=sys.stdout)
    print(f"[CELERY TASK] cluster_ids: {cluster_ids}", flush=True, file=sys.stdout)
    print(f"[CELERY TASK] account_id: {account_id}", flush=True, file=sys.stdout)
    print("=" * 80, flush=True, file=sys.stdout)
    logger.info("=" * 80)
    logger.info("auto_generate_ideas_task STARTED")
    logger.info(f" - Task ID: {self.request.id}")
    logger.info(f" - cluster_ids: {cluster_ids}")
    logger.info(f" - account_id: {account_id}")
    logger.info("=" * 80)
    try:
        from django.db import connection
        # Fail fast if the database is unreachable.
        try:
            connection.ensure_connection()
            logger.info("Database connection: OK")
        except Exception as db_error:
            logger.error(f"Database connection error: {type(db_error).__name__}: {str(db_error)}")
            raise
        # Initialize progress
        logger.info("Initializing task progress state...")
        self.update_state(
            state='PROGRESS',
            meta={
                'current': 0,
                'total': len(cluster_ids),
                'percentage': 0,
                'message': 'Initializing content ideas generation...',
                'phase': 'initializing'
            }
        )
        # Load clusters with their relationships (including site).
        logger.info(f"Querying clusters with IDs: {cluster_ids}")
        try:
            clusters_queryset = Clusters.objects.filter(id__in=cluster_ids)
            logger.info(f"Initial queryset count: {clusters_queryset.count()}")
            if account_id:
                clusters_queryset = clusters_queryset.filter(account_id=account_id)
                logger.info(f"After account filter count: {clusters_queryset.count()}")
            logger.info("Loading clusters with select_related...")
            clusters = list(clusters_queryset.select_related('sector', 'account', 'site', 'sector__site'))
            logger.info(f"Successfully loaded {len(clusters)} clusters")
            for c in clusters:
                account = getattr(c, 'account', None)
                logger.info(f" Cluster {c.id}: name='{c.name}', account_id={account.id if account else 'None'}, site_id={c.site_id if c.site else 'None'}, sector_id={c.sector_id if c.sector else 'None'}")
        except Exception as query_error:
            logger.error(f"Error querying clusters: {type(query_error).__name__}: {str(query_error)}", exc_info=True)
            raise
        if not clusters:
            logger.warning(f"No clusters found: {cluster_ids}")
            return {'success': False, 'error': 'No clusters found'}
        total_clusters = len(clusters)
        # Progress: preparing clusters (0-10%)
        self.update_state(
            state='PROGRESS',
            meta={
                'current': 0,
                'total': total_clusters,
                'percentage': 5,
                'message': f'Preparing {total_clusters} clusters for idea generation...',
                'phase': 'preparing'
            }
        )
        # Format cluster data for AI (used for logging below; actual prompt
        # construction happens inside generate_ideas_core).
        cluster_data = []
        for idx, cluster in enumerate(clusters):
            keywords = Keywords.objects.filter(cluster=cluster).values_list('keyword', flat=True)
            keywords_list = list(keywords)
            cluster_item = {
                'id': cluster.id,
                'name': cluster.name,
                'description': cluster.description or '',
                'keywords': keywords_list,
            }
            cluster_data.append(cluster_item)
            logger.info(f"Cluster {idx + 1}/{total_clusters} data for AI:")
            logger.info(f" - ID: {cluster_item['id']}")
            logger.info(f" - Name: {cluster_item['name']}")
            logger.info(f" - Description: {cluster_item['description'][:100] if cluster_item['description'] else '(empty)'}...")
            logger.info(f" - Keywords count: {len(keywords_list)}")
            logger.info(f" - Keywords: {keywords_list[:5]}{'...' if len(keywords_list) > 5 else ''}")
            account = getattr(cluster, 'account', None)
            logger.info(f" - Cluster account: {account.id if account else 'None'}")
            logger.info(f" - Cluster site: {cluster.site_id if cluster.site else 'None'}")
            logger.info(f" - Cluster sector: {cluster.sector_id if cluster.sector else 'None'}")
            progress_pct = 5 + int((idx / total_clusters) * 5)
            self.update_state(
                state='PROGRESS',
                meta={
                    'current': idx + 1,
                    'total': total_clusters,
                    'percentage': progress_pct,
                    'message': f"Preparing cluster '{cluster.name}' ({idx + 1} of {total_clusters})...",
                    'phase': 'preparing',
                    'current_item': cluster.name
                }
            )
        # Log clean request data before sending to AI
        logger.info("=" * 80)
        logger.info("CLEAN REQUEST DATA FOR AI (before sending request):")
        logger.info("=" * 80)
        import json
        clean_data = {
            'total_clusters': len(cluster_data),
            'clusters': [
                {
                    'id': c['id'],
                    'name': c['name'],
                    'description': c['description'][:200] if c['description'] else '(empty)',
                    'keywords_count': len(c['keywords']),
                    'keywords': c['keywords'],
                }
                for c in cluster_data
            ]
        }
        logger.info(json.dumps(clean_data, indent=2))
        logger.info("=" * 80)
        # Progress: generating ideas with AI (10-80%)
        self.update_state(
            state='PROGRESS',
            meta={
                'current': 0,
                'total': total_clusters,
                'percentage': 10,
                'message': 'Generating content ideas with AI...',
                'phase': 'generating'
            }
        )
        # NOTE: account_id is intentionally re-derived from the first loaded
        # cluster here (existing behavior); the clusters were already filtered
        # by the caller-supplied account_id above.
        account = clusters[0].account if clusters else None
        account_id = account.id if account else None
        ideas_created = 0
        # Process each cluster individually via the framework entry point,
        # which has ConsoleStepTracker logging built in and saves the ideas.
        for idx, cluster in enumerate(clusters):
            cluster_id = cluster.id
            logger.info(f"Processing cluster {idx + 1}/{total_clusters}: {cluster_id}")
            progress_pct = 10 + int((idx / total_clusters) * 70)
            self.update_state(
                state='PROGRESS',
                meta={
                    'current': idx + 1,
                    'total': total_clusters,
                    'percentage': progress_pct,
                    'message': f'Generating idea for cluster "{cluster.name}" ({idx + 1} of {total_clusters})...',
                    'phase': 'generating',
                    'current_item': cluster.name
                }
            )
            try:
                print(f"[CELERY TASK] Calling generate_ideas_core for cluster {cluster_id}...", flush=True, file=sys.stdout)
                result = generate_ideas_core(cluster_id, account_id=account_id)
                print(f"[CELERY TASK] generate_ideas_core returned: success={result.get('success')}, error={result.get('error')}", flush=True, file=sys.stdout)
                if result.get('success'):
                    ideas_created += result.get('idea_created', 0)
                    logger.info(f"✓ Successfully generated idea for cluster {cluster_id}")
                    print(f"[CELERY TASK] ✓ Successfully generated idea for cluster {cluster_id}", flush=True, file=sys.stdout)
                else:
                    error_msg = result.get('error', 'Unknown error')
                    logger.error(f"✗ Failed to generate idea for cluster {cluster_id}: {error_msg}")
                    print(f"[CELERY TASK] ✗ Failed to generate idea for cluster {cluster_id}: {error_msg}", flush=True, file=sys.stdout)
                    # Record the per-cluster error but continue with the rest.
                    self.update_state(
                        state='PROGRESS',
                        meta={
                            'current': idx + 1,
                            'total': total_clusters,
                            'percentage': progress_pct,
                            'message': f'Error generating idea for cluster "{cluster.name}": {error_msg}',
                            'phase': 'error',
                            'current_item': cluster.name,
                            'error': error_msg,
                            'error_type': result.get('error_type', 'GenerationError')
                        }
                    )
            except Exception as e:
                import traceback
                error_msg = str(e)
                error_type = type(e).__name__
                print("=" * 80, flush=True, file=sys.stdout)
                print(f"[CELERY TASK] EXCEPTION in generate_ideas_core call: {error_type}: {error_msg}", flush=True, file=sys.stdout)
                print("[CELERY TASK] Full traceback:", flush=True, file=sys.stdout)
                traceback.print_exc(file=sys.stdout)
                print("=" * 80, flush=True, file=sys.stdout)
                logger.error(f"✗ Error generating idea for cluster {cluster_id}: {error_msg}", exc_info=True)
                self.update_state(
                    state='PROGRESS',
                    meta={
                        'current': idx + 1,
                        'total': total_clusters,
                        'percentage': progress_pct,
                        'message': f'Exception generating idea for cluster "{cluster.name}": {error_msg}',
                        'phase': 'error',
                        'current_item': cluster.name,
                        'error': error_msg,
                        'error_type': error_type
                    }
                )
        # Ideas are already saved by generate_ideas_core; just log results.
        logger.info("=" * 80)
        logger.info(f"IDEAS GENERATION COMPLETE: {ideas_created} ideas created")
        logger.info("=" * 80)
        if ideas_created == 0:
            logger.warning("No ideas were created")
            self.update_state(
                state='FAILURE',
                meta={
                    'error': 'No ideas created',
                    'message': 'No ideas were created'
                }
            )
            return {'success': False, 'error': 'No ideas created'}
        final_message = f"Ideas generation complete: {ideas_created} ideas created for {total_clusters} clusters"
        logger.info(final_message)
        return {
            'success': True,
            'ideas_created': ideas_created,
            'message': final_message,
        }
    except Exception as e:
        logger.error(f"Error in auto_generate_ideas_task: {str(e)}", exc_info=True)
        self.update_state(
            state='FAILURE',
            meta={
                'error': str(e),
                'message': f'Error: {str(e)}'
            }
        )
        raise
def _generate_single_idea_core(cluster_id: int, account_id: int = None, progress_callback=None):
    """
    Core logic for generating a single content idea for a cluster.
    Can be called with or without Celery.

    Loads/validates the cluster, then delegates the actual generation (and
    saving) to generate_ideas_core, forwarding progress via progress_callback.

    Args:
        cluster_id: Cluster ID to generate idea for
        account_id: Account ID for account isolation
        progress_callback: Optional function to call for progress updates (for Celery tasks)

    Returns:
        Dict with 'success', 'idea_created', 'message' (or 'error' on failure).
    """
    try:
        # Initialize progress if callback provided
        if progress_callback:
            progress_callback(
                state='PROGRESS',
                meta={
                    'current': 0,
                    'total': 1,
                    'percentage': 0,
                    'message': 'Initializing single idea generation...',
                    'phase': 'initializing'
                }
            )
        # Get cluster with keywords and relationships
        clusters_queryset = Clusters.objects.filter(id=cluster_id)
        if account_id:
            clusters_queryset = clusters_queryset.filter(account_id=account_id)
        clusters = list(clusters_queryset.select_related('sector', 'account', 'site').prefetch_related('keywords'))
        if not clusters:
            logger.warning(f"Cluster not found: {cluster_id}")
            return {'success': False, 'error': 'Cluster not found'}
        cluster = clusters[0]
        # Progress: preparing cluster (0-10%)
        if progress_callback:
            progress_callback(
                state='PROGRESS',
                meta={
                    'current': 0,
                    'total': 1,
                    'percentage': 5,
                    'message': f'Preparing cluster "{cluster.name}"...',
                    'phase': 'preparing',
                    'current_item': cluster.name
                }
            )
        # Progress: generating idea with AI (10-80%)
        if progress_callback:
            progress_callback(
                state='PROGRESS',
                meta={
                    'current': 0,
                    'total': 1,
                    'percentage': 10,
                    'message': 'Generating content idea with AI...',
                    'phase': 'generating'
                }
            )
        # NOTE: account_id is intentionally re-derived from the loaded cluster
        # (existing behavior); the queryset was already account-filtered above.
        account = getattr(cluster, 'account', None)
        account_id = account.id if account else None
        from igny8_core.ai.functions.generate_ideas import generate_ideas_core
        # generate_ideas_core has ConsoleStepTracker logging built in.
        try:
            result = generate_ideas_core(cluster_id, account_id=account_id, progress_callback=progress_callback)
        except Exception as e:
            logger.error(f"Error generating idea: {str(e)}", exc_info=True)
            return {'success': False, 'error': str(e)}
        if not result.get('success'):
            error_msg = result.get('error', 'Unknown error')
            logger.error(f"AI idea generation error: {error_msg}")
            return {'success': False, 'error': error_msg}
        # Accept either result key (legacy callers used both spellings).
        idea_created = result.get('idea_created', 0) or result.get('ideas_created', 0)
        if progress_callback:
            progress_callback(
                state='PROGRESS',
                meta={
                    'current': 1,
                    'total': 1,
                    'percentage': 95,
                    'message': 'Idea generation complete',
                    'phase': 'complete'
                }
            )
        # Ideas are already saved by generate_ideas_core; nothing to save here.
        final_message = f"Idea generation complete: {idea_created} idea(s) created"
        logger.info(final_message)
        if progress_callback:
            progress_callback(
                state='SUCCESS',
                meta={
                    'current': 1,
                    'total': 1,
                    'percentage': 100,
                    'message': final_message,
                    'phase': 'completed'
                }
            )
        return {
            'success': True,
            'idea_created': idea_created,
            'message': final_message,
        }
    except Exception as e:
        logger.error(f"Error in _generate_single_idea_core: {str(e)}", exc_info=True)
        if progress_callback:
            progress_callback(
                state='FAILURE',
                meta={
                    'error': str(e),
                    'message': f'Error: {str(e)}'
                }
            )
        return {'success': False, 'error': str(e)}
@shared_task(bind=True, max_retries=3)
def generate_single_idea_task(self, cluster_id: int, account_id: int = None):
    """
    Celery task to generate a single content idea for a cluster using AI.
    Args:
        cluster_id: Cluster ID
        account_id: Account ID for account isolation
    """
    # Bridge core-function progress reports into Celery task state.
    def progress_callback(state, meta):
        self.update_state(state=state, meta=meta)

    return _generate_single_idea_core(cluster_id, account_id, progress_callback)
# REMOVED: All idea generation functions removed
# - auto_generate_ideas_task
# - _generate_single_idea_core
# - generate_single_idea_task

View File

@@ -714,99 +714,7 @@ class ClusterViewSet(SiteSectorModelViewSet):
# Save with all required fields explicitly
serializer.save(account=account, site=site, sector=sector)
@action(detail=False, methods=['post'], url_path='auto_generate_ideas', url_name='auto_generate_ideas')
def auto_generate_ideas(self, request):
    """Generate content ideas for clusters using AI.

    Queues auto_generate_ideas_task via Celery when available; falls back to
    synchronous execution when the broker is unreachable or Celery is absent.
    """
    import logging
    logger = logging.getLogger(__name__)
    ids = request.data.get('ids', [])
    if not ids:
        return Response({'error': 'No cluster IDs provided'}, status=status.HTTP_400_BAD_REQUEST)
    if len(ids) > 5:
        return Response({'error': 'Maximum 5 clusters allowed for idea generation'}, status=status.HTTP_400_BAD_REQUEST)
    # Resolve account id defensively - request.account may raise
    # RelatedObjectDoesNotExist on access.
    account_id = None
    try:
        account = getattr(request, 'account', None)
        if account:
            # Access pk directly instead of id to avoid potential relationship access
            account_id = getattr(account, 'pk', None) or getattr(account, 'id', None)
    except Exception as e:
        logger.error(f"Error getting account: {type(e).__name__}: {e}", exc_info=True)
        account_id = None

    def _run_synchronously(task_fn):
        # Execute the task in-process and translate its dict result to a Response.
        result = task_fn(ids, account_id=account_id)
        if result.get('success'):
            return Response({
                'success': True,
                'ideas_created': result.get('ideas_created', 0),
                'message': 'Ideas generated successfully'
            }, status=status.HTTP_200_OK)
        return Response({
            'success': False,
            'error': result.get('error', 'Idea generation failed')
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    try:
        logger.info(f"auto_generate_ideas called with ids={ids}, account_id={account_id}")
        from .tasks import auto_generate_ideas_task
        from kombu.exceptions import OperationalError as KombuOperationalError
        if hasattr(auto_generate_ideas_task, 'delay'):
            try:
                # Celery is available - queue async task
                logger.info("Queuing Celery task...")
                task = auto_generate_ideas_task.delay(ids, account_id=account_id)
                logger.info(f"Task queued successfully: {task.id}")
                return Response({
                    'success': True,
                    'task_id': str(task.id),
                    'message': 'Idea generation started'
                }, status=status.HTTP_200_OK)
            except (KombuOperationalError, ConnectionError) as e:
                # Celery connection failed - execute synchronously
                logger.warning(f"Celery connection failed, executing synchronously: {e}")
                return _run_synchronously(auto_generate_ideas_task)
        # Celery not available - execute synchronously
        logger.info("Celery not available, executing synchronously")
        return _run_synchronously(auto_generate_ideas_task)
    except ImportError as e:
        logger.error(f"ImportError in auto_generate_ideas: {e}", exc_info=True)
        return Response({
            'success': False,
            'error': 'AI tasks module not available'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        error_type = type(e).__name__
        error_msg = str(e)
        logger.error(f"Error in auto_generate_ideas: {error_type}: {error_msg}", exc_info=True)
        return Response({
            'success': False,
            'error': f'Unexpected error: {error_msg}'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# REMOVED: auto_generate_ideas action - idea generation function removed
def list(self, request, *args, **kwargs):
"""
@@ -938,95 +846,4 @@ class ContentIdeasViewSet(SiteSectorModelViewSet):
'message': f'Successfully queued {len(created_tasks)} ideas to writer'
}, status=status.HTTP_200_OK)
@action(detail=True, methods=['post'], url_path='generate_idea', url_name='generate_idea')
def generate_idea(self, request, pk=None):
    """Generate a single content idea for a cluster using AI.

    Queues generate_single_idea_task via Celery when available; falls back to
    synchronous execution when the broker is unreachable or Celery is absent.
    """
    import logging
    logger = logging.getLogger(__name__)

    def _idea_response(result):
        # Translate a core-function result dict into an HTTP Response.
        if result.get('success'):
            return Response({
                'success': True,
                'idea_created': result.get('idea_created', 0),
                'message': 'Idea generated successfully'
            }, status=status.HTTP_200_OK)
        return Response({
            'success': False,
            'error': result.get('error', 'Idea generation failed')
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    try:
        cluster_id = request.data.get('cluster_id')
        if not cluster_id:
            return Response({'error': 'cluster_id is required'}, status=status.HTTP_400_BAD_REQUEST)
        # Resolve account id defensively - request.account may raise
        # RelatedObjectDoesNotExist on access.
        account_id = None
        try:
            account = getattr(request, 'account', None)
            if account:
                account_id = getattr(account, 'pk', None) or getattr(account, 'id', None)
        except Exception as e:
            logger.error(f"Error getting account: {type(e).__name__}: {e}", exc_info=True)
            account_id = None
        # Try to queue Celery task, fall back to synchronous if Celery not available
        try:
            from .tasks import generate_single_idea_task
            from kombu.exceptions import OperationalError as KombuOperationalError
            if hasattr(generate_single_idea_task, 'delay'):
                try:
                    # Celery is available - queue async task
                    task = generate_single_idea_task.delay(cluster_id, account_id=account_id)
                    return Response({
                        'success': True,
                        'task_id': str(task.id),
                        'message': 'Idea generation started'
                    }, status=status.HTTP_200_OK)
                except (KombuOperationalError, ConnectionError) as e:
                    # Celery connection failed - execute synchronously
                    logger.warning(f"Celery connection failed, executing synchronously: {e}")
                    from igny8_core.ai.functions.generate_ideas import generate_ideas_core
                    return _idea_response(generate_ideas_core(cluster_id, account_id=account_id, progress_callback=None))
            # Celery not available - execute synchronously
            logger.info("Celery not available, executing synchronously")
            from .tasks import _generate_single_idea_core
            return _idea_response(_generate_single_idea_core(cluster_id, account_id=account_id, progress_callback=None))
        except ImportError as e:
            error_type = type(e).__name__
            error_msg = str(e)
            logger.error(f"Error importing tasks module: {error_type}: {error_msg}", exc_info=True)
            return Response({
                'success': False,
                'error': 'AI tasks module not available'
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        except Exception as e:
            error_type = type(e).__name__
            error_msg = str(e)
            logger.error(f"Error in generate_idea: {error_type}: {error_msg}", exc_info=True)
            return Response({
                'success': False,
                'error': f'Unexpected error: {error_msg}'
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        error_type = type(e).__name__
        error_msg = str(e)
        logger.error(f"Unexpected error in generate_idea: {error_type}: {error_msg}", exc_info=True)
        return Response({
            'success': False,
            'error': f'Unexpected error: {error_msg}'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
# REMOVED: generate_idea action - idea generation function removed