Add SEO fields to Tasks model, improve content generation response handling, and enhance progress bar animation

- Added primary_keyword, secondary_keywords, tags, and categories fields to Tasks model
- Updated generate_content function to handle full JSON response with all SEO fields
- Improved progress bar animation: smooth 1% increments every 300ms
- Enhanced step detection for content generation vs clustering vs ideas
- Fixed progress modal to show correct messages for each function type
- Added comprehensive logging to Keywords and Tasks pages for AI functions
- Fixed error handling to show meaningful error messages instead of generic failures
This commit is contained in: Gitea Deploy — authored 2025-11-09 21:22:34 +00:00
parent 09d22ab0e2
commit 961362e088
17340 changed files with 10636 additions and 2248776 deletions

View File

@@ -716,96 +716,99 @@ class ClusterViewSet(SiteSectorModelViewSet):
@action(detail=False, methods=['post'], url_path='auto_generate_ideas', url_name='auto_generate_ideas')
def auto_generate_ideas(self, request):
    """Generate content ideas for clusters using AI - New unified framework.

    POST body:
        ids (list): cluster IDs to generate ideas for (1-10 items).

    Returns a DRF Response:
        200 with ``task_id`` when the Celery task was queued, or with the
        synchronous result spread into the body when Celery is unavailable;
        400 on validation failure; 500 on execution errors.
    """
    import logging
    from igny8_core.ai.tasks import run_ai_task
    from kombu.exceptions import OperationalError as KombuOperationalError

    logger = logging.getLogger(__name__)

    # Resolve the account defensively: accessing request.account can raise
    # RelatedObjectDoesNotExist, and some account objects expose pk but not id.
    account_id = None
    try:
        account = getattr(request, 'account', None)
        if account:
            # Access pk directly instead of id to avoid potential relationship access
            account_id = getattr(account, 'pk', None) or getattr(account, 'id', None)
    except Exception as e:
        logger.error(f"Error getting account: {type(e).__name__}: {e}", exc_info=True)
        account_id = None

    try:
        # Prepare payload for the unified AI task framework.
        payload = {
            'ids': request.data.get('ids', [])
        }
        logger.info(f"auto_generate_ideas called with ids={payload['ids']}, account_id={account_id}")

        # Validate basic input before touching the task queue.
        if not payload['ids']:
            return Response({
                'success': False,
                'error': 'No cluster IDs provided'
            }, status=status.HTTP_400_BAD_REQUEST)
        if len(payload['ids']) > 10:
            return Response({
                'success': False,
                'error': 'Maximum 10 clusters allowed for idea generation'
            }, status=status.HTTP_400_BAD_REQUEST)

        # Try to queue the Celery task; fall back to synchronous execution
        # when Celery is not wired up or the broker is unreachable.
        try:
            if hasattr(run_ai_task, 'delay'):
                # Celery is available - queue async task.
                task = run_ai_task.delay(
                    function_name='generate_ideas',
                    payload=payload,
                    account_id=account_id
                )
                logger.info(f"Task queued: {task.id}")
                return Response({
                    'success': True,
                    'task_id': str(task.id),
                    'message': 'Idea generation started'
                }, status=status.HTTP_200_OK)
            else:
                # Celery not available - execute synchronously.
                logger.warning("Celery not available, executing synchronously")
                result = run_ai_task(
                    function_name='generate_ideas',
                    payload=payload,
                    account_id=account_id
                )
                if result.get('success'):
                    return Response({
                        'success': True,
                        **result
                    }, status=status.HTTP_200_OK)
                return Response({
                    'success': False,
                    'error': result.get('error', 'Idea generation failed')
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        except (KombuOperationalError, ConnectionError) as e:
            # Broker connection failed - fall back to synchronous execution.
            logger.warning(f"Celery broker unavailable, falling back to synchronous execution: {str(e)}")
            result = run_ai_task(
                function_name='generate_ideas',
                payload=payload,
                account_id=account_id
            )
            if result.get('success'):
                return Response({
                    'success': True,
                    **result
                }, status=status.HTTP_200_OK)
            return Response({
                'success': False,
                'error': result.get('error', 'Idea generation failed')
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except ImportError as e:
        # The unified AI framework module could not be imported at all.
        logger.error(f"ImportError in auto_generate_ideas: {e}", exc_info=True)
        return Response({
            'success': False,
            'error': 'AI tasks module not available'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    except Exception as e:
        # Last-resort guard so the endpoint always returns JSON, not a 500 page.
        logger.error(f"Unexpected error in auto_generate_ideas: {str(e)}", exc_info=True)
        return Response({
            'success': False,
            'error': f'Unexpected error: {str(e)}'
        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def list(self, request, *args, **kwargs):
@@ -977,8 +980,8 @@ class ContentIdeasViewSet(SiteSectorModelViewSet):
except (KombuOperationalError, ConnectionError) as e:
# Celery connection failed - execute synchronously
logger.warning(f"Celery connection failed, executing synchronously: {e}")
from .tasks import _generate_single_idea_core
result = _generate_single_idea_core(cluster_id, account_id=account_id, progress_callback=None)
from igny8_core.ai.functions.generate_ideas import generate_ideas_core
result = generate_ideas_core(cluster_id, account_id=account_id, progress_callback=None)
if result.get('success'):
return Response({
'success': True,