Add SEO fields to Tasks model, improve content generation response handling, and enhance progress bar animation

- Added primary_keyword, secondary_keywords, tags, and categories fields to Tasks model
- Updated generate_content function to handle full JSON response with all SEO fields
- Improved progress bar animation: smooth 1% increments every 300ms
- Enhanced step detection for content generation vs clustering vs ideas
- Fixed progress modal to show correct messages for each function type
- Added comprehensive logging to Keywords and Tasks pages for AI functions
- Fixed error handling to show meaningful error messages instead of generic failures
This commit is contained in:
Gitea Deploy
2025-11-09 21:22:34 +00:00
parent 09d22ab0e2
commit 961362e088
17340 changed files with 10636 additions and 2248776 deletions

View File

@@ -7,6 +7,8 @@ from typing import List
from django.db import transaction
from igny8_core.modules.planner.models import Keywords, Clusters, ContentIdeas
from igny8_core.utils.ai_processor import ai_processor
from igny8_core.ai.functions.generate_ideas import generate_ideas_core
from igny8_core.ai.tracker import ConsoleStepTracker
logger = logging.getLogger(__name__)
@@ -23,9 +25,19 @@ except ImportError:
return decorator
# ============================================================================
# DEPRECATED: This function is deprecated. Use the new AI framework instead.
# New path: views.py -> run_ai_task -> AIEngine -> AutoClusterFunction
# This function is kept for backward compatibility but should not be used.
# ============================================================================
def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, account_id: int = None, progress_callback=None):
"""
Core logic for clustering keywords. Can be called with or without Celery.
[DEPRECATED] Core logic for clustering keywords. Can be called with or without Celery.
⚠️ WARNING: This function is deprecated. Use the new AI framework instead:
- New path: views.py -> run_ai_task -> AIEngine -> AutoClusterFunction
- This function uses the old AIProcessor and does not use PromptRegistry
- Console logging may not work correctly in this path
Args:
keyword_ids: List of keyword IDs to cluster
@@ -33,7 +45,11 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
account_id: Account ID for account isolation
progress_callback: Optional function to call for progress updates (for Celery tasks)
"""
# Track request and response steps
# Initialize console step tracker for logging
tracker = ConsoleStepTracker('auto_cluster')
tracker.init(f"Starting keyword clustering for {len(keyword_ids)} keywords")
# Track request and response steps (for Celery progress callbacks)
request_steps = []
response_steps = []
@@ -56,6 +72,7 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
)
# Step 4: Keyword Loading & Validation
tracker.prep(f"Loading {len(keyword_ids)} keywords from database")
step_start = time.time()
keywords_queryset = Keywords.objects.filter(id__in=keyword_ids)
if account_id:
@@ -66,7 +83,9 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
keywords = list(keywords_queryset.select_related('account', 'site', 'site__account', 'sector', 'sector__site'))
if not keywords:
logger.warning(f"No keywords found for clustering: {keyword_ids}")
error_msg = f"No keywords found for clustering: {keyword_ids}"
logger.warning(error_msg)
tracker.error('Validation', error_msg)
request_steps.append({
'stepNumber': 4,
'stepName': 'Keyword Loading & Validation',
@@ -83,6 +102,7 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
)
return {'success': False, 'error': 'No keywords found', 'request_steps': request_steps, 'response_steps': response_steps}
tracker.prep(f"Loaded {len(keywords)} keywords successfully")
request_steps.append({
'stepNumber': 4,
'stepName': 'Keyword Loading & Validation',
@@ -329,10 +349,20 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
return {'success': False, 'error': f'Error preparing AI call: {str(e)}', 'request_steps': request_steps, 'response_steps': response_steps}
# Call AI with step tracking
result = processor.cluster_keywords(keyword_data, sector_name=sector_name, account=account, response_steps=response_steps, progress_callback=progress_callback)
tracker.ai_call(f"Sending {len(keyword_data)} keywords to AI for clustering")
result = processor.cluster_keywords(
keyword_data,
sector_name=sector_name,
account=account,
response_steps=response_steps,
progress_callback=progress_callback,
tracker=tracker # Pass tracker for console logging
)
if result.get('error'):
logger.error(f"AI clustering error: {result['error']}")
error_msg = f"AI clustering error: {result['error']}"
logger.error(error_msg)
tracker.error('AI_CALL', error_msg)
if progress_callback:
progress_callback(
state='FAILURE',
@@ -345,6 +375,9 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
)
return {'success': False, 'error': result['error'], 'request_steps': request_steps, 'response_steps': response_steps}
# Parse response
tracker.parse("Parsing AI response into cluster data")
# Update response_steps from result if available
if result.get('response_steps'):
response_steps.extend(result.get('response_steps', []))
@@ -369,6 +402,7 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
keywords_updated = 0
# Step 13: Database Transaction Start
tracker.save(f"Creating {len(clusters_data)} clusters in database")
step_start = time.time()
# Create/update clusters and assign keywords
# Note: account and sector are already extracted above to avoid database queries inside transaction
@@ -566,6 +600,7 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
# Final progress update
final_message = f"Clustering complete: {clusters_created} clusters created, {keywords_updated} keywords updated"
logger.info(final_message)
tracker.done(final_message)
if progress_callback:
progress_callback(
@@ -587,7 +622,9 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
}
except Exception as e:
logger.error(f"Error in auto_cluster_keywords_core: {str(e)}", exc_info=True)
error_msg = f"Error in auto_cluster_keywords_core: {str(e)}"
logger.error(error_msg, exc_info=True)
tracker.error('Exception', error_msg, exception=e)
if progress_callback:
progress_callback(
state='FAILURE',
@@ -607,10 +644,18 @@ def _auto_cluster_keywords_core(keyword_ids: List[int], sector_id: int = None, a
@shared_task(bind=True, max_retries=3)
# ============================================================================
# DEPRECATED: This Celery task is deprecated. Use run_ai_task instead.
# New path: views.py -> run_ai_task -> AIEngine -> AutoClusterFunction
# ============================================================================
def auto_cluster_keywords_task(self, keyword_ids: List[int], sector_id: int = None, account_id: int = None):
"""
Celery task wrapper for clustering keywords using AI.
Calls the core function with progress callback.
[DEPRECATED] Celery task wrapper for clustering keywords using AI.
⚠️ WARNING: This task is deprecated. Use the new AI framework instead:
- New path: views.py -> run_ai_task -> AIEngine -> AutoClusterFunction
- This task uses the old _auto_cluster_keywords_core function
- Console logging may not work correctly in this path
Args:
keyword_ids: List of keyword IDs to cluster

View File

@@ -716,96 +716,99 @@ class ClusterViewSet(SiteSectorModelViewSet):
@action(detail=False, methods=['post'], url_path='auto_generate_ideas', url_name='auto_generate_ideas')
def auto_generate_ideas(self, request):
"""Generate content ideas for clusters using AI"""
ids = request.data.get('ids', [])
"""Generate content ideas for clusters using AI - New unified framework"""
import logging
from igny8_core.ai.tasks import run_ai_task
from kombu.exceptions import OperationalError as KombuOperationalError
if not ids:
return Response({'error': 'No cluster IDs provided'}, status=status.HTTP_400_BAD_REQUEST)
logger = logging.getLogger(__name__)
if len(ids) > 5:
return Response({'error': 'Maximum 5 clusters allowed for idea generation'}, status=status.HTTP_400_BAD_REQUEST)
# Get account - handle RelatedObjectDoesNotExist
account = None
account_id = None
try:
# Get account
account = getattr(request, 'account', None)
if account:
# Access pk directly instead of id to avoid potential relationship access
account_id = getattr(account, 'pk', None) or getattr(account, 'id', None)
except Exception as e:
import logging
logger = logging.getLogger(__name__)
logger.error(f"Error getting account: {type(e).__name__}: {e}", exc_info=True)
account_id = None
# Try to queue Celery task, fall back to synchronous if Celery not available
try:
import logging
logger = logging.getLogger(__name__)
logger.info(f"auto_generate_ideas called with ids={ids}, account_id={account_id}")
account_id = account.id if account else None
from .tasks import auto_generate_ideas_task
from kombu.exceptions import OperationalError as KombuOperationalError
# Prepare payload
payload = {
'ids': request.data.get('ids', [])
}
if hasattr(auto_generate_ideas_task, 'delay'):
try:
# Celery is available - queue async task
logger.info("Queuing Celery task...")
task = auto_generate_ideas_task.delay(ids, account_id=account_id)
logger.info(f"Task queued successfully: {task.id}")
logger.info(f"auto_generate_ideas called with ids={payload['ids']}, account_id={account_id}")
# Validate basic input
if not payload['ids']:
return Response({
'success': False,
'error': 'No cluster IDs provided'
}, status=status.HTTP_400_BAD_REQUEST)
if len(payload['ids']) > 10:
return Response({
'success': False,
'error': 'Maximum 10 clusters allowed for idea generation'
}, status=status.HTTP_400_BAD_REQUEST)
# Try to queue Celery task
try:
if hasattr(run_ai_task, 'delay'):
task = run_ai_task.delay(
function_name='generate_ideas',
payload=payload,
account_id=account_id
)
logger.info(f"Task queued: {task.id}")
return Response({
'success': True,
'task_id': str(task.id),
'message': 'Idea generation started'
}, status=status.HTTP_200_OK)
except (KombuOperationalError, ConnectionError) as e:
# Celery connection failed - execute synchronously
logger.warning(f"Celery connection failed, executing synchronously: {e}")
result = auto_generate_ideas_task(ids, account_id=account_id)
else:
# Celery not available - execute synchronously
logger.warning("Celery not available, executing synchronously")
result = run_ai_task(
function_name='generate_ideas',
payload=payload,
account_id=account_id
)
if result.get('success'):
return Response({
'success': True,
'ideas_created': result.get('ideas_created', 0),
'message': 'Ideas generated successfully'
**result
}, status=status.HTTP_200_OK)
else:
return Response({
'success': False,
'error': result.get('error', 'Idea generation failed')
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
else:
# Celery not available - execute synchronously
logger.info("Celery not available, executing synchronously")
result = auto_generate_ideas_task(ids, account_id=account_id)
except (KombuOperationalError, ConnectionError) as e:
# Broker connection failed - fall back to synchronous execution
logger.warning(f"Celery broker unavailable, falling back to synchronous execution: {str(e)}")
result = run_ai_task(
function_name='generate_ideas',
payload=payload,
account_id=account_id
)
if result.get('success'):
return Response({
'success': True,
'ideas_created': result.get('ideas_created', 0),
'message': 'Ideas generated successfully'
**result
}, status=status.HTTP_200_OK)
else:
return Response({
'success': False,
'error': result.get('error', 'Idea generation failed')
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
except ImportError as e:
import logging
logger = logging.getLogger(__name__)
logger.error(f"ImportError in auto_generate_ideas: {e}", exc_info=True)
return Response({
'success': False,
'error': 'AI tasks module not available'
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
except Exception as e:
logger.error(f"Error in auto_generate_ideas: {str(e)}", exc_info=True)
return Response({
'success': False,
'error': str(e)
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
except Exception as e:
import logging
logger = logging.getLogger(__name__)
error_type = type(e).__name__
error_msg = str(e)
logger.error(f"Error in auto_generate_ideas: {error_type}: {error_msg}", exc_info=True)
logger.error(f"Unexpected error in auto_generate_ideas: {str(e)}", exc_info=True)
return Response({
'success': False,
'error': f'Unexpected error: {error_msg}'
'error': f'Unexpected error: {str(e)}'
}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def list(self, request, *args, **kwargs):
@@ -977,8 +980,8 @@ class ContentIdeasViewSet(SiteSectorModelViewSet):
except (KombuOperationalError, ConnectionError) as e:
# Celery connection failed - execute synchronously
logger.warning(f"Celery connection failed, executing synchronously: {e}")
from .tasks import _generate_single_idea_core
result = _generate_single_idea_core(cluster_id, account_id=account_id, progress_callback=None)
from igny8_core.ai.functions.generate_ideas import generate_ideas_core
result = generate_ideas_core(cluster_id, account_id=account_id, progress_callback=None)
if result.get('success'):
return Response({
'success': True,

View File

@@ -777,25 +777,72 @@ class IntegrationSettingsViewSet(viewsets.ViewSet):
try:
task_state = task.state
except (ValueError, KeyError) as state_exc:
# Task has malformed exception info - try to get error from result
# Task has malformed exception info - try to get error from multiple sources
logger.warning(f"Error accessing task.state (malformed exception info): {str(state_exc)}")
error_msg = 'Task failed - exception details unavailable'
error_type = 'UnknownError'
request_steps = []
response_steps = []
# First, try to get from backend's stored meta (most reliable for our update_state calls)
try:
# Try to get error from task.result
if hasattr(task, 'result'):
result = task.result
if isinstance(result, dict) and 'error' in result:
error_msg = result['error']
elif isinstance(result, str):
error_msg = result
except Exception:
pass
backend = task.backend
if hasattr(backend, 'get_task_meta'):
stored_meta = backend.get_task_meta(task_id)
if stored_meta and isinstance(stored_meta, dict):
meta = stored_meta.get('meta', {})
if isinstance(meta, dict):
if 'error' in meta:
error_msg = meta.get('error')
if 'error_type' in meta:
error_type = meta.get('error_type', error_type)
if 'request_steps' in meta:
request_steps = meta.get('request_steps', [])
if 'response_steps' in meta:
response_steps = meta.get('response_steps', [])
except Exception as e:
logger.debug(f"Error getting from backend meta: {str(e)}")
# Try to get error from task.result
if error_msg == 'Task failed - exception details unavailable':
try:
if hasattr(task, 'result'):
result = task.result
if isinstance(result, dict):
error_msg = result.get('error', error_msg)
error_type = result.get('error_type', error_type)
request_steps = result.get('request_steps', request_steps)
response_steps = result.get('response_steps', response_steps)
elif isinstance(result, str):
error_msg = result
except Exception as e:
logger.debug(f"Error extracting error from task.result: {str(e)}")
# Also try to get error from task.info
if error_msg == 'Task failed - exception details unavailable':
try:
if hasattr(task, 'info') and task.info:
if isinstance(task.info, dict):
if 'error' in task.info:
error_msg = task.info['error']
if 'error_type' in task.info:
error_type = task.info['error_type']
if 'request_steps' in task.info:
request_steps = task.info.get('request_steps', request_steps)
if 'response_steps' in task.info:
response_steps = task.info.get('response_steps', response_steps)
except Exception as e:
logger.debug(f"Error extracting error from task.info: {str(e)}")
return Response({
'state': 'FAILURE',
'meta': {
'error': error_msg,
'error_type': error_type,
'percentage': 0,
'message': f'Error: {error_msg}',
'request_steps': request_steps,
'response_steps': response_steps,
}
})
except (KombuOperationalError, RedisConnectionError, ConnectionError) as conn_exc:
@@ -834,15 +881,29 @@ class IntegrationSettingsViewSet(viewsets.ViewSet):
})
# Safely get task info/result
# Try to get error from task.result first (before it gets malformed)
# Try to get error from multiple sources
task_result = None
task_info = None
error_message = None
error_type = None
# First, try to get from backend's stored meta (most reliable for our update_state calls)
try:
backend = task.backend
if hasattr(backend, 'get_task_meta'):
stored_meta = backend.get_task_meta(task_id)
if stored_meta and isinstance(stored_meta, dict):
meta = stored_meta.get('meta', {})
if isinstance(meta, dict):
if 'error' in meta:
error_message = meta.get('error')
error_type = meta.get('error_type', 'UnknownError')
except Exception as backend_err:
logger.debug(f"Could not get from backend meta: {backend_err}")
try:
# Try to get result first - this often has the actual error
if hasattr(task, 'result'):
if not error_message and hasattr(task, 'result'):
try:
task_result = task.result
# If result is a dict with error, extract it
@@ -850,6 +911,9 @@ class IntegrationSettingsViewSet(viewsets.ViewSet):
if 'error' in task_result:
error_message = task_result.get('error')
error_type = task_result.get('error_type', 'UnknownError')
elif 'success' in task_result and not task_result.get('success'):
error_message = task_result.get('error', 'Task failed')
error_type = task_result.get('error_type', 'UnknownError')
except Exception:
pass # Will try task.info next
except Exception:
@@ -971,6 +1035,25 @@ class IntegrationSettingsViewSet(viewsets.ViewSet):
else:
error_message = str(error_info) if error_info else 'Task failed'
# If still no error message, try to get from task backend directly
if not error_message:
try:
# Try to get from backend's stored result
backend = task.backend
if hasattr(backend, 'get'):
stored = backend.get(task_id)
if stored and isinstance(stored, dict):
if 'error' in stored:
error_message = stored['error']
elif isinstance(stored.get('result'), dict) and 'error' in stored['result']:
error_message = stored['result']['error']
except Exception as backend_err:
logger.warning(f"Error getting from backend: {backend_err}")
# Final fallback
if not error_message:
error_message = 'Task failed - check backend logs for details'
response_meta = {
'error': error_message,
'percentage': 0,
@@ -992,6 +1075,13 @@ class IntegrationSettingsViewSet(viewsets.ViewSet):
# Also include error_type if available in meta
if 'error_type' in meta and not error_type:
response_meta['error_type'] = meta['error_type']
# Also check for error in meta directly
if 'error' in meta and not error_message:
error_message = meta['error']
response_meta['error'] = error_message
if 'error_type' in meta and not error_type:
error_type = meta['error_type']
response_meta['error_type'] = error_type
return Response({
'state': task_state,

View File

@@ -0,0 +1,35 @@
# Generated migration for adding SEO fields to Tasks model
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add SEO metadata fields to the writer app's ``Tasks`` model.

    Adds four columns: ``primary_keyword`` (single free-text keyword) and
    three JSON list fields (``secondary_keywords``, ``tags``,
    ``categories``). All fields are optional (``blank=True``) so existing
    rows migrate without a data backfill.
    """

    dependencies = [
        # Immediately preceding migration in the writer app.
        ('writer', '0003_alter_content_options_alter_images_options_and_more'),
        # NOTE(review): cross-app dependency on the auth app — presumably
        # required because the Tasks model (via its base model) references
        # models defined there; confirm against the model definitions.
        ('igny8_core_auth', '0008_passwordresettoken_alter_industry_options_and_more'),
    ]

    operations = [
        # Single primary keyword; nullable so pre-existing rows stay valid.
        migrations.AddField(
            model_name='tasks',
            name='primary_keyword',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        # JSON list fields use the callable ``list`` as default, so each new
        # row gets its own empty list rather than a shared mutable instance.
        migrations.AddField(
            model_name='tasks',
            name='secondary_keywords',
            field=models.JSONField(blank=True, default=list, help_text='List of secondary keywords'),
        ),
        migrations.AddField(
            model_name='tasks',
            name='tags',
            field=models.JSONField(blank=True, default=list, help_text='List of tags'),
        ),
        migrations.AddField(
            model_name='tasks',
            name='categories',
            field=models.JSONField(blank=True, default=list, help_text='List of categories'),
        ),
    ]

View File

@@ -65,6 +65,10 @@ class Tasks(SiteSectorBaseModel):
# SEO fields
meta_title = models.CharField(max_length=255, blank=True, null=True)
meta_description = models.TextField(blank=True, null=True)
primary_keyword = models.CharField(max_length=255, blank=True, null=True)
secondary_keywords = models.JSONField(default=list, blank=True, help_text="List of secondary keywords")
tags = models.JSONField(default=list, blank=True, help_text="List of tags")
categories = models.JSONField(default=list, blank=True, help_text="List of categories")
# WordPress integration
assigned_post_id = models.IntegerField(null=True, blank=True) # WordPress post ID if published

View File

@@ -8,6 +8,8 @@ from django.db import transaction
from igny8_core.modules.writer.models import Tasks, Images, Content
from igny8_core.utils.ai_processor import ai_processor
from igny8_core.modules.system.utils import get_prompt_value, get_default_prompt
from igny8_core.ai.functions.generate_content import generate_content_core
from igny8_core.ai.functions.generate_images import generate_images_core
logger = logging.getLogger(__name__)