# Changelog:
# - Introduced a new endpoint in the backend to handle bulk updates of image
#   statuses by content ID or image IDs.
# - Updated the frontend to include a new row action for updating image status
#   and integrated a modal for status confirmation.
# - Enhanced the API service to support bulk status updates and updated the
#   images page to manage status updates effectively.
from rest_framework import viewsets, filters, status
|
|
from rest_framework.decorators import action
|
|
from rest_framework.response import Response
|
|
from django_filters.rest_framework import DjangoFilterBackend
|
|
from django.db import transaction, models
|
|
from django.db.models import Q
|
|
from igny8_core.api.base import SiteSectorModelViewSet
|
|
from igny8_core.api.pagination import CustomPageNumberPagination
|
|
from .models import Tasks, Images, Content
|
|
from .serializers import TasksSerializer, ImagesSerializer, ContentSerializer
|
|
|
|
|
|
class TasksViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing tasks with CRUD operations

    Also exposes bulk actions (``bulk_delete``, ``bulk_update``) and an
    AI-driven ``auto_generate_content`` action that queues a Celery job and
    falls back to synchronous execution when the queue is unavailable.
    """
    queryset = Tasks.objects.select_related('content_record')
    serializer_class = TasksSerializer
    pagination_class = CustomPageNumberPagination  # Explicitly use custom pagination

    # DRF filtering configuration
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]

    # Search configuration
    search_fields = ['title', 'keywords']

    # Ordering configuration
    ordering_fields = ['title', 'created_at', 'word_count', 'status']
    ordering = ['-created_at']  # Default ordering (newest first)

    # Filter configuration
    filterset_fields = ['status', 'cluster_id', 'content_type', 'content_structure']

    def perform_create(self, serializer):
        """Require explicit site_id and sector_id - no defaults.

        Resolves ``site_id``/``sector_id`` from the serializer payload first,
        then from the query string; validates both exist and belong together,
        and saves the task with the resolved account/site/sector objects.

        Raises:
            ValidationError: when either id is missing, unknown, or the
                sector does not belong to the selected site.
        """
        user = getattr(self.request, 'user', None)

        # DRF requests expose `query_params`; plain Django requests only have GET.
        try:
            query_params = getattr(self.request, 'query_params', None)
            if query_params is None:
                query_params = getattr(self.request, 'GET', {})
        except AttributeError:
            query_params = {}

        # Body values win; query params are only a fallback.
        site_id = serializer.validated_data.get('site_id') or query_params.get('site_id')
        sector_id = serializer.validated_data.get('sector_id') or query_params.get('sector_id')

        from igny8_core.auth.models import Site, Sector
        from rest_framework.exceptions import ValidationError

        # Site ID is REQUIRED
        if not site_id:
            raise ValidationError("site_id is required. Please select a site.")

        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            raise ValidationError(f"Site with id {site_id} does not exist")

        # Sector ID is REQUIRED
        if not sector_id:
            raise ValidationError("sector_id is required. Please select a sector.")

        try:
            sector = Sector.objects.get(id=sector_id)
            # NOTE(review): when site_id comes from query params it is a str
            # while sector.site_id is typically an int pk - confirm this
            # comparison cannot spuriously fail for valid input.
            if sector.site_id != site_id:
                raise ValidationError(f"Sector does not belong to the selected site")
        except Sector.DoesNotExist:
            raise ValidationError(f"Sector with id {sector_id} does not exist")

        # site_id/sector_id were only inputs for resolution; strip them so the
        # model receives the resolved objects instead of raw ids.
        serializer.validated_data.pop('site_id', None)
        serializer.validated_data.pop('sector_id', None)

        # Account resolution order: request.account -> authenticated user's
        # account -> the selected site's own account.
        account = getattr(self.request, 'account', None)
        if not account and user and user.is_authenticated and user.account:
            account = user.account
        if not account:
            account = site.account

        serializer.save(account=account, site=site, sector=sector)

    @action(detail=False, methods=['POST'], url_path='bulk_delete', url_name='bulk_delete')
    def bulk_delete(self, request):
        """Bulk delete tasks

        Expects ``{"ids": [...]}`` and deletes the matching rows from the
        account-scoped queryset, returning the number deleted.
        """
        ids = request.data.get('ids', [])
        if not ids:
            return Response({'error': 'No IDs provided'}, status=status.HTTP_400_BAD_REQUEST)

        queryset = self.get_queryset()
        # QuerySet.delete() returns (total_deleted, per-model breakdown).
        deleted_count, _ = queryset.filter(id__in=ids).delete()

        return Response({'deleted_count': deleted_count}, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'], url_path='bulk_update', url_name='bulk_update')
    def bulk_update(self, request):
        """Bulk update task status

        Expects ``{"ids": [...], "status": "..."}``; applies the status to
        every matching task in one UPDATE and returns the affected count.
        NOTE(review): the status value is not validated against the model's
        allowed choices here - confirm the serializer/model enforces it.
        """
        ids = request.data.get('ids', [])
        status_value = request.data.get('status')

        if not ids:
            return Response({'error': 'No IDs provided'}, status=status.HTTP_400_BAD_REQUEST)
        if not status_value:
            return Response({'error': 'No status provided'}, status=status.HTTP_400_BAD_REQUEST)

        queryset = self.get_queryset()
        updated_count = queryset.filter(id__in=ids).update(status=status_value)

        return Response({'updated_count': updated_count}, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'], url_path='auto_generate_content', url_name='auto_generate_content')
    def auto_generate_content(self, request):
        """Auto-generate content for tasks using AI

        Flow: validate ids (max 10) -> verify the ids exist -> queue the
        Celery task; if queuing fails, run synchronously; if the AI tasks
        module is missing entirely, just flip task statuses. Every failure
        mode maps to a JSON body with a ``type`` discriminator.
        """
        import logging
        from django.db import OperationalError, DatabaseError, IntegrityError
        from django.core.exceptions import ValidationError

        logger = logging.getLogger(__name__)

        # Outer try: last-resort guard so the endpoint always returns JSON.
        try:
            ids = request.data.get('ids', [])
            if not ids:
                logger.warning("auto_generate_content: No IDs provided")
                return Response({
                    'error': 'No IDs provided',
                    'type': 'ValidationError'
                }, status=status.HTTP_400_BAD_REQUEST)

            # Hard cap keeps AI generation batches small.
            if len(ids) > 10:
                logger.warning(f"auto_generate_content: Too many IDs provided: {len(ids)}")
                return Response({
                    'error': 'Maximum 10 tasks allowed for content generation',
                    'type': 'ValidationError'
                }, status=status.HTTP_400_BAD_REQUEST)

            logger.info(f"auto_generate_content: Processing {len(ids)} task IDs: {ids}")

            # Get account
            account = getattr(request, 'account', None)
            account_id = account.id if account else None
            logger.info(f"auto_generate_content: Account ID: {account_id}")

            # Validate task IDs exist in database before proceeding
            try:
                queryset = self.get_queryset()
                existing_tasks = queryset.filter(id__in=ids)
                existing_count = existing_tasks.count()
                existing_ids = list(existing_tasks.values_list('id', flat=True))

                logger.info(f"auto_generate_content: Found {existing_count} existing tasks out of {len(ids)} requested")
                logger.info(f"auto_generate_content: Existing task IDs: {existing_ids}")

                if existing_count == 0:
                    logger.error(f"auto_generate_content: No tasks found for IDs: {ids}")
                    return Response({
                        'error': f'No tasks found for the provided IDs: {ids}',
                        'type': 'NotFound',
                        'requested_ids': ids
                    }, status=status.HTTP_404_NOT_FOUND)

                if existing_count < len(ids):
                    missing_ids = set(ids) - set(existing_ids)
                    logger.warning(f"auto_generate_content: Some task IDs not found: {missing_ids}")
                    # Continue with existing tasks, but log warning

            except (OperationalError, DatabaseError) as db_error:
                logger.error("=" * 80)
                logger.error("DATABASE ERROR: Failed to query tasks")
                logger.error(f" - Error type: {type(db_error).__name__}")
                logger.error(f" - Error message: {str(db_error)}")
                logger.error(f" - Requested IDs: {ids}")
                logger.error(f" - Account ID: {account_id}")
                logger.error("=" * 80, exc_info=True)

                return Response({
                    'error': f'Database error while querying tasks: {str(db_error)}',
                    'type': 'OperationalError',
                    'details': 'Failed to retrieve tasks from database. Please check database connection and try again.'
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

            # Try to queue Celery task, fall back to synchronous if Celery not available
            try:
                from igny8_core.ai.tasks import run_ai_task
                from kombu.exceptions import OperationalError as KombuOperationalError

                if hasattr(run_ai_task, 'delay'):
                    # Celery is available - queue async task
                    logger.info(f"auto_generate_content: Queuing Celery task for {len(ids)} tasks")
                    try:
                        task = run_ai_task.delay(
                            function_name='generate_content',
                            payload={'ids': ids},
                            account_id=account_id
                        )
                        logger.info(f"auto_generate_content: Celery task queued successfully: {task.id}")
                        return Response({
                            'success': True,
                            'task_id': str(task.id),
                            'message': 'Content generation started'
                        }, status=status.HTTP_200_OK)
                    except KombuOperationalError as celery_error:
                        # Broker unreachable: surface as 503 so clients can retry.
                        logger.error("=" * 80)
                        logger.error("CELERY ERROR: Failed to queue task")
                        logger.error(f" - Error type: {type(celery_error).__name__}")
                        logger.error(f" - Error message: {str(celery_error)}")
                        logger.error(f" - Task IDs: {ids}")
                        logger.error(f" - Account ID: {account_id}")
                        logger.error("=" * 80, exc_info=True)

                        return Response({
                            'error': 'Task queue unavailable. Please try again.',
                            'type': 'QueueError'
                        }, status=status.HTTP_503_SERVICE_UNAVAILABLE)
                    except Exception as celery_error:
                        # Any other queuing failure: log it, then run inline.
                        logger.error("=" * 80)
                        logger.error("CELERY ERROR: Failed to queue task")
                        logger.error(f" - Error type: {type(celery_error).__name__}")
                        logger.error(f" - Error message: {str(celery_error)}")
                        logger.error(f" - Task IDs: {ids}")
                        logger.error(f" - Account ID: {account_id}")
                        logger.error("=" * 80, exc_info=True)

                        # Fall back to synchronous execution
                        logger.info("auto_generate_content: Falling back to synchronous execution")
                        result = run_ai_task(
                            function_name='generate_content',
                            payload={'ids': ids},
                            account_id=account_id
                        )
                        if result.get('success'):
                            return Response({
                                'success': True,
                                'tasks_updated': result.get('count', 0),
                                'message': 'Content generated successfully (synchronous)'
                            }, status=status.HTTP_200_OK)
                        else:
                            return Response({
                                'error': result.get('error', 'Content generation failed'),
                                'type': 'TaskExecutionError'
                            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
                else:
                    # Celery not available - execute synchronously
                    logger.info(f"auto_generate_content: Executing synchronously (Celery not available)")
                    result = run_ai_task(
                        function_name='generate_content',
                        payload={'ids': ids},
                        account_id=account_id
                    )
                    if result.get('success'):
                        logger.info(f"auto_generate_content: Synchronous execution successful: {result.get('count', 0)} tasks updated")
                        return Response({
                            'success': True,
                            'tasks_updated': result.get('count', 0),
                            'message': 'Content generated successfully'
                        }, status=status.HTTP_200_OK)
                    else:
                        logger.error(f"auto_generate_content: Synchronous execution failed: {result.get('error', 'Unknown error')}")
                        return Response({
                            'error': result.get('error', 'Content generation failed'),
                            'type': 'TaskExecutionError'
                        }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

            except ImportError as import_error:
                logger.error(f"auto_generate_content: ImportError - tasks module not available: {str(import_error)}")
                # Tasks module not available - update status only
                try:
                    queryset = self.get_queryset()
                    # Only 'queued' tasks are touched; others keep their state.
                    tasks = queryset.filter(id__in=ids, status='queued')
                    updated_count = tasks.update(status='completed', content='[AI content generation not available]')

                    logger.info(f"auto_generate_content: Updated {updated_count} tasks (AI generation not available)")
                    return Response({
                        'updated_count': updated_count,
                        'message': 'Tasks updated (AI generation not available)'
                    }, status=status.HTTP_200_OK)
                except (OperationalError, DatabaseError) as db_error:
                    logger.error("=" * 80)
                    logger.error("DATABASE ERROR: Failed to update tasks")
                    logger.error(f" - Error type: {type(db_error).__name__}")
                    logger.error(f" - Error message: {str(db_error)}")
                    logger.error("=" * 80, exc_info=True)
                    return Response({
                        'error': f'Database error while updating tasks: {str(db_error)}',
                        'type': 'OperationalError',
                        'details': 'Failed to update tasks in database. Please check database connection.'
                    }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

            except (OperationalError, DatabaseError) as db_error:
                logger.error("=" * 80)
                logger.error("DATABASE ERROR: Failed during task execution")
                logger.error(f" - Error type: {type(db_error).__name__}")
                logger.error(f" - Error message: {str(db_error)}")
                logger.error(f" - Task IDs: {ids}")
                logger.error(f" - Account ID: {account_id}")
                logger.error("=" * 80, exc_info=True)

                return Response({
                    'error': f'Database error during content generation: {str(db_error)}',
                    'type': 'OperationalError',
                    'details': 'A database operation failed. This may be due to connection issues, constraint violations, or data integrity problems. Check the logs for more details.'
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

            except IntegrityError as integrity_error:
                logger.error("=" * 80)
                logger.error("INTEGRITY ERROR: Data integrity violation")
                logger.error(f" - Error message: {str(integrity_error)}")
                logger.error(f" - Task IDs: {ids}")
                logger.error("=" * 80, exc_info=True)

                return Response({
                    'error': f'Data integrity error: {str(integrity_error)}',
                    'type': 'IntegrityError',
                    'details': 'The operation violated database constraints. This may indicate missing required relationships or invalid data.'
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

            except ValidationError as validation_error:
                logger.error(f"auto_generate_content: ValidationError: {str(validation_error)}")
                return Response({
                    'error': f'Validation error: {str(validation_error)}',
                    'type': 'ValidationError'
                }, status=status.HTTP_400_BAD_REQUEST)

            except Exception as e:
                logger.error("=" * 80)
                logger.error("UNEXPECTED ERROR in auto_generate_content")
                logger.error(f" - Error type: {type(e).__name__}")
                logger.error(f" - Error message: {str(e)}")
                logger.error(f" - Task IDs: {ids}")
                logger.error(f" - Account ID: {account_id}")
                logger.error("=" * 80, exc_info=True)

                return Response({
                    'error': f'Unexpected error: {str(e)}',
                    'type': type(e).__name__,
                    'details': 'An unexpected error occurred. Please check the logs for more details.'
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        except Exception as outer_error:
            # Catches failures that happen before `ids`/`account_id` exist.
            logger.error("=" * 80)
            logger.error("CRITICAL ERROR: Outer exception handler")
            logger.error(f" - Error type: {type(outer_error).__name__}")
            logger.error(f" - Error message: {str(outer_error)}")
            logger.error("=" * 80, exc_info=True)

            return Response({
                'error': f'Critical error: {str(outer_error)}',
                'type': type(outer_error).__name__
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
|
|
|
|
|
class ImagesViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing content images

    Provides CRUD for Images plus actions to serve the underlying file,
    trigger AI image generation, bulk-update statuses (by content or by id),
    and return a per-content grouped view of featured/in-article images.
    """
    queryset = Images.objects.all()
    serializer_class = ImagesSerializer

    filter_backends = [DjangoFilterBackend, filters.OrderingFilter]
    ordering_fields = ['created_at', 'position', 'id']
    ordering = ['-id']  # Sort by ID descending (newest first)
    filterset_fields = ['task_id', 'content_id', 'image_type', 'status']

    def perform_create(self, serializer):
        """Override to automatically set account when the request carries one."""
        account = getattr(self.request, 'account', None)
        if account:
            serializer.save(account=account)
        else:
            serializer.save()

    @action(detail=True, methods=['get'], url_path='file', url_name='image_file')
    def serve_image_file(self, request, pk=None):
        """
        Serve image file from local path via URL

        GET /api/v1/writer/images/{id}/file/
        """
        import os
        from django.http import FileResponse, Http404
        from django.conf import settings

        try:
            # Get image directly without account filtering for file serving
            # This allows public access to image files
            # NOTE(review): any image id is publicly fetchable here and
            # image_path is opened as-is - confirm this exposure is intended.
            try:
                image = Images.objects.get(pk=pk)
            except Images.DoesNotExist:
                return Response({
                    'error': 'Image not found'
                }, status=status.HTTP_404_NOT_FOUND)

            # Check if image has a local path
            if not image.image_path:
                return Response({
                    'error': 'No local file path available for this image'
                }, status=status.HTTP_404_NOT_FOUND)

            file_path = image.image_path

            # Verify file exists
            if not os.path.exists(file_path):
                return Response({
                    'error': f'Image file not found at: {file_path}'
                }, status=status.HTTP_404_NOT_FOUND)

            # Check if file is readable
            if not os.access(file_path, os.R_OK):
                return Response({
                    'error': 'Image file is not readable'
                }, status=status.HTTP_403_FORBIDDEN)

            # Determine content type from file extension
            import mimetypes
            content_type, _ = mimetypes.guess_type(file_path)
            if not content_type:
                content_type = 'image/png'  # Default to PNG

            # Serve the file; FileResponse takes ownership of the handle and
            # closes it when the response is consumed.
            try:
                return FileResponse(
                    open(file_path, 'rb'),
                    content_type=content_type,
                    filename=os.path.basename(file_path)
                )
            except Exception as e:
                return Response({
                    'error': f'Failed to serve file: {str(e)}'
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        except Exception as e:
            import logging
            logger = logging.getLogger(__name__)
            logger.error(f"Error serving image file: {str(e)}", exc_info=True)
            return Response({
                'error': f'Failed to serve image: {str(e)}'
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    @action(detail=False, methods=['post'], url_path='auto_generate', url_name='auto_generate_images')
    def auto_generate_images(self, request):
        """Auto-generate images for tasks using AI.

        Queues the ``generate_images`` AI task via Celery; when Celery is not
        wired up, runs it synchronously. Batches are capped at 10 tasks.
        """
        task_ids = request.data.get('task_ids', [])
        if not task_ids:
            return Response({'error': 'No task IDs provided'}, status=status.HTTP_400_BAD_REQUEST)

        if len(task_ids) > 10:
            return Response({'error': 'Maximum 10 tasks allowed for image generation'}, status=status.HTTP_400_BAD_REQUEST)

        # Get account
        account = getattr(request, 'account', None)
        account_id = account.id if account else None

        # Try to queue Celery task, fall back to synchronous if Celery not available
        try:
            from igny8_core.ai.tasks import run_ai_task
            from kombu.exceptions import OperationalError as KombuOperationalError

            if hasattr(run_ai_task, 'delay'):
                # Celery is available - queue async task
                task = run_ai_task.delay(
                    function_name='generate_images',
                    payload={'ids': task_ids},
                    account_id=account_id
                )
                return Response({
                    'success': True,
                    'task_id': str(task.id),
                    'message': 'Image generation started'
                }, status=status.HTTP_200_OK)
            else:
                # Celery not available - execute synchronously
                result = run_ai_task(
                    function_name='generate_images',
                    payload={'ids': task_ids},
                    account_id=account_id
                )
                if result.get('success'):
                    return Response({
                        'success': True,
                        'images_created': result.get('count', 0),
                        'message': result.get('message', 'Image generation completed')
                    }, status=status.HTTP_200_OK)
                else:
                    return Response({
                        'error': result.get('error', 'Image generation failed')
                    }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        except KombuOperationalError as e:
            # Broker unreachable - clients may retry later.
            return Response({
                'error': 'Task queue unavailable. Please try again.',
                'type': 'QueueError'
            }, status=status.HTTP_503_SERVICE_UNAVAILABLE)
        except ImportError:
            # Tasks module not available
            return Response({
                'error': 'Image generation task not available'
            }, status=status.HTTP_503_SERVICE_UNAVAILABLE)
        except Exception as e:
            import logging
            logger = logging.getLogger(__name__)
            logger.error(f"Error queuing image generation task: {str(e)}", exc_info=True)
            return Response({
                'error': f'Failed to start image generation: {str(e)}',
                'type': 'TaskError'
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    @action(detail=False, methods=['post'], url_path='bulk_update', url_name='bulk_update')
    def bulk_update(self, request):
        """Bulk update image status by content_id or image IDs.

        Payload: ``{"status": "...", "content_id": X}`` to update every image
        of one content record, or ``{"status": "...", "ids": [...]}`` for an
        explicit list. ``content_id`` wins when both are supplied.
        """
        content_id = request.data.get('content_id')
        image_ids = request.data.get('ids', [])
        status_value = request.data.get('status')

        if not status_value:
            return Response({'error': 'No status provided'}, status=status.HTTP_400_BAD_REQUEST)

        queryset = self.get_queryset()

        # Update by content_id if provided, otherwise by image IDs
        if content_id:
            updated_count = queryset.filter(content_id=content_id).update(status=status_value)
        elif image_ids:
            updated_count = queryset.filter(id__in=image_ids).update(status=status_value)
        else:
            return Response({'error': 'Either content_id or ids must be provided'}, status=status.HTTP_400_BAD_REQUEST)

        return Response({'updated_count': updated_count}, status=status.HTTP_200_OK)

    @action(detail=False, methods=['get'], url_path='content_images', url_name='content_images')
    def content_images(self, request):
        """Get images grouped by content - one row per content with featured and in-article images.

        Collects content ids from direct Content->Images links plus images
        attached only via a task, then builds one summary row per content.
        NOTE(review): this issues one query per content id (N+1) - acceptable
        for small result sets, revisit if content volume grows.
        """
        from .serializers import ContentImagesGroupSerializer, ContentImageSerializer

        account = getattr(request, 'account', None)

        # Get all content that has images (either directly or via task)
        # First, get content with direct image links
        queryset = Content.objects.filter(images__isnull=False)
        if account:
            queryset = queryset.filter(account=account)

        # Also get content from images linked via task
        task_linked_images = Images.objects.filter(task__isnull=False, content__isnull=True)
        if account:
            task_linked_images = task_linked_images.filter(account=account)

        # Get content IDs from task-linked images
        task_content_ids = set()
        for image in task_linked_images:
            if image.task and hasattr(image.task, 'content_record'):
                try:
                    content = image.task.content_record
                    if content:
                        task_content_ids.add(content.id)
                except Exception:
                    # Best-effort: a broken task->content link just skips the image.
                    pass

        # Combine both sets of content IDs
        content_ids = set(queryset.values_list('id', flat=True).distinct())
        content_ids.update(task_content_ids)

        # Build grouped response
        grouped_data = []
        for content_id in content_ids:
            try:
                content = Content.objects.get(id=content_id)
                # Get images linked directly to content OR via task
                content_images = Images.objects.filter(
                    Q(content=content) | Q(task=content.task)
                ).order_by('position')

                # Get featured image
                featured_image = content_images.filter(image_type='featured').first()

                # Get in-article images (sorted by position)
                in_article_images = list(content_images.filter(image_type='in_article').order_by('position'))

                # Determine overall status: complete > failed > partial > pending.
                all_images = list(content_images)
                if not all_images:
                    overall_status = 'pending'
                elif all(img.status == 'generated' for img in all_images):
                    overall_status = 'complete'
                elif any(img.status == 'failed' for img in all_images):
                    overall_status = 'failed'
                elif any(img.status == 'generated' for img in all_images):
                    overall_status = 'partial'
                else:
                    overall_status = 'pending'

                grouped_data.append({
                    'content_id': content.id,
                    'content_title': content.title or content.meta_title or f"Content #{content.id}",
                    'featured_image': ContentImageSerializer(featured_image).data if featured_image else None,
                    'in_article_images': [ContentImageSerializer(img).data for img in in_article_images],
                    'overall_status': overall_status,
                })
            except Content.DoesNotExist:
                continue

        # Sort by content title
        grouped_data.sort(key=lambda x: x['content_title'])

        return Response({
            'count': len(grouped_data),
            'results': grouped_data
        }, status=status.HTTP_200_OK)

    @action(detail=False, methods=['post'], url_path='generate_images', url_name='generate_images')
    def generate_images(self, request):
        """Generate images from prompts - queues Celery task for sequential processing"""
        import logging
        from igny8_core.ai.tasks import process_image_generation_queue

        # BUG FIX: the except-branch below logged via `logger`, which was
        # never defined in this method and raised NameError instead of
        # returning the intended 500 response.
        logger = logging.getLogger(__name__)

        account = getattr(request, 'account', None)
        image_ids = request.data.get('ids', [])
        content_id = request.data.get('content_id')

        if not image_ids:
            return Response({
                'error': 'No image IDs provided',
                'type': 'ValidationError'
            }, status=status.HTTP_400_BAD_REQUEST)

        account_id = account.id if account else None

        # Queue Celery task
        try:
            if hasattr(process_image_generation_queue, 'delay'):
                task = process_image_generation_queue.delay(
                    image_ids=image_ids,
                    account_id=account_id,
                    content_id=content_id
                )
                return Response({
                    'success': True,
                    'task_id': str(task.id),
                    'message': 'Image generation started'
                }, status=status.HTTP_200_OK)
            else:
                # Fallback to synchronous execution (for testing)
                result = process_image_generation_queue(
                    image_ids=image_ids,
                    account_id=account_id,
                    content_id=content_id
                )
                return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            logger.error(f"[generate_images] Error: {str(e)}", exc_info=True)
            return Response({
                'error': str(e),
                'type': 'ExecutionError'
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
|
|
|
class ContentViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing task content

    Standard CRUD over Content records plus an action that kicks off
    AI image-prompt generation for a batch of content ids.
    """
    queryset = Content.objects.all()
    serializer_class = ContentSerializer
    pagination_class = CustomPageNumberPagination

    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['title', 'meta_title', 'primary_keyword']
    ordering_fields = ['generated_at', 'updated_at', 'word_count', 'status']
    ordering = ['-generated_at']
    filterset_fields = ['task_id', 'status']

    def perform_create(self, serializer):
        """Attach the request's account (when one is present) to the new record."""
        save_kwargs = {}
        request_account = getattr(self.request, 'account', None)
        if request_account:
            save_kwargs['account'] = request_account
        serializer.save(**save_kwargs)

    @action(detail=False, methods=['post'], url_path='generate_image_prompts', url_name='generate_image_prompts')
    def generate_image_prompts(self, request):
        """Generate image prompts for the given content ids.

        Prefers queuing via Celery; when no worker integration exists,
        runs the AI task inline and reports its outcome directly.
        """
        from igny8_core.ai.tasks import run_ai_task

        request_account = getattr(request, 'account', None)
        record_ids = request.data.get('ids', [])

        # Guard clause: nothing to do without ids.
        if not record_ids:
            return Response(
                {'error': 'No IDs provided', 'type': 'ValidationError'},
                status=status.HTTP_400_BAD_REQUEST,
            )

        account_id = request_account.id if request_account else None

        try:
            if hasattr(run_ai_task, 'delay'):
                # Async path: hand the batch to Celery and return the task id.
                queued = run_ai_task.delay(
                    function_name='generate_image_prompts',
                    payload={'ids': record_ids},
                    account_id=account_id,
                )
                return Response(
                    {
                        'success': True,
                        'task_id': str(queued.id),
                        'message': 'Image prompt generation started',
                    },
                    status=status.HTTP_200_OK,
                )

            # Sync path: no Celery wiring, execute the task inline.
            outcome = run_ai_task(
                function_name='generate_image_prompts',
                payload={'ids': record_ids},
                account_id=account_id,
            )
            if outcome.get('success'):
                return Response(
                    {
                        'success': True,
                        'prompts_created': outcome.get('count', 0),
                        'message': 'Image prompts generated successfully',
                    },
                    status=status.HTTP_200_OK,
                )
            return Response(
                {
                    'error': outcome.get('error', 'Image prompt generation failed'),
                    'type': 'TaskExecutionError',
                },
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
        except Exception as exc:
            return Response(
                {'error': str(exc), 'type': 'ExecutionError'},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )