Automation Part 1

This commit is contained in:
IGNY8 VPS (Salman)
2025-12-03 08:07:43 +00:00
parent 5d96e1a2bd
commit b9774aafa2
23 changed files with 3444 additions and 1 deletions

View File

@@ -0,0 +1,7 @@
"""
Automation Services
"""
from .automation_service import AutomationService
from .automation_logger import AutomationLogger
__all__ = ['AutomationService', 'AutomationLogger']

View File

@@ -0,0 +1,153 @@
"""
Automation Logger Service
Handles file-based logging for automation runs
"""
import os
import logging
from datetime import datetime
from pathlib import Path
from typing import List
logger = logging.getLogger(__name__)
class AutomationLogger:
    """File-based logging for automation runs.

    Each run gets its own directory:
        <base_log_dir>/<account_id>/<site_id>/<run_id>/
    containing a main ``automation_run.log`` plus one ``stage_<n>.log``
    per pipeline stage.
    """

    # Separator rule used in log file headers/footers.
    _RULE = "=" * 80

    def __init__(self, base_log_dir: str = 'logs/automation'):
        """
        Args:
            base_log_dir: Root directory under which all run logs are created.
        """
        self.base_log_dir = base_log_dir

    def start_run(self, account_id: int, site_id: int, trigger_type: str) -> str:
        """
        Create log directory structure and return run_id

        Returns:
            run_id in format: run_20251203_140523_manual
        """
        # Generate run_id from wall-clock time + trigger label.
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        run_id = f"run_{timestamp}_{trigger_type}"
        # Create directory structure
        run_dir = self._get_run_dir(account_id, site_id, run_id)
        os.makedirs(run_dir, exist_ok=True)
        # Write the main log header. Explicit UTF-8 keeps log files
        # readable regardless of the platform's default locale encoding.
        log_file = os.path.join(run_dir, 'automation_run.log')
        with open(log_file, 'w', encoding='utf-8') as f:
            f.write(self._RULE + "\n")
            f.write(f"AUTOMATION RUN: {run_id}\n")
            f.write(f"Started: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
            f.write(f"Trigger: {trigger_type}\n")
            f.write(f"Account: {account_id}\n")
            f.write(f"Site: {site_id}\n")
            f.write(self._RULE + "\n\n")
        logging.getLogger(__name__).info(f"[AutomationLogger] Created run: {run_id}")
        return run_id

    def log_stage_start(self, run_id: str, account_id: int, site_id: int, stage_number: int, stage_name: str, pending_count: int):
        """Record the start of a stage in both the main log and a fresh stage log."""
        timestamp = self._timestamp()
        # Main log: two lines -- stage starting + pending item count.
        self._append_to_main_log(account_id, site_id, run_id,
            f"{timestamp} - Stage {stage_number} starting: {stage_name}")
        self._append_to_main_log(account_id, site_id, run_id,
            f"{timestamp} - Stage {stage_number}: Found {pending_count} pending items")
        # Stage log: (re)created with a header block ('w' mode truncates).
        stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
        with open(stage_log, 'w', encoding='utf-8') as f:
            f.write(self._RULE + "\n")
            f.write(f"STAGE {stage_number}: {stage_name}\n")
            f.write(f"Started: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
            f.write(self._RULE + "\n\n")
            f.write(f"{timestamp} - Found {pending_count} pending items\n")

    def log_stage_progress(self, run_id: str, account_id: int, site_id: int, stage_number: int, message: str):
        """Append one timestamped progress line to both main and stage logs."""
        timestamp = self._timestamp()
        log_message = f"{timestamp} - Stage {stage_number}: {message}"
        # Main log
        self._append_to_main_log(account_id, site_id, run_id, log_message)
        # Stage-specific log
        stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
        with open(stage_log, 'a', encoding='utf-8') as f:
            f.write(f"{log_message}\n")

    def log_stage_complete(self, run_id: str, account_id: int, site_id: int, stage_number: int,
                           processed_count: int, time_elapsed: str, credits_used: int):
        """Record stage completion: a summary line in the main log plus a
        footer block (time / processed / credits) in the stage log."""
        timestamp = self._timestamp()
        # Main log
        self._append_to_main_log(account_id, site_id, run_id,
            f"{timestamp} - Stage {stage_number} complete: {processed_count} items processed")
        # Stage-specific log
        stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
        with open(stage_log, 'a', encoding='utf-8') as f:
            f.write("\n" + self._RULE + "\n")
            f.write(f"STAGE {stage_number} COMPLETE\n")
            f.write(f"Total Time: {time_elapsed}\n")
            f.write(f"Processed: {processed_count} items\n")
            f.write(f"Credits Used: {credits_used}\n")
            f.write(self._RULE + "\n")

    def log_stage_error(self, run_id: str, account_id: int, site_id: int, stage_number: int, error_message: str):
        """Record a stage error in both the main and stage logs."""
        timestamp = self._timestamp()
        log_message = f"{timestamp} - Stage {stage_number} ERROR: {error_message}"
        # Main log
        self._append_to_main_log(account_id, site_id, run_id, log_message)
        # Stage-specific log
        stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
        with open(stage_log, 'a', encoding='utf-8') as f:
            f.write(f"\n{log_message}\n")

    def get_activity_log(self, account_id: int, site_id: int, run_id: str, last_n: int = 50) -> List[str]:
        """
        Get last N lines from main activity log

        Returns:
            List of log lines (newest first)
        """
        log_file = os.path.join(self._get_run_dir(account_id, site_id, run_id), 'automation_run.log')
        if not os.path.exists(log_file):
            return []
        with open(log_file, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        # Drop the "=" header/footer rules and blank lines, keep activity only.
        activity_lines = [line.strip() for line in lines if line.strip() and not line.startswith('=')]
        # Return last N lines (newest first)
        return list(reversed(activity_lines[-last_n:]))

    # Helper methods
    def _get_run_dir(self, account_id: int, site_id: int, run_id: str) -> str:
        """Return ``<base>/<account_id>/<site_id>/<run_id>`` (not created here)."""
        return os.path.join(self.base_log_dir, str(account_id), str(site_id), run_id)

    def _get_stage_log_path(self, account_id: int, site_id: int, run_id: str, stage_number: int) -> str:
        """Return the per-stage log file path inside the run directory."""
        run_dir = self._get_run_dir(account_id, site_id, run_id)
        return os.path.join(run_dir, f'stage_{stage_number}.log')

    def _append_to_main_log(self, account_id: int, site_id: int, run_id: str, message: str):
        """Append one line to the run's main log (assumes start_run() ran first)."""
        log_file = os.path.join(self._get_run_dir(account_id, site_id, run_id), 'automation_run.log')
        with open(log_file, 'a', encoding='utf-8') as f:
            f.write(f"{message}\n")

    def _timestamp(self) -> str:
        """Wall-clock time as HH:MM:SS, used as the per-line prefix."""
        return datetime.now().strftime('%H:%M:%S')

View File

@@ -0,0 +1,824 @@
"""
Automation Service
Core orchestrator that executes AI function stages sequentially
"""
import logging
import time
from datetime import datetime, timedelta
from typing import Dict, Optional, List
from django.db import transaction
from django.db.models import Count, Q, F
from django.core.cache import cache
from celery.result import AsyncResult
from igny8_core.business.automation.models import AutomationRun, AutomationConfig
from igny8_core.business.automation.services.automation_logger import AutomationLogger
from igny8_core.modules.system.models import Account, Site
from igny8_core.modules.planner.models import Keywords, Clusters, ContentIdeas
from igny8_core.modules.writer.models import Tasks, Content, Images
from igny8_core.business.content.models import AIUsageLog
# AI Functions
from igny8_core.ai.functions.auto_cluster import AutoCluster
from igny8_core.ai.functions.generate_ideas import GenerateIdeas
from igny8_core.ai.functions.generate_content import GenerateContent
from igny8_core.ai.functions.generate_image_prompts import GenerateImagePromptsFunction
from igny8_core.ai.functions.generate_images import GenerateImages
logger = logging.getLogger(__name__)
class AutomationService:
"""Orchestrates AI functions into automated pipeline"""
def __init__(self, account: Account, site: Site):
    """Bind the service to one account/site pair and load its automation config.

    Args:
        account: Owning Account; its credit balance gates automation runs.
        site: Site whose content pipeline this service orchestrates.
    """
    self.account = account
    self.site = site
    self.logger = AutomationLogger()  # file-based run/stage logger
    self.run = None  # current AutomationRun; set by start_automation()/from_run_id()
    self.config = None
    # Load per-site config, creating a disabled daily default on first use.
    self.config, _ = AutomationConfig.objects.get_or_create(
        account=account,
        site=site,
        defaults={
            'is_enabled': False,
            'frequency': 'daily',
            'scheduled_time': '02:00',
        }
    )
@classmethod
def from_run_id(cls, run_id: str) -> 'AutomationService':
    """Rehydrate a service bound to an existing run, looked up by run_id."""
    existing_run = AutomationRun.objects.get(run_id=run_id)
    instance = cls(existing_run.account, existing_run.site)
    instance.run = existing_run
    return instance
def start_automation(self, trigger_type: str = 'manual') -> str:
    """Start a new automation run for this site.

    Guards against concurrent runs (DB status check + cache lock),
    verifies the account balance covers the estimate plus a 20% buffer,
    then creates the on-disk log files and the AutomationRun record.

    Args:
        trigger_type: Trigger label (e.g. 'manual'); becomes part of the
            generated run_id.

    Returns:
        The new run_id (e.g. ``run_20251203_140523_manual``).

    Raises:
        ValueError: if a run is already active for the site or the
            account's credit balance is insufficient.
    """
    # DB-level guard: refuse if a run is already marked running.
    if AutomationRun.objects.filter(site=self.site, status='running').exists():
        raise ValueError("Automation already running for this site")
    # Cache lock closes the race window between the DB check above and
    # the AutomationRun insert below; 6h timeout bounds a stuck lock.
    lock_key = f'automation_lock_{self.site.id}'
    if not cache.add(lock_key, 'locked', timeout=21600):  # 6 hours
        raise ValueError("Automation already running for this site (cache lock)")
    try:
        # Estimate credits needed
        estimated_credits = self.estimate_credits()
        # Check credit balance (with 20% buffer)
        required_credits = int(estimated_credits * 1.2)
        if self.account.credits_balance < required_credits:
            raise ValueError(f"Insufficient credits. Need ~{required_credits}, you have {self.account.credits_balance}")
        # Create run_id and log files
        run_id = self.logger.start_run(self.account.id, self.site.id, trigger_type)
        # Create AutomationRun record
        self.run = AutomationRun.objects.create(
            run_id=run_id,
            account=self.account,
            site=self.site,
            trigger_type=trigger_type,
            status='running',
            current_stage=1,
        )
        # Stage 0 = pre-pipeline bookkeeping lines in the main log.
        self.logger.log_stage_progress(
            run_id, self.account.id, self.site.id, 0,
            f"Automation started (trigger: {trigger_type})"
        )
        self.logger.log_stage_progress(
            run_id, self.account.id, self.site.id, 0,
            f"Credit check: Account has {self.account.credits_balance} credits, estimated need: {estimated_credits} credits"
        )
        logger.info(f"[AutomationService] Started run: {run_id}")
        return run_id
    except Exception:
        # Release the lock on any failure, then re-raise with original traceback.
        cache.delete(lock_key)
        raise
def run_stage_1(self):
    """Stage 1: cluster new keywords via the AutoCluster AI function.

    Processes keywords with status 'new' and no cluster, in batches of
    ``config.stage_1_batch_size``, blocking on each Celery task before
    starting the next batch. Writes results/credit usage to
    ``run.stage_1_result`` and advances the run to stage 2.
    """
    stage_number = 1
    stage_name = "Keywords → Clusters (AI)"
    start_time = time.time()
    # Pending = new, not yet clustered, not disabled.
    pending_keywords = Keywords.objects.filter(
        site=self.site,
        status='new',
        cluster__isnull=True,
        disabled=False
    )
    total_count = pending_keywords.count()
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    if total_count == 0:
        # Nothing to do: record an empty result and advance immediately.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No keywords to process - skipping stage"
        )
        self.run.stage_1_result = {'keywords_processed': 0, 'clusters_created': 0, 'batches_run': 0, 'credits_used': 0}
        self.run.current_stage = 2
        self.run.save()
        return
    # Process in batches
    batch_size = self.config.stage_1_batch_size
    keywords_processed = 0
    clusters_created = 0
    batches_run = 0
    credits_before = self._get_credits_used()
    # Snapshot ids up-front so batching is stable even if rows change mid-run.
    keyword_ids = list(pending_keywords.values_list('id', flat=True))
    for i in range(0, len(keyword_ids), batch_size):
        batch = keyword_ids[i:i + batch_size]
        batch_num = (i // batch_size) + 1
        total_batches = (len(keyword_ids) + batch_size - 1) // batch_size  # ceil division
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Processing batch {batch_num}/{total_batches} ({len(batch)} keywords)"
        )
        # Call AI function
        result = AutoCluster().execute(
            payload={'ids': batch},
            account=self.account
        )
        # Block until the Celery task finishes (honours pause/resume).
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Batch {batch_num}")
        keywords_processed += len(batch)
        batches_run += 1
        # Log progress
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Batch {batch_num} complete"
        )
    # NOTE(review): counts every cluster created since the run started,
    # not only this stage's output -- confirm that is acceptable.
    clusters_created = Clusters.objects.filter(
        site=self.site,
        created_at__gte=self.run.started_at
    ).count()
    # Credits used = delta of usage-log totals since the stage began.
    credits_used = self._get_credits_used() - credits_before
    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)
    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, keywords_processed, time_elapsed, credits_used
    )
    # Save results
    self.run.stage_1_result = {
        'keywords_processed': keywords_processed,
        'clusters_created': clusters_created,
        'batches_run': batches_run,
        'credits_used': credits_used
    }
    self.run.current_stage = 2
    self.run.total_credits_used += credits_used
    self.run.save()
    logger.info(f"[AutomationService] Stage 1 complete: {keywords_processed} keywords → {clusters_created} clusters")
def run_stage_2(self):
    """Stage 2: generate content ideas for new clusters (one AI call each).

    Processes clusters with status 'new' that have no ideas yet, records
    results in ``run.stage_2_result`` and advances the run to stage 3.
    """
    stage_number = 2
    stage_name = "Clusters → Ideas (AI)"
    start_time = time.time()
    # Pending = new, enabled clusters with no related ideas
    # (.exclude(ideas__isnull=False) drops clusters that have >= 1 idea).
    pending_clusters = Clusters.objects.filter(
        site=self.site,
        status='new',
        disabled=False
    ).exclude(
        ideas__isnull=False
    )
    total_count = pending_clusters.count()
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    if total_count == 0:
        # Nothing to do: record an empty result and advance immediately.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No clusters to process - skipping stage"
        )
        self.run.stage_2_result = {'clusters_processed': 0, 'ideas_created': 0, 'credits_used': 0}
        self.run.current_stage = 3
        self.run.save()
        return
    # Process one cluster at a time (each is its own AI task).
    clusters_processed = 0
    credits_before = self._get_credits_used()
    for cluster in pending_clusters:
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Generating ideas for cluster: {cluster.name}"
        )
        # Call AI function
        result = GenerateIdeas().execute(
            payload={'ids': [cluster.id]},
            account=self.account
        )
        # Block until the Celery task finishes (honours pause/resume).
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Cluster '{cluster.name}'")
        clusters_processed += 1
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Cluster '{cluster.name}' complete"
        )
    # NOTE(review): counts every idea created since the run started, not
    # only this stage's output -- confirm that is acceptable.
    ideas_created = ContentIdeas.objects.filter(
        site=self.site,
        created_at__gte=self.run.started_at
    ).count()
    # Credits used = delta of usage-log totals since the stage began.
    credits_used = self._get_credits_used() - credits_before
    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)
    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, clusters_processed, time_elapsed, credits_used
    )
    # Save results
    self.run.stage_2_result = {
        'clusters_processed': clusters_processed,
        'ideas_created': ideas_created,
        'credits_used': credits_used
    }
    self.run.current_stage = 3
    self.run.total_credits_used += credits_used
    self.run.save()
    logger.info(f"[AutomationService] Stage 2 complete: {clusters_processed} clusters → {ideas_created} ideas")
def run_stage_3(self):
    """Stage 3: turn pending content ideas into writer Tasks (local, no AI).

    Pure database work: for every idea with status 'new' a Tasks row is
    created and the idea is marked 'queued'. No credits are consumed.
    Results are saved to ``run.stage_3_result`` and the run advances to
    stage 4.
    """
    stage_number = 3
    stage_name = "Ideas → Tasks (Local Queue)"
    start_time = time.time()
    # Query pending ideas
    pending_ideas = ContentIdeas.objects.filter(
        site=self.site,
        status='new'
    )
    total_count = pending_ideas.count()
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    if total_count == 0:
        # Nothing to do: record an empty result and advance immediately.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No ideas to process - skipping stage"
        )
        self.run.stage_3_result = {'ideas_processed': 0, 'tasks_created': 0, 'batches_run': 0}
        self.run.current_stage = 4
        self.run.save()
        return
    # Process in batches
    batch_size = self.config.stage_3_batch_size
    ideas_processed = 0
    tasks_created = 0
    batches_run = 0
    idea_list = list(pending_ideas)
    for i in range(0, len(idea_list), batch_size):
        batch = idea_list[i:i + batch_size]
        batch_num = (i // batch_size) + 1
        total_batches = (len(idea_list) + batch_size - 1) // batch_size  # ceil division
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Queueing batch {batch_num}/{total_batches} ({len(batch)} ideas)"
        )
        # Create each batch atomically so a Tasks row and its idea's
        # 'queued' status can never get out of sync on a mid-batch failure.
        with transaction.atomic():
            for idea in batch:
                # Build keywords string: prefer related keyword objects,
                # fall back to the idea's free-text target keywords.
                keywords_str = ''
                if idea.keyword_objects.exists():
                    keywords_str = ', '.join([kw.keyword for kw in idea.keyword_objects.all()])
                elif idea.target_keywords:
                    keywords_str = idea.target_keywords
                # Create task (return value intentionally unused).
                Tasks.objects.create(
                    title=idea.idea_title,
                    description=idea.description or '',
                    cluster=idea.keyword_cluster,
                    content_type=idea.content_type or 'post',
                    content_structure=idea.content_structure or 'article',
                    keywords=keywords_str,
                    status='queued',
                    account=idea.account,
                    site=idea.site,
                    sector=idea.sector,
                    idea=idea,
                )
                # Update idea status
                idea.status = 'queued'
                idea.save()
                tasks_created += 1
        ideas_processed += len(batch)
        batches_run += 1
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Batch {batch_num} complete: {len(batch)} tasks created"
        )
    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)
    # Log completion (0 credits -- local operation only)
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, ideas_processed, time_elapsed, 0
    )
    # Save results
    self.run.stage_3_result = {
        'ideas_processed': ideas_processed,
        'tasks_created': tasks_created,
        'batches_run': batches_run
    }
    self.run.current_stage = 4
    self.run.save()
    logger.info(f"[AutomationService] Stage 3 complete: {ideas_processed} ideas → {tasks_created} tasks")
def run_stage_4(self):
    """Stage 4: generate article content for queued tasks (one AI call each).

    Processes tasks with status 'queued' that have no content yet,
    records results in ``run.stage_4_result`` and advances to stage 5.
    """
    stage_number = 4
    stage_name = "Tasks → Content (AI)"
    start_time = time.time()
    # Query queued tasks
    pending_tasks = Tasks.objects.filter(
        site=self.site,
        status='queued',
        content__isnull=True
    )
    total_count = pending_tasks.count()
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    if total_count == 0:
        # Nothing to do: record an empty result and advance immediately.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No tasks to process - skipping stage"
        )
        self.run.stage_4_result = {'tasks_processed': 0, 'content_created': 0, 'total_words': 0, 'credits_used': 0}
        self.run.current_stage = 5
        self.run.save()
        return
    # Process one task at a time (each is its own AI call).
    tasks_processed = 0
    credits_before = self._get_credits_used()
    for task in pending_tasks:
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Generating content for task: {task.title}"
        )
        # Call AI function
        result = GenerateContent().execute(
            payload={'ids': [task.id]},
            account=self.account
        )
        # Block until the Celery task finishes (honours pause/resume).
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Task '{task.title}'")
        tasks_processed += 1
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Task '{task.title}' complete"
        )
    # NOTE(review): counts every content row created since the run
    # started, not only this stage's output -- confirm acceptable.
    content_created = Content.objects.filter(
        site=self.site,
        created_at__gte=self.run.started_at
    ).count()
    # Rough word estimate (~2500 words/article). The previous version
    # re-issued an identical COUNT query here; reuse content_created.
    total_words = content_created * 2500
    # Credits used = delta of usage-log totals since the stage began.
    credits_used = self._get_credits_used() - credits_before
    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)
    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, tasks_processed, time_elapsed, credits_used
    )
    # Save results
    self.run.stage_4_result = {
        'tasks_processed': tasks_processed,
        'content_created': content_created,
        'total_words': total_words,
        'credits_used': credits_used
    }
    self.run.current_stage = 5
    self.run.total_credits_used += credits_used
    self.run.save()
    logger.info(f"[AutomationService] Stage 4 complete: {tasks_processed} tasks → {content_created} content")
def run_stage_5(self):
    """Stage 5: extract image prompts from draft content (one AI call each).

    Processes draft content that has no Images rows yet, records results
    in ``run.stage_5_result`` and advances the run to stage 6.
    """
    stage_number = 5
    stage_name = "Content → Image Prompts (AI)"
    start_time = time.time()
    # Pending = draft content with zero related Images rows.
    content_without_images = Content.objects.filter(
        site=self.site,
        status='draft'
    ).annotate(
        images_count=Count('images')
    ).filter(
        images_count=0
    )
    total_count = content_without_images.count()
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    if total_count == 0:
        # Nothing to do: record an empty result and advance immediately.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No content to process - skipping stage"
        )
        self.run.stage_5_result = {'content_processed': 0, 'prompts_created': 0, 'credits_used': 0}
        self.run.current_stage = 6
        self.run.save()
        return
    # Process one content item at a time (each is its own AI call).
    content_processed = 0
    credits_before = self._get_credits_used()
    for content in content_without_images:
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Extracting prompts from: {content.title}"
        )
        # Call AI function
        result = GenerateImagePromptsFunction().execute(
            payload={'ids': [content.id]},
            account=self.account
        )
        # Block until the Celery task finishes (honours pause/resume).
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Content '{content.title}'")
        content_processed += 1
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Content '{content.title}' complete"
        )
    # NOTE(review): counts every pending image created since the run
    # started, not only this stage's output -- confirm acceptable.
    prompts_created = Images.objects.filter(
        site=self.site,
        status='pending',
        created_at__gte=self.run.started_at
    ).count()
    # Credits used = delta of usage-log totals since the stage began.
    credits_used = self._get_credits_used() - credits_before
    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)
    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, content_processed, time_elapsed, credits_used
    )
    # Save results
    self.run.stage_5_result = {
        'content_processed': content_processed,
        'prompts_created': prompts_created,
        'credits_used': credits_used
    }
    self.run.current_stage = 6
    self.run.total_credits_used += credits_used
    self.run.save()
    logger.info(f"[AutomationService] Stage 5 complete: {content_processed} content → {prompts_created} prompts")
def run_stage_6(self):
    """Stage 6: render pending image prompts into images (one AI call each).

    Processes Images rows with status 'pending'. Moving content to
    'review' is a side effect of the image pipeline (counted below, not
    performed here). Results go to ``run.stage_6_result`` and the run
    advances to stage 7.
    """
    stage_number = 6
    stage_name = "Images (Prompts) → Generated Images (AI)"
    start_time = time.time()
    # Query pending images
    pending_images = Images.objects.filter(
        site=self.site,
        status='pending'
    )
    total_count = pending_images.count()
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    if total_count == 0:
        # Nothing to do: record an empty result and advance immediately.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No images to process - skipping stage"
        )
        self.run.stage_6_result = {'images_processed': 0, 'images_generated': 0, 'content_moved_to_review': 0, 'credits_used': 0}
        self.run.current_stage = 7
        self.run.save()
        return
    # Process one image at a time (each is its own AI call).
    images_processed = 0
    credits_before = self._get_credits_used()
    for image in pending_images:
        content_title = image.content.title if image.content else 'Unknown'
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Generating image: {image.image_type} for '{content_title}'"
        )
        # Call AI function (payload key is 'image_ids' here, unlike the
        # 'ids' key used by the other stages' AI functions).
        result = GenerateImages().execute(
            payload={'image_ids': [image.id]},
            account=self.account
        )
        # Block until the Celery task finishes (honours pause/resume).
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Image for '{content_title}'")
        images_processed += 1
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Image generated for '{content_title}'"
        )
    # NOTE(review): both counts below use updated_at >= run start, so
    # they include rows touched by anything else during the run -- confirm.
    images_generated = Images.objects.filter(
        site=self.site,
        status='generated',
        updated_at__gte=self.run.started_at
    ).count()
    # Count content moved to review (automatic side effect)
    content_moved_to_review = Content.objects.filter(
        site=self.site,
        status='review',
        updated_at__gte=self.run.started_at
    ).count()
    # Credits used = delta of usage-log totals since the stage began.
    credits_used = self._get_credits_used() - credits_before
    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)
    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, images_processed, time_elapsed, credits_used
    )
    # Save results
    self.run.stage_6_result = {
        'images_processed': images_processed,
        'images_generated': images_generated,
        'content_moved_to_review': content_moved_to_review,
        'credits_used': credits_used
    }
    self.run.current_stage = 7
    self.run.total_credits_used += credits_used
    self.run.save()
    logger.info(f"[AutomationService] Stage 6 complete: {images_processed} images generated, {content_moved_to_review} content moved to review")
def run_stage_7(self):
    """Stage 7: terminal review gate -- count content awaiting manual review.

    No AI work: records which content sits in 'review', marks the run
    completed, and releases the site's automation cache lock.
    """
    stage_number = 7
    stage_name = "Manual Review Gate"
    # Query content ready for review
    ready_for_review = Content.objects.filter(
        site=self.site,
        status='review'
    )
    total_count = ready_for_review.count()
    content_ids = list(ready_for_review.values_list('id', flat=True))
    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )
    self.logger.log_stage_progress(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, f"Automation complete. {total_count} content pieces ready for review"
    )
    if content_ids:
        # Show at most the first 10 ids in the log line.
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Content IDs ready: {content_ids[:10]}..." if len(content_ids) > 10 else f"Content IDs ready: {content_ids}"
        )
    # Save results
    self.run.stage_7_result = {
        'ready_for_review': total_count,
        'content_ids': content_ids
    }
    self.run.status = 'completed'
    # NOTE(review): naive datetime -- if the project runs with USE_TZ,
    # this should probably be django.utils.timezone.now(); confirm.
    self.run.completed_at = datetime.now()
    self.run.save()
    # Release the distributed lock acquired in start_automation().
    cache.delete(f'automation_lock_{self.site.id}')
    logger.info(f"[AutomationService] Stage 7 complete: Automation ended, {total_count} content ready for review")
def pause_automation(self):
    """Mark the current automation run as paused (no-op when no run is attached)."""
    if not self.run:
        return
    self.run.status = 'paused'
    self.run.save()
    logger.info(f"[AutomationService] Paused run: {self.run.run_id}")
def resume_automation(self):
    """Switch a paused run back to 'running' (no-op unless currently paused)."""
    if not self.run:
        return
    if self.run.status != 'paused':
        return
    self.run.status = 'running'
    self.run.save()
    logger.info(f"[AutomationService] Resumed run: {self.run.run_id}")
def estimate_credits(self) -> int:
    """Estimate total credits needed for a full automation run.

    Counts the items each stage would process (mirroring the stage
    queries) and applies fixed per-item rates. Used by the pre-run
    balance check in start_automation().

    Returns:
        Rough total credit estimate (int).
    """
    # Count items
    keywords_count = Keywords.objects.filter(site=self.site, status='new', cluster__isnull=True).count()
    clusters_count = Clusters.objects.filter(site=self.site, status='new').exclude(ideas__isnull=False).count()
    # NOTE(review): ideas_count is computed but never used in the total
    # (stage 3 is a free local operation) -- confirm that is intentional.
    ideas_count = ContentIdeas.objects.filter(site=self.site, status='new').count()
    tasks_count = Tasks.objects.filter(site=self.site, status='queued', content__isnull=True).count()
    content_count = Content.objects.filter(site=self.site, status='draft').annotate(images_count=Count('images')).filter(images_count=0).count()
    # Estimate credits
    clustering_credits = (keywords_count // 5) + 1  # 1 credit per 5 keywords
    ideas_credits = clusters_count * 2  # 2 credits per cluster
    content_credits = tasks_count * 5  # Assume 2500 words avg = 5 credits
    prompts_credits = content_count * 2  # Assume 4 prompts per content = 2 credits
    images_credits = content_count * 8  # Assume 4 images * 2 credits avg
    total = clustering_credits + ideas_credits + content_credits + prompts_credits + images_credits
    logger.info(f"[AutomationService] Estimated credits: {total}")
    return total
# Helper methods
def _wait_for_task(self, task_id: str, stage_number: int, item_name: str):
    """Block until the given Celery task finishes.

    Polls every 3 seconds; while waiting, honours a pause request by
    spinning at a 5-second interval until the run leaves 'paused'.

    Raises:
        Exception: if the Celery task ends in a failed state (logged to
            the stage log first).

    NOTE(review): there is no overall timeout -- a lost or stuck Celery
    task will block this loop indefinitely. Consider bounding the wait.
    """
    result = AsyncResult(task_id)
    while not result.ready():
        time.sleep(3)  # Poll every 3 seconds
        # Check for pause
        self.run.refresh_from_db()
        if self.run.status == 'paused':
            logger.info(f"[AutomationService] Paused during {item_name}")
            # Wait until resumed
            while self.run.status == 'paused':
                time.sleep(5)
                self.run.refresh_from_db()
    if result.failed():
        error_msg = f"Task failed for {item_name}"
        self.logger.log_stage_error(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, error_msg
        )
        raise Exception(error_msg)
def _get_credits_used(self) -> int:
    """Return credits consumed by the account since the run started (0 if no run).

    NOTE(review): this aggregates Count('id') -- i.e. the NUMBER of
    AIUsageLog rows -- which equals credits only if each row represents
    exactly one credit; if rows carry a credit amount, a Sum over that
    field is needed instead. The filter is also account-wide, so
    concurrent activity on other sites is included. Confirm both.
    """
    if not self.run:
        return 0
    total = AIUsageLog.objects.filter(
        account=self.account,
        created_at__gte=self.run.started_at
    ).aggregate(total=Count('id'))['total'] or 0
    return total
def _format_time_elapsed(self, start_time: float) -> str:
"""Format elapsed time"""
elapsed = time.time() - start_time
minutes = int(elapsed // 60)
seconds = int(elapsed % 60)
return f"{minutes}m {seconds}s"