Automation Part 1
This commit is contained in:
116
backend/deploy_automation.sh
Normal file
116
backend/deploy_automation.sh
Normal file
@@ -0,0 +1,116 @@
|
||||
#!/bin/bash
# Automation System Deployment Script
# Run this script from the backend directory to complete the automation
# system deployment: log dirs, migrations, Celery checks, smoke tests.

set -e  # Exit on error

echo "========================================="
echo "IGNY8 Automation System Deployment"
echo "========================================="
echo ""

# Colors for output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Check if running from correct directory
if [ ! -f "manage.py" ]; then
    echo -e "${RED}Error: Please run this script from the backend directory${NC}"
    echo "cd /data/app/igny8/backend && ./deploy_automation.sh"
    exit 1
fi

echo -e "${YELLOW}Step 1: Creating log directory...${NC}"
mkdir -p logs/automation
chmod 755 logs/automation
echo -e "${GREEN}✓ Log directory created${NC}"
echo ""

echo -e "${YELLOW}Step 2: Running database migrations...${NC}"
python3 manage.py makemigrations
python3 manage.py migrate
echo -e "${GREEN}✓ Migrations complete${NC}"
echo ""

echo -e "${YELLOW}Step 3: Checking Celery services...${NC}"
# Match on container names only. A plain `docker ps | grep -q celery`
# also matches the "celery-beat" container and would report the worker
# as running when only beat is up.
if docker ps --format '{{.Names}}' | grep -v beat | grep -q celery; then
    echo -e "${GREEN}✓ Celery worker is running${NC}"
else
    echo -e "${RED}⚠ Celery worker is NOT running${NC}"
    echo "Start with: docker-compose up -d celery"
fi

if docker ps --format '{{.Names}}' | grep -q beat; then
    echo -e "${GREEN}✓ Celery beat is running${NC}"
else
    echo -e "${RED}⚠ Celery beat is NOT running${NC}"
    echo "Start with: docker-compose up -d celery-beat"
fi
echo ""

echo -e "${YELLOW}Step 4: Verifying cache backend...${NC}"
# Use `manage.py shell` (not bare `python3 -c`) so Django settings are
# configured before django.core.cache is imported.
python3 manage.py shell <<'EOF' || echo -e "${RED}⚠ Could not verify cache backend${NC}"
from django.core.cache import cache
try:
    cache.set('test_key', 'test_value', 10)
    if cache.get('test_key') == 'test_value':
        print('OK: Cache backend working')
    else:
        print('WARN: Cache backend not working properly')
except Exception as e:
    print('WARN: Cache backend error:', str(e))
EOF
echo ""

echo -e "${YELLOW}Step 5: Testing automation API...${NC}"
# Quoted heredoc: shell color variables are NOT expanded inside Python.
# (print() would emit the raw "\033[" text, not colors, anyway.)
python3 manage.py shell <<'EOF'
from igny8_core.business.automation.services import AutomationService
from igny8_core.modules.system.models import Account, Site

try:
    account = Account.objects.first()
    site = Site.objects.first()
    if account and site:
        service = AutomationService(account, site)
        estimate = service.estimate_credits()
        print('OK: AutomationService working - Estimated credits:', estimate)
    else:
        print('WARN: No account or site found - create one first')
except Exception as e:
    print('WARN: AutomationService error:', str(e))
EOF
echo ""

echo -e "${YELLOW}Step 6: Checking Celery beat schedule...${NC}"
if docker ps --format '{{.Names}}' | grep -v beat | grep -q celery; then
    # First worker container name; always quoted below.
    CELERY_CONTAINER=$(docker ps --format '{{.Names}}' | grep celery | grep -v beat | head -n 1)
    if docker exec "$CELERY_CONTAINER" celery -A igny8_core inspect scheduled 2>/dev/null | grep -q "check-scheduled-automations"; then
        echo -e "${GREEN}✓ Automation task scheduled in Celery beat${NC}"
    else
        echo -e "${YELLOW}⚠ Automation task not found in schedule (may need restart)${NC}"
    fi
else
    echo -e "${YELLOW}⚠ Celery worker not running - cannot check schedule${NC}"
fi
echo ""

echo "========================================="
echo -e "${GREEN}Deployment Steps Completed!${NC}"
echo "========================================="
echo ""

echo "Next steps:"
echo "1. Restart Celery services to pick up new tasks:"
echo "   docker-compose restart celery celery-beat"
echo ""
echo "2. Access the frontend at /automation page"
echo ""
echo "3. Test the automation:"
echo "   - Click [Configure] to set up schedule"
echo "   - Click [Run Now] to start automation"
echo "   - Monitor progress in real-time"
echo ""
echo "4. Check logs:"
echo "   tail -f logs/automation/{account_id}/{site_id}/{run_id}/automation_run.log"
echo ""
echo -e "${YELLOW}For troubleshooting, see: AUTOMATION-DEPLOYMENT-CHECKLIST.md${NC}"
|
||||
4
backend/igny8_core/business/automation/__init__.py
Normal file
4
backend/igny8_core/business/automation/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""
|
||||
Automation Business Logic
|
||||
Orchestrates AI functions into automated pipelines
|
||||
"""
|
||||
@@ -0,0 +1,89 @@
|
||||
# Generated migration for automation models
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """
    Initial, hand-written migration for the automation models.

    NOTE(review): this file must stay in sync with
    igny8_core/business/automation/models.py. The previous version created
    tables 'automation_config' / 'automation_run' while the models declare
    db_table 'igny8_automation_configs' / 'igny8_automation_runs', so model
    queries would have hit non-existent tables. Table names, related_names
    and index field lists below now follow the models. If the models change
    again, prefer regenerating this file with `makemigrations`.
    """

    initial = True

    dependencies = [
        # Account/Site live in the 'system' app.
        ('system', '__latest__'),
    ]

    operations = [
        migrations.CreateModel(
            name='AutomationConfig',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_enabled', models.BooleanField(default=False, help_text='Enable/disable automation for this site')),
                ('frequency', models.CharField(
                    choices=[('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly')],
                    default='daily',
                    max_length=20
                )),
                ('scheduled_time', models.TimeField(default='02:00', help_text='Time of day to run automation (HH:MM)')),
                # Per-stage batch sizes for the 6 AI pipeline stages.
                ('stage_1_batch_size', models.IntegerField(default=20, help_text='Keywords → Clusters batch size')),
                ('stage_2_batch_size', models.IntegerField(default=1, help_text='Clusters → Ideas batch size')),
                ('stage_3_batch_size', models.IntegerField(default=20, help_text='Ideas → Tasks batch size')),
                ('stage_4_batch_size', models.IntegerField(default=1, help_text='Tasks → Content batch size')),
                ('stage_5_batch_size', models.IntegerField(default=1, help_text='Content → Image Prompts batch size')),
                ('stage_6_batch_size', models.IntegerField(default=1, help_text='Image Prompts → Images batch size')),
                ('last_run_at', models.DateTimeField(blank=True, null=True)),
                ('next_run_at', models.DateTimeField(blank=True, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                # related_name matches AutomationConfig.account in models.py.
                ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='automation_configs', to='system.account')),
                ('site', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='automation_config', to='system.site')),
            ],
            options={
                # Matches AutomationConfig.Meta.db_table (was 'automation_config').
                'db_table': 'igny8_automation_configs',
                'indexes': [
                    # Field lists mirror AutomationConfig.Meta.indexes; names are
                    # explicit because migration files require them — TODO confirm
                    # they match what makemigrations would auto-generate.
                    models.Index(fields=['is_enabled', 'next_run_at'], name='autocfg_enabled_next_idx'),
                    models.Index(fields=['account', 'site'], name='autocfg_account_site_idx'),
                ],
            },
        ),
        migrations.CreateModel(
            name='AutomationRun',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # unique=True already implies an index; db_index mirrors the model.
                ('run_id', models.CharField(db_index=True, max_length=100, unique=True)),
                # No default: the model declares trigger_type without one.
                ('trigger_type', models.CharField(
                    choices=[('manual', 'Manual'), ('scheduled', 'Scheduled')],
                    max_length=20
                )),
                ('status', models.CharField(
                    choices=[
                        ('running', 'Running'),
                        ('paused', 'Paused'),
                        ('completed', 'Completed'),
                        ('failed', 'Failed')
                    ],
                    db_index=True,
                    default='running',
                    max_length=20
                )),
                ('current_stage', models.IntegerField(default=1, help_text='Current stage (1-7)')),
                # One JSON result blob per pipeline stage.
                ('stage_1_result', models.JSONField(blank=True, null=True)),
                ('stage_2_result', models.JSONField(blank=True, null=True)),
                ('stage_3_result', models.JSONField(blank=True, null=True)),
                ('stage_4_result', models.JSONField(blank=True, null=True)),
                ('stage_5_result', models.JSONField(blank=True, null=True)),
                ('stage_6_result', models.JSONField(blank=True, null=True)),
                ('stage_7_result', models.JSONField(blank=True, null=True)),
                ('total_credits_used', models.IntegerField(default=0)),
                ('error_message', models.TextField(blank=True, null=True)),
                ('started_at', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('completed_at', models.DateTimeField(blank=True, null=True)),
                # related_name matches AutomationRun.account in models.py.
                ('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='automation_runs', to='system.account')),
                ('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='automation_runs', to='system.site')),
            ],
            options={
                # Matches AutomationRun.Meta.db_table (was 'automation_run').
                'db_table': 'igny8_automation_runs',
                'ordering': ['-started_at'],
                'indexes': [
                    # Field lists mirror AutomationRun.Meta.indexes (descending
                    # started_at), replacing the previous ['site','status'] /
                    # ['site','started_at'] pair that diverged from the models.
                    models.Index(fields=['site', '-started_at'], name='autorun_site_started_idx'),
                    models.Index(fields=['status', '-started_at'], name='autorun_status_started_idx'),
                    models.Index(fields=['account', '-started_at'], name='autorun_acct_started_idx'),
                ],
            },
        ),
    ]
|
||||
@@ -0,0 +1 @@
|
||||
"""Automation migrations"""
|
||||
104
backend/igny8_core/business/automation/models.py
Normal file
104
backend/igny8_core/business/automation/models.py
Normal file
@@ -0,0 +1,104 @@
|
||||
"""
|
||||
Automation Models
|
||||
Tracks automation runs and configuration
|
||||
"""
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from igny8_core.modules.system.models import Account, Site
|
||||
|
||||
|
||||
class AutomationConfig(models.Model):
    """Per-site automation configuration"""

    # Allowed values for `frequency` (how often scheduled runs fire).
    FREQUENCY_CHOICES = [
        ('daily', 'Daily'),
        ('weekly', 'Weekly'),
        ('monthly', 'Monthly'),
    ]

    account = models.ForeignKey(Account, on_delete=models.CASCADE, related_name='automation_configs')
    # OneToOne: exactly one config per site, reachable as site.automation_config.
    site = models.OneToOneField(Site, on_delete=models.CASCADE, related_name='automation_config')

    is_enabled = models.BooleanField(default=False, help_text="Whether scheduled automation is active")
    frequency = models.CharField(max_length=20, choices=FREQUENCY_CHOICES, default='daily')
    scheduled_time = models.TimeField(default='02:00', help_text="Time to run (e.g., 02:00)")

    # Batch sizes per stage
    stage_1_batch_size = models.IntegerField(default=20, help_text="Keywords per batch")
    stage_2_batch_size = models.IntegerField(default=1, help_text="Clusters at a time")
    stage_3_batch_size = models.IntegerField(default=20, help_text="Ideas per batch")
    stage_4_batch_size = models.IntegerField(default=1, help_text="Tasks - sequential")
    stage_5_batch_size = models.IntegerField(default=1, help_text="Content at a time")
    stage_6_batch_size = models.IntegerField(default=1, help_text="Images - sequential")

    last_run_at = models.DateTimeField(null=True, blank=True)
    next_run_at = models.DateTimeField(null=True, blank=True, help_text="Calculated based on frequency")

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        # NOTE(review): the hand-written initial migration creates table
        # 'automation_config', not 'igny8_automation_configs' — confirm which
        # table actually exists in the database and reconcile.
        db_table = 'igny8_automation_configs'
        verbose_name = 'Automation Config'
        verbose_name_plural = 'Automation Configs'
        indexes = [
            # Supports the scheduler query "enabled configs due before now".
            models.Index(fields=['is_enabled', 'next_run_at']),
            models.Index(fields=['account', 'site']),
        ]

    def __str__(self):
        return f"Automation Config: {self.site.domain} ({self.frequency})"
|
||||
|
||||
|
||||
class AutomationRun(models.Model):
    """Tracks each automation execution"""

    # How the run was initiated.
    TRIGGER_TYPE_CHOICES = [
        ('manual', 'Manual'),
        ('scheduled', 'Scheduled'),
    ]

    # Lifecycle states; AutomationService filters on status='running' to
    # prevent concurrent runs per site.
    STATUS_CHOICES = [
        ('running', 'Running'),
        ('paused', 'Paused'),
        ('completed', 'Completed'),
        ('failed', 'Failed'),
    ]

    # Also used as the on-disk log directory name (see AutomationLogger).
    run_id = models.CharField(max_length=100, unique=True, db_index=True, help_text="Format: run_20251203_140523_manual")
    account = models.ForeignKey(Account, on_delete=models.CASCADE, related_name='automation_runs')
    site = models.ForeignKey(Site, on_delete=models.CASCADE, related_name='automation_runs')

    trigger_type = models.CharField(max_length=20, choices=TRIGGER_TYPE_CHOICES)
    status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='running', db_index=True)
    current_stage = models.IntegerField(default=1, help_text="Current stage number (1-7)")

    started_at = models.DateTimeField(auto_now_add=True, db_index=True)
    completed_at = models.DateTimeField(null=True, blank=True)

    total_credits_used = models.IntegerField(default=0)

    # JSON results per stage
    stage_1_result = models.JSONField(null=True, blank=True, help_text="{keywords_processed, clusters_created, batches}")
    stage_2_result = models.JSONField(null=True, blank=True, help_text="{clusters_processed, ideas_created}")
    stage_3_result = models.JSONField(null=True, blank=True, help_text="{ideas_processed, tasks_created}")
    stage_4_result = models.JSONField(null=True, blank=True, help_text="{tasks_processed, content_created, total_words}")
    stage_5_result = models.JSONField(null=True, blank=True, help_text="{content_processed, prompts_created}")
    stage_6_result = models.JSONField(null=True, blank=True, help_text="{images_processed, images_generated}")
    stage_7_result = models.JSONField(null=True, blank=True, help_text="{ready_for_review}")

    error_message = models.TextField(null=True, blank=True)

    class Meta:
        # NOTE(review): the hand-written initial migration creates table
        # 'automation_run' with different indexes — confirm which schema is
        # live and reconcile.
        db_table = 'igny8_automation_runs'
        verbose_name = 'Automation Run'
        verbose_name_plural = 'Automation Runs'
        ordering = ['-started_at']
        indexes = [
            # Newest-first listings per site / status / account.
            models.Index(fields=['site', '-started_at']),
            models.Index(fields=['status', '-started_at']),
            models.Index(fields=['account', '-started_at']),
        ]

    def __str__(self):
        return f"{self.run_id} - {self.site.domain} ({self.status})"
|
||||
@@ -0,0 +1,7 @@
|
||||
"""
|
||||
Automation Services
|
||||
"""
|
||||
from .automation_service import AutomationService
|
||||
from .automation_logger import AutomationLogger
|
||||
|
||||
__all__ = ['AutomationService', 'AutomationLogger']
|
||||
@@ -0,0 +1,153 @@
|
||||
"""
|
||||
Automation Logger Service
|
||||
Handles file-based logging for automation runs
|
||||
"""
|
||||
import os
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AutomationLogger:
|
||||
"""File-based logging for automation runs"""
|
||||
|
||||
def __init__(self, base_log_dir: str = 'logs/automation'):
|
||||
self.base_log_dir = base_log_dir
|
||||
|
||||
def start_run(self, account_id: int, site_id: int, trigger_type: str) -> str:
|
||||
"""
|
||||
Create log directory structure and return run_id
|
||||
|
||||
Returns:
|
||||
run_id in format: run_20251203_140523_manual
|
||||
"""
|
||||
# Generate run_id
|
||||
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
run_id = f"run_{timestamp}_{trigger_type}"
|
||||
|
||||
# Create directory structure
|
||||
run_dir = self._get_run_dir(account_id, site_id, run_id)
|
||||
os.makedirs(run_dir, exist_ok=True)
|
||||
|
||||
# Create main log file
|
||||
log_file = os.path.join(run_dir, 'automation_run.log')
|
||||
with open(log_file, 'w') as f:
|
||||
f.write("=" * 80 + "\n")
|
||||
f.write(f"AUTOMATION RUN: {run_id}\n")
|
||||
f.write(f"Started: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
|
||||
f.write(f"Trigger: {trigger_type}\n")
|
||||
f.write(f"Account: {account_id}\n")
|
||||
f.write(f"Site: {site_id}\n")
|
||||
f.write("=" * 80 + "\n\n")
|
||||
|
||||
logger.info(f"[AutomationLogger] Created run: {run_id}")
|
||||
return run_id
|
||||
|
||||
def log_stage_start(self, run_id: str, account_id: int, site_id: int, stage_number: int, stage_name: str, pending_count: int):
|
||||
"""Log stage start"""
|
||||
timestamp = self._timestamp()
|
||||
|
||||
# Main log
|
||||
self._append_to_main_log(account_id, site_id, run_id,
|
||||
f"{timestamp} - Stage {stage_number} starting: {stage_name}")
|
||||
self._append_to_main_log(account_id, site_id, run_id,
|
||||
f"{timestamp} - Stage {stage_number}: Found {pending_count} pending items")
|
||||
|
||||
# Stage-specific log
|
||||
stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
|
||||
with open(stage_log, 'w') as f:
|
||||
f.write("=" * 80 + "\n")
|
||||
f.write(f"STAGE {stage_number}: {stage_name}\n")
|
||||
f.write(f"Started: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
|
||||
f.write("=" * 80 + "\n\n")
|
||||
f.write(f"{timestamp} - Found {pending_count} pending items\n")
|
||||
|
||||
def log_stage_progress(self, run_id: str, account_id: int, site_id: int, stage_number: int, message: str):
|
||||
"""Log stage progress"""
|
||||
timestamp = self._timestamp()
|
||||
log_message = f"{timestamp} - Stage {stage_number}: {message}"
|
||||
|
||||
# Main log
|
||||
self._append_to_main_log(account_id, site_id, run_id, log_message)
|
||||
|
||||
# Stage-specific log
|
||||
stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
|
||||
with open(stage_log, 'a') as f:
|
||||
f.write(f"{log_message}\n")
|
||||
|
||||
def log_stage_complete(self, run_id: str, account_id: int, site_id: int, stage_number: int,
|
||||
processed_count: int, time_elapsed: str, credits_used: int):
|
||||
"""Log stage completion"""
|
||||
timestamp = self._timestamp()
|
||||
|
||||
# Main log
|
||||
self._append_to_main_log(account_id, site_id, run_id,
|
||||
f"{timestamp} - Stage {stage_number} complete: {processed_count} items processed")
|
||||
|
||||
# Stage-specific log
|
||||
stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
|
||||
with open(stage_log, 'a') as f:
|
||||
f.write("\n" + "=" * 80 + "\n")
|
||||
f.write(f"STAGE {stage_number} COMPLETE\n")
|
||||
f.write(f"Total Time: {time_elapsed}\n")
|
||||
f.write(f"Processed: {processed_count} items\n")
|
||||
f.write(f"Credits Used: {credits_used}\n")
|
||||
f.write("=" * 80 + "\n")
|
||||
|
||||
def log_stage_error(self, run_id: str, account_id: int, site_id: int, stage_number: int, error_message: str):
|
||||
"""Log stage error"""
|
||||
timestamp = self._timestamp()
|
||||
log_message = f"{timestamp} - Stage {stage_number} ERROR: {error_message}"
|
||||
|
||||
# Main log
|
||||
self._append_to_main_log(account_id, site_id, run_id, log_message)
|
||||
|
||||
# Stage-specific log
|
||||
stage_log = self._get_stage_log_path(account_id, site_id, run_id, stage_number)
|
||||
with open(stage_log, 'a') as f:
|
||||
f.write(f"\n{log_message}\n")
|
||||
|
||||
def get_activity_log(self, account_id: int, site_id: int, run_id: str, last_n: int = 50) -> List[str]:
|
||||
"""
|
||||
Get last N lines from main activity log
|
||||
|
||||
Returns:
|
||||
List of log lines (newest first)
|
||||
"""
|
||||
log_file = os.path.join(self._get_run_dir(account_id, site_id, run_id), 'automation_run.log')
|
||||
|
||||
if not os.path.exists(log_file):
|
||||
return []
|
||||
|
||||
with open(log_file, 'r') as f:
|
||||
lines = f.readlines()
|
||||
|
||||
# Filter out header lines and empty lines
|
||||
activity_lines = [line.strip() for line in lines if line.strip() and not line.startswith('=')]
|
||||
|
||||
# Return last N lines (newest first)
|
||||
return list(reversed(activity_lines[-last_n:]))
|
||||
|
||||
# Helper methods
|
||||
|
||||
def _get_run_dir(self, account_id: int, site_id: int, run_id: str) -> str:
|
||||
"""Get run directory path"""
|
||||
return os.path.join(self.base_log_dir, str(account_id), str(site_id), run_id)
|
||||
|
||||
def _get_stage_log_path(self, account_id: int, site_id: int, run_id: str, stage_number: int) -> str:
|
||||
"""Get stage log file path"""
|
||||
run_dir = self._get_run_dir(account_id, site_id, run_id)
|
||||
return os.path.join(run_dir, f'stage_{stage_number}.log')
|
||||
|
||||
def _append_to_main_log(self, account_id: int, site_id: int, run_id: str, message: str):
|
||||
"""Append message to main log file"""
|
||||
log_file = os.path.join(self._get_run_dir(account_id, site_id, run_id), 'automation_run.log')
|
||||
with open(log_file, 'a') as f:
|
||||
f.write(f"{message}\n")
|
||||
|
||||
def _timestamp(self) -> str:
|
||||
"""Get formatted timestamp"""
|
||||
return datetime.now().strftime('%H:%M:%S')
|
||||
@@ -0,0 +1,824 @@
|
||||
"""
|
||||
Automation Service
|
||||
Core orchestrator that executes AI function stages sequentially
|
||||
"""
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Optional, List
|
||||
from django.db import transaction
|
||||
from django.db.models import Count, Q, F
|
||||
from django.core.cache import cache
|
||||
from celery.result import AsyncResult
|
||||
|
||||
from igny8_core.business.automation.models import AutomationRun, AutomationConfig
|
||||
from igny8_core.business.automation.services.automation_logger import AutomationLogger
|
||||
from igny8_core.modules.system.models import Account, Site
|
||||
from igny8_core.modules.planner.models import Keywords, Clusters, ContentIdeas
|
||||
from igny8_core.modules.writer.models import Tasks, Content, Images
|
||||
from igny8_core.business.content.models import AIUsageLog
|
||||
|
||||
# AI Functions
|
||||
from igny8_core.ai.functions.auto_cluster import AutoCluster
|
||||
from igny8_core.ai.functions.generate_ideas import GenerateIdeas
|
||||
from igny8_core.ai.functions.generate_content import GenerateContent
|
||||
from igny8_core.ai.functions.generate_image_prompts import GenerateImagePromptsFunction
|
||||
from igny8_core.ai.functions.generate_images import GenerateImages
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AutomationService:
|
||||
"""Orchestrates AI functions into automated pipeline"""
|
||||
|
||||
def __init__(self, account: Account, site: Site):
    """Bind the service to one (account, site) pair.

    Loads the site's AutomationConfig, creating a disabled 'daily @ 02:00'
    default on first use. ``self.run`` stays None until start_automation()
    creates a run or from_run_id() attaches an existing one.
    """
    self.account = account
    self.site = site
    self.logger = AutomationLogger()
    self.run = None
    self.config = None

    # Load or create config (one per site — AutomationConfig.site is OneToOne)
    self.config, _ = AutomationConfig.objects.get_or_create(
        account=account,
        site=site,
        defaults={
            'is_enabled': False,
            'frequency': 'daily',
            'scheduled_time': '02:00',
        }
    )
|
||||
|
||||
@classmethod
def from_run_id(cls, run_id: str) -> 'AutomationService':
    """Rehydrate a service instance for an existing run.

    Looks up the AutomationRun by its unique run_id, builds a service bound
    to that run's account/site, and attaches the run to it.

    Raises:
        AutomationRun.DoesNotExist: if no run matches ``run_id``.
    """
    existing_run = AutomationRun.objects.get(run_id=run_id)
    instance = cls(existing_run.account, existing_run.site)
    instance.run = existing_run
    return instance
|
||||
|
||||
def start_automation(self, trigger_type: str = 'manual') -> str:
    """
    Start automation run

    Validates that no run is already in flight for this site (DB status
    check plus a cache lock), verifies the account has enough credits,
    then creates the log directory and the AutomationRun record.

    Raises:
        ValueError: run already in progress, or insufficient credits.

    Returns:
        run_id
    """
    # Check for concurrent run
    # NOTE(review): this DB check and the cache.add below are not atomic
    # together; the cache lock is what actually prevents a race.
    if AutomationRun.objects.filter(site=self.site, status='running').exists():
        raise ValueError("Automation already running for this site")

    # Acquire distributed lock — cache.add only succeeds if the key is
    # absent, so at most one caller per site wins.
    lock_key = f'automation_lock_{self.site.id}'
    if not cache.add(lock_key, 'locked', timeout=21600):  # 6 hours
        raise ValueError("Automation already running for this site (cache lock)")

    try:
        # Estimate credits needed
        estimated_credits = self.estimate_credits()

        # Check credit balance (with 20% buffer)
        required_credits = int(estimated_credits * 1.2)
        if self.account.credits_balance < required_credits:
            raise ValueError(f"Insufficient credits. Need ~{required_credits}, you have {self.account.credits_balance}")

        # Create run_id and log files (also creates the log directory tree)
        run_id = self.logger.start_run(self.account.id, self.site.id, trigger_type)

        # Create AutomationRun record
        self.run = AutomationRun.objects.create(
            run_id=run_id,
            account=self.account,
            site=self.site,
            trigger_type=trigger_type,
            status='running',
            current_stage=1,
        )

        # Log start (stage 0 = pre-pipeline bookkeeping messages)
        self.logger.log_stage_progress(
            run_id, self.account.id, self.site.id, 0,
            f"Automation started (trigger: {trigger_type})"
        )
        self.logger.log_stage_progress(
            run_id, self.account.id, self.site.id, 0,
            f"Credit check: Account has {self.account.credits_balance} credits, estimated need: {estimated_credits} credits"
        )

        logger.info(f"[AutomationService] Started run: {run_id}")
        return run_id

    except Exception as e:
        # Release lock on failure. On success the lock stays held —
        # presumably released when the run finishes or the 6h timeout
        # expires; verify against the completion path (not shown here).
        cache.delete(lock_key)
        raise
|
||||
|
||||
def run_stage_1(self):
    """Stage 1: Keywords → Clusters

    Batches the site's unclustered 'new' keywords through the AutoCluster
    AI function, then records counts, elapsed time and the credits delta
    on ``self.run.stage_1_result`` and advances ``current_stage`` to 2.
    Requires ``self.run`` to be set (via start_automation/from_run_id).
    """
    stage_number = 1
    stage_name = "Keywords → Clusters (AI)"
    start_time = time.time()

    # Query pending keywords: new, not yet clustered, not disabled
    pending_keywords = Keywords.objects.filter(
        site=self.site,
        status='new',
        cluster__isnull=True,
        disabled=False
    )

    total_count = pending_keywords.count()

    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )

    if total_count == 0:
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No keywords to process - skipping stage"
        )
        self.run.stage_1_result = {'keywords_processed': 0, 'clusters_created': 0, 'batches_run': 0, 'credits_used': 0}
        self.run.current_stage = 2
        self.run.save()
        return

    # Process in batches
    batch_size = self.config.stage_1_batch_size
    keywords_processed = 0
    clusters_created = 0
    batches_run = 0
    # Snapshot so the stage's credit cost can be computed as a delta below.
    credits_before = self._get_credits_used()

    # Materialize IDs up front so AI-side status changes can't shift the set.
    keyword_ids = list(pending_keywords.values_list('id', flat=True))

    for i in range(0, len(keyword_ids), batch_size):
        batch = keyword_ids[i:i + batch_size]
        batch_num = (i // batch_size) + 1
        # Ceiling division
        total_batches = (len(keyword_ids) + batch_size - 1) // batch_size

        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Processing batch {batch_num}/{total_batches} ({len(batch)} keywords)"
        )

        # Call AI function
        result = AutoCluster().execute(
            payload={'ids': batch},
            account=self.account
        )

        # Monitor task — block until the async Celery task for this batch
        # finishes (if the AI function returned one).
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Batch {batch_num}")

        keywords_processed += len(batch)
        batches_run += 1

        # Log progress
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Batch {batch_num} complete"
        )

    # Get clusters created count.
    # NOTE(review): counts ALL clusters created for the site since the run
    # started, not only ones this stage produced — concurrent creation
    # would be included.
    clusters_created = Clusters.objects.filter(
        site=self.site,
        created_at__gte=self.run.started_at
    ).count()

    # Calculate credits used
    credits_used = self._get_credits_used() - credits_before

    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)

    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, keywords_processed, time_elapsed, credits_used
    )

    # Save results
    self.run.stage_1_result = {
        'keywords_processed': keywords_processed,
        'clusters_created': clusters_created,
        'batches_run': batches_run,
        'credits_used': credits_used
    }
    self.run.current_stage = 2
    self.run.total_credits_used += credits_used
    self.run.save()

    logger.info(f"[AutomationService] Stage 1 complete: {keywords_processed} keywords → {clusters_created} clusters")
|
||||
|
||||
def run_stage_2(self):
    """Stage 2: Clusters → Ideas

    Feeds each 'new' cluster without existing ideas through the
    GenerateIdeas AI function, one cluster at a time, then records counts
    and the credits delta on ``self.run.stage_2_result`` and advances
    ``current_stage`` to 3. Requires ``self.run`` to be set.
    """
    stage_number = 2
    stage_name = "Clusters → Ideas (AI)"
    start_time = time.time()

    # Query clusters without ideas: the exclude() drops any cluster that
    # already has at least one related idea.
    pending_clusters = Clusters.objects.filter(
        site=self.site,
        status='new',
        disabled=False
    ).exclude(
        ideas__isnull=False
    )

    total_count = pending_clusters.count()

    # Log stage start
    self.logger.log_stage_start(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, stage_name, total_count
    )

    if total_count == 0:
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, "No clusters to process - skipping stage"
        )
        self.run.stage_2_result = {'clusters_processed': 0, 'ideas_created': 0, 'credits_used': 0}
        self.run.current_stage = 3
        self.run.save()
        return

    # Process one at a time (stage_2_batch_size defaults to 1)
    clusters_processed = 0
    # Snapshot so the stage's credit cost can be computed as a delta below.
    credits_before = self._get_credits_used()

    for cluster in pending_clusters:
        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Generating ideas for cluster: {cluster.name}"
        )

        # Call AI function
        result = GenerateIdeas().execute(
            payload={'ids': [cluster.id]},
            account=self.account
        )

        # Monitor task — block until the async Celery task finishes
        task_id = result.get('task_id')
        if task_id:
            self._wait_for_task(task_id, stage_number, f"Cluster '{cluster.name}'")

        clusters_processed += 1

        self.logger.log_stage_progress(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, f"Cluster '{cluster.name}' complete"
        )

    # Get ideas created count.
    # NOTE(review): counts ALL ideas created for the site since the run
    # started, not only this stage's output.
    ideas_created = ContentIdeas.objects.filter(
        site=self.site,
        created_at__gte=self.run.started_at
    ).count()

    # Calculate credits used
    credits_used = self._get_credits_used() - credits_before

    # Calculate time elapsed
    time_elapsed = self._format_time_elapsed(start_time)

    # Log completion
    self.logger.log_stage_complete(
        self.run.run_id, self.account.id, self.site.id,
        stage_number, clusters_processed, time_elapsed, credits_used
    )

    # Save results
    self.run.stage_2_result = {
        'clusters_processed': clusters_processed,
        'ideas_created': ideas_created,
        'credits_used': credits_used
    }
    self.run.current_stage = 3
    self.run.total_credits_used += credits_used
    self.run.save()

    logger.info(f"[AutomationService] Stage 2 complete: {clusters_processed} clusters → {ideas_created} ideas")
|
||||
|
||||
def run_stage_3(self):
|
||||
"""Stage 3: Ideas → Tasks (Local Queue)"""
|
||||
stage_number = 3
|
||||
stage_name = "Ideas → Tasks (Local Queue)"
|
||||
start_time = time.time()
|
||||
|
||||
# Query pending ideas
|
||||
pending_ideas = ContentIdeas.objects.filter(
|
||||
site=self.site,
|
||||
status='new'
|
||||
)
|
||||
|
||||
total_count = pending_ideas.count()
|
||||
|
||||
# Log stage start
|
||||
self.logger.log_stage_start(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, stage_name, total_count
|
||||
)
|
||||
|
||||
if total_count == 0:
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, "No ideas to process - skipping stage"
|
||||
)
|
||||
self.run.stage_3_result = {'ideas_processed': 0, 'tasks_created': 0, 'batches_run': 0}
|
||||
self.run.current_stage = 4
|
||||
self.run.save()
|
||||
return
|
||||
|
||||
# Process in batches
|
||||
batch_size = self.config.stage_3_batch_size
|
||||
ideas_processed = 0
|
||||
tasks_created = 0
|
||||
batches_run = 0
|
||||
|
||||
idea_list = list(pending_ideas)
|
||||
|
||||
for i in range(0, len(idea_list), batch_size):
|
||||
batch = idea_list[i:i + batch_size]
|
||||
batch_num = (i // batch_size) + 1
|
||||
total_batches = (len(idea_list) + batch_size - 1) // batch_size
|
||||
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, f"Queueing batch {batch_num}/{total_batches} ({len(batch)} ideas)"
|
||||
)
|
||||
|
||||
# Create tasks (local operation)
|
||||
for idea in batch:
|
||||
# Build keywords string
|
||||
keywords_str = ''
|
||||
if idea.keyword_objects.exists():
|
||||
keywords_str = ', '.join([kw.keyword for kw in idea.keyword_objects.all()])
|
||||
elif idea.target_keywords:
|
||||
keywords_str = idea.target_keywords
|
||||
|
||||
# Create task
|
||||
task = Tasks.objects.create(
|
||||
title=idea.idea_title,
|
||||
description=idea.description or '',
|
||||
cluster=idea.keyword_cluster,
|
||||
content_type=idea.content_type or 'post',
|
||||
content_structure=idea.content_structure or 'article',
|
||||
keywords=keywords_str,
|
||||
status='queued',
|
||||
account=idea.account,
|
||||
site=idea.site,
|
||||
sector=idea.sector,
|
||||
idea=idea,
|
||||
)
|
||||
|
||||
# Update idea status
|
||||
idea.status = 'queued'
|
||||
idea.save()
|
||||
|
||||
tasks_created += 1
|
||||
|
||||
ideas_processed += len(batch)
|
||||
batches_run += 1
|
||||
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, f"Batch {batch_num} complete: {len(batch)} tasks created"
|
||||
)
|
||||
|
||||
# Calculate time elapsed
|
||||
time_elapsed = self._format_time_elapsed(start_time)
|
||||
|
||||
# Log completion
|
||||
self.logger.log_stage_complete(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, ideas_processed, time_elapsed, 0 # No credits for local operation
|
||||
)
|
||||
|
||||
# Save results
|
||||
self.run.stage_3_result = {
|
||||
'ideas_processed': ideas_processed,
|
||||
'tasks_created': tasks_created,
|
||||
'batches_run': batches_run
|
||||
}
|
||||
self.run.current_stage = 4
|
||||
self.run.save()
|
||||
|
||||
logger.info(f"[AutomationService] Stage 3 complete: {ideas_processed} ideas → {tasks_created} tasks")
|
||||
|
||||
def run_stage_4(self):
|
||||
"""Stage 4: Tasks → Content"""
|
||||
stage_number = 4
|
||||
stage_name = "Tasks → Content (AI)"
|
||||
start_time = time.time()
|
||||
|
||||
# Query queued tasks
|
||||
pending_tasks = Tasks.objects.filter(
|
||||
site=self.site,
|
||||
status='queued',
|
||||
content__isnull=True
|
||||
)
|
||||
|
||||
total_count = pending_tasks.count()
|
||||
|
||||
# Log stage start
|
||||
self.logger.log_stage_start(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, stage_name, total_count
|
||||
)
|
||||
|
||||
if total_count == 0:
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, "No tasks to process - skipping stage"
|
||||
)
|
||||
self.run.stage_4_result = {'tasks_processed': 0, 'content_created': 0, 'total_words': 0, 'credits_used': 0}
|
||||
self.run.current_stage = 5
|
||||
self.run.save()
|
||||
return
|
||||
|
||||
# Process one at a time
|
||||
tasks_processed = 0
|
||||
credits_before = self._get_credits_used()
|
||||
|
||||
for task in pending_tasks:
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, f"Generating content for task: {task.title}"
|
||||
)
|
||||
|
||||
# Call AI function
|
||||
result = GenerateContent().execute(
|
||||
payload={'ids': [task.id]},
|
||||
account=self.account
|
||||
)
|
||||
|
||||
# Monitor task
|
||||
task_id = result.get('task_id')
|
||||
if task_id:
|
||||
self._wait_for_task(task_id, stage_number, f"Task '{task.title}'")
|
||||
|
||||
tasks_processed += 1
|
||||
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, f"Task '{task.title}' complete"
|
||||
)
|
||||
|
||||
# Get content created count and total words
|
||||
content_created = Content.objects.filter(
|
||||
site=self.site,
|
||||
created_at__gte=self.run.started_at
|
||||
).count()
|
||||
|
||||
total_words = Content.objects.filter(
|
||||
site=self.site,
|
||||
created_at__gte=self.run.started_at
|
||||
).aggregate(total=Count('id'))['total'] * 2500 # Estimate
|
||||
|
||||
# Calculate credits used
|
||||
credits_used = self._get_credits_used() - credits_before
|
||||
|
||||
# Calculate time elapsed
|
||||
time_elapsed = self._format_time_elapsed(start_time)
|
||||
|
||||
# Log completion
|
||||
self.logger.log_stage_complete(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, tasks_processed, time_elapsed, credits_used
|
||||
)
|
||||
|
||||
# Save results
|
||||
self.run.stage_4_result = {
|
||||
'tasks_processed': tasks_processed,
|
||||
'content_created': content_created,
|
||||
'total_words': total_words,
|
||||
'credits_used': credits_used
|
||||
}
|
||||
self.run.current_stage = 5
|
||||
self.run.total_credits_used += credits_used
|
||||
self.run.save()
|
||||
|
||||
logger.info(f"[AutomationService] Stage 4 complete: {tasks_processed} tasks → {content_created} content")
|
||||
|
||||
    def run_stage_5(self):
        """Stage 5: Content → Image Prompts (AI).

        For each draft Content row with no related Images records, calls
        GenerateImagePromptsFunction and blocks on its Celery task before
        moving on (sequential processing to bound AI load).

        Side effects: stores ``run.stage_5_result``, advances
        ``run.current_stage`` to 6 and accumulates
        ``run.total_credits_used``.
        """
        stage_number = 5
        stage_name = "Content → Image Prompts (AI)"
        start_time = time.time()

        # Query draft content that has no Images records yet
        content_without_images = Content.objects.filter(
            site=self.site,
            status='draft'
        ).annotate(
            images_count=Count('images')
        ).filter(
            images_count=0
        )

        total_count = content_without_images.count()

        # Log stage start
        self.logger.log_stage_start(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, stage_name, total_count
        )

        if total_count == 0:
            self.logger.log_stage_progress(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, "No content to process - skipping stage"
            )
            self.run.stage_5_result = {'content_processed': 0, 'prompts_created': 0, 'credits_used': 0}
            self.run.current_stage = 6
            self.run.save()
            return

        # Process one content piece at a time
        content_processed = 0
        credits_before = self._get_credits_used()

        for content in content_without_images:
            self.logger.log_stage_progress(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, f"Extracting prompts from: {content.title}"
            )

            # Call AI function
            result = GenerateImagePromptsFunction().execute(
                payload={'ids': [content.id]},
                account=self.account
            )

            # Block until the Celery task completes (honors pause)
            task_id = result.get('task_id')
            if task_id:
                self._wait_for_task(task_id, stage_number, f"Content '{content.title}'")

            content_processed += 1

            self.logger.log_stage_progress(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, f"Content '{content.title}' complete"
            )

        # Count prompt rows created during this run ('pending' = prompt
        # exists but the image has not been generated yet)
        prompts_created = Images.objects.filter(
            site=self.site,
            status='pending',
            created_at__gte=self.run.started_at
        ).count()

        # Credits consumed by this stage alone
        credits_used = self._get_credits_used() - credits_before

        # Calculate time elapsed
        time_elapsed = self._format_time_elapsed(start_time)

        # Log completion
        self.logger.log_stage_complete(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, content_processed, time_elapsed, credits_used
        )

        # Save results
        self.run.stage_5_result = {
            'content_processed': content_processed,
            'prompts_created': prompts_created,
            'credits_used': credits_used
        }
        self.run.current_stage = 6
        self.run.total_credits_used += credits_used
        self.run.save()

        logger.info(f"[AutomationService] Stage 5 complete: {content_processed} content → {prompts_created} prompts")
    def run_stage_6(self):
        """Stage 6: Image Prompts → Generated Images (AI).

        For each pending Images row for the site, calls GenerateImages and
        blocks on its Celery task. Content whose images are generated is
        moved to 'review' elsewhere in the image pipeline (per the
        original comment this is an automatic side effect); this method
        only counts it for reporting.

        Side effects: stores ``run.stage_6_result``, advances
        ``run.current_stage`` to 7 and accumulates
        ``run.total_credits_used``.
        """
        stage_number = 6
        stage_name = "Images (Prompts) → Generated Images (AI)"
        start_time = time.time()

        # Query pending image-prompt rows
        pending_images = Images.objects.filter(
            site=self.site,
            status='pending'
        )

        total_count = pending_images.count()

        # Log stage start
        self.logger.log_stage_start(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, stage_name, total_count
        )

        if total_count == 0:
            self.logger.log_stage_progress(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, "No images to process - skipping stage"
            )
            self.run.stage_6_result = {'images_processed': 0, 'images_generated': 0, 'content_moved_to_review': 0, 'credits_used': 0}
            self.run.current_stage = 7
            self.run.save()
            return

        # Process one image at a time
        images_processed = 0
        credits_before = self._get_credits_used()

        for image in pending_images:
            # image.content may be null; keep logging robust
            content_title = image.content.title if image.content else 'Unknown'
            self.logger.log_stage_progress(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, f"Generating image: {image.image_type} for '{content_title}'"
            )

            # Call AI function
            result = GenerateImages().execute(
                payload={'image_ids': [image.id]},
                account=self.account
            )

            # Block until the Celery task completes (honors pause)
            task_id = result.get('task_id')
            if task_id:
                self._wait_for_task(task_id, stage_number, f"Image for '{content_title}'")

            images_processed += 1

            self.logger.log_stage_progress(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, f"Image generated for '{content_title}'"
            )

        # Count images actually generated during this run
        images_generated = Images.objects.filter(
            site=self.site,
            status='generated',
            updated_at__gte=self.run.started_at
        ).count()

        # Count content moved to review (automatic side effect of
        # image generation — not performed by this method)
        content_moved_to_review = Content.objects.filter(
            site=self.site,
            status='review',
            updated_at__gte=self.run.started_at
        ).count()

        # Credits consumed by this stage alone
        credits_used = self._get_credits_used() - credits_before

        # Calculate time elapsed
        time_elapsed = self._format_time_elapsed(start_time)

        # Log completion
        self.logger.log_stage_complete(
            self.run.run_id, self.account.id, self.site.id,
            stage_number, images_processed, time_elapsed, credits_used
        )

        # Save results
        self.run.stage_6_result = {
            'images_processed': images_processed,
            'images_generated': images_generated,
            'content_moved_to_review': content_moved_to_review,
            'credits_used': credits_used
        }
        self.run.current_stage = 7
        self.run.total_credits_used += credits_used
        self.run.save()

        logger.info(f"[AutomationService] Stage 6 complete: {images_processed} images generated, {content_moved_to_review} content moved to review")
def run_stage_7(self):
|
||||
"""Stage 7: Manual Review Gate (Count Only)"""
|
||||
stage_number = 7
|
||||
stage_name = "Manual Review Gate"
|
||||
|
||||
# Query content ready for review
|
||||
ready_for_review = Content.objects.filter(
|
||||
site=self.site,
|
||||
status='review'
|
||||
)
|
||||
|
||||
total_count = ready_for_review.count()
|
||||
content_ids = list(ready_for_review.values_list('id', flat=True))
|
||||
|
||||
# Log stage start
|
||||
self.logger.log_stage_start(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, stage_name, total_count
|
||||
)
|
||||
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, f"Automation complete. {total_count} content pieces ready for review"
|
||||
)
|
||||
|
||||
if content_ids:
|
||||
self.logger.log_stage_progress(
|
||||
self.run.run_id, self.account.id, self.site.id,
|
||||
stage_number, f"Content IDs ready: {content_ids[:10]}..." if len(content_ids) > 10 else f"Content IDs ready: {content_ids}"
|
||||
)
|
||||
|
||||
# Save results
|
||||
self.run.stage_7_result = {
|
||||
'ready_for_review': total_count,
|
||||
'content_ids': content_ids
|
||||
}
|
||||
self.run.status = 'completed'
|
||||
self.run.completed_at = datetime.now()
|
||||
self.run.save()
|
||||
|
||||
# Release lock
|
||||
cache.delete(f'automation_lock_{self.site.id}')
|
||||
|
||||
logger.info(f"[AutomationService] Stage 7 complete: Automation ended, {total_count} content ready for review")
|
||||
|
||||
def pause_automation(self):
|
||||
"""Pause current automation run"""
|
||||
if self.run:
|
||||
self.run.status = 'paused'
|
||||
self.run.save()
|
||||
logger.info(f"[AutomationService] Paused run: {self.run.run_id}")
|
||||
|
||||
def resume_automation(self):
|
||||
"""Resume paused automation run"""
|
||||
if self.run and self.run.status == 'paused':
|
||||
self.run.status = 'running'
|
||||
self.run.save()
|
||||
logger.info(f"[AutomationService] Resumed run: {self.run.run_id}")
|
||||
|
||||
    def estimate_credits(self) -> int:
        """Estimate total credits needed for a full automation run.

        Counts the items pending at each pipeline stage and applies fixed
        heuristic rates (see inline comments). The rates are rough
        averages — actual usage depends on content length and image count.

        Returns:
            int: estimated total credits across all AI stages.
        """
        # Count items pending at each stage
        keywords_count = Keywords.objects.filter(site=self.site, status='new', cluster__isnull=True).count()
        clusters_count = Clusters.objects.filter(site=self.site, status='new').exclude(ideas__isnull=False).count()
        ideas_count = ContentIdeas.objects.filter(site=self.site, status='new').count()
        tasks_count = Tasks.objects.filter(site=self.site, status='queued', content__isnull=True).count()
        content_count = Content.objects.filter(site=self.site, status='draft').annotate(images_count=Count('images')).filter(images_count=0).count()

        # Heuristic per-stage credit rates
        clustering_credits = (keywords_count // 5) + 1  # 1 credit per 5 keywords (minimum 1)
        ideas_credits = clusters_count * 2  # 2 credits per cluster
        content_credits = tasks_count * 5  # Assume 2500 words avg = 5 credits
        prompts_credits = content_count * 2  # Assume 4 prompts per content = 2 credits
        images_credits = content_count * 8  # Assume 4 images * 2 credits avg

        # NOTE(review): ideas_count is computed but not part of the total —
        # presumably because stage 3 (ideas → tasks) is local and free;
        # confirm whether the count is needed at all.
        total = clustering_credits + ideas_credits + content_credits + prompts_credits + images_credits

        logger.info(f"[AutomationService] Estimated credits: {total}")
        return total
    def _wait_for_task(self, task_id: str, stage_number: int, item_name: str):
        """Block until the given Celery task finishes, honoring pauses.

        Polls ``AsyncResult`` every 3 seconds. On each poll the run row is
        re-read from the DB so an externally requested pause
        (``run.status == 'paused'``) suspends waiting until resumed.

        Args:
            task_id: Celery task id returned by the AI function call.
            stage_number: stage for error logging.
            item_name: human-readable label for log messages.

        Raises:
            Exception: if the Celery task ends in the FAILURE state.

        NOTE(review): there is no overall timeout — a task that never
        completes blocks this worker indefinitely; confirm whether a
        max-wait guard is wanted.
        """
        result = AsyncResult(task_id)

        while not result.ready():
            time.sleep(3)  # Poll every 3 seconds

            # Check for an externally requested pause
            self.run.refresh_from_db()
            if self.run.status == 'paused':
                logger.info(f"[AutomationService] Paused during {item_name}")
                # Busy-wait (5s interval) until the run is resumed
                while self.run.status == 'paused':
                    time.sleep(5)
                    self.run.refresh_from_db()

        if result.failed():
            error_msg = f"Task failed for {item_name}"
            self.logger.log_stage_error(
                self.run.run_id, self.account.id, self.site.id,
                stage_number, error_msg
            )
            raise Exception(error_msg)
    def _get_credits_used(self) -> int:
        """Return credits consumed by this run so far.

        Counts AIUsageLog rows for the account created since
        ``run.started_at``; returns 0 when no run is attached.

        NOTE(review): this uses Count('id'), i.e. it assumes exactly one
        credit per AIUsageLog row. If the log model records a per-row
        credit amount, this should be a Sum() over that field — confirm
        against the AIUsageLog schema.
        """
        if not self.run:
            return 0

        total = AIUsageLog.objects.filter(
            account=self.account,
            created_at__gte=self.run.started_at
        ).aggregate(total=Count('id'))['total'] or 0

        return total
def _format_time_elapsed(self, start_time: float) -> str:
|
||||
"""Format elapsed time"""
|
||||
elapsed = time.time() - start_time
|
||||
minutes = int(elapsed // 60)
|
||||
seconds = int(elapsed % 60)
|
||||
return f"{minutes}m {seconds}s"
|
||||
195
backend/igny8_core/business/automation/tasks.py
Normal file
195
backend/igny8_core/business/automation/tasks.py
Normal file
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
Automation Celery Tasks
|
||||
Background tasks for automation pipeline
|
||||
"""
|
||||
from celery import shared_task, chain
|
||||
from celery.utils.log import get_task_logger
|
||||
from datetime import datetime, timedelta
|
||||
from django.utils import timezone
|
||||
|
||||
from igny8_core.business.automation.models import AutomationConfig, AutomationRun
|
||||
from igny8_core.business.automation.services import AutomationService
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
@shared_task(name='automation.check_scheduled_automations')
def check_scheduled_automations():
    """
    Check for scheduled automation runs (intended to fire once per hour).

    For every enabled AutomationConfig: decide whether the current hour
    matches the configured schedule, skip sites that ran within the last
    ~23h or that already have a run in progress, then start a new run and
    hand processing to a Celery worker.
    """
    logger.info("[AutomationTask] Checking scheduled automations")

    now = timezone.now()
    current_hour = now.time().hour

    # Find configs that should run now
    for config in AutomationConfig.objects.filter(is_enabled=True):
        # FIX: the original also tested `current_time.minute < 60`, which
        # is always true — matching on the scheduled hour alone is the
        # actual behavior, made explicit here. The beat schedule is
        # expected to fire this task once per hour.
        hour_matches = current_hour == config.scheduled_time.hour

        if config.frequency == 'daily':
            should_run = hour_matches
        elif config.frequency == 'weekly':
            should_run = hour_matches and now.weekday() == 0  # Mondays
        elif config.frequency == 'monthly':
            should_run = hour_matches and now.day == 1  # 1st of month
        else:
            should_run = False

        if not should_run:
            continue

        # Debounce: skip if a run already started within ~23 hours
        # (guards against double-firing for the same schedule slot)
        if config.last_run_at:
            time_since_last_run = now - config.last_run_at
            if time_since_last_run < timedelta(hours=23):
                logger.info(f"[AutomationTask] Skipping site {config.site.id} - already ran today")
                continue

        # Never start a second concurrent run for the same site
        if AutomationRun.objects.filter(site=config.site, status='running').exists():
            logger.info(f"[AutomationTask] Skipping site {config.site.id} - already running")
            continue

        logger.info(f"[AutomationTask] Starting scheduled automation for site {config.site.id}")

        try:
            service = AutomationService(config.account, config.site)
            run_id = service.start_automation(trigger_type='scheduled')

            # Record this run and pre-compute the next slot
            config.last_run_at = now
            config.next_run_at = _calculate_next_run(config, now)
            config.save()

            # Start async processing
            run_automation_task.delay(run_id)

        except Exception as e:
            logger.error(f"[AutomationTask] Failed to start automation for site {config.site.id}: {e}")
@shared_task(name='automation.run_automation_task', bind=True, max_retries=0)
def run_automation_task(self, run_id: str):
    """
    Run the full automation pipeline (stages 1-7) for an existing run.

    On any stage failure the run is marked 'failed', the per-site cache
    lock is released, and the original exception is re-raised so Celery
    records the failure.
    """
    logger.info(f"[AutomationTask] Starting automation run: {run_id}")

    try:
        service = AutomationService.from_run_id(run_id)

        # Run all stages sequentially
        service.run_stage_1()
        service.run_stage_2()
        service.run_stage_3()
        service.run_stage_4()
        service.run_stage_5()
        service.run_stage_6()
        service.run_stage_7()

        logger.info(f"[AutomationTask] Completed automation run: {run_id}")

    except Exception as e:
        logger.error(f"[AutomationTask] Failed automation run {run_id}: {e}")

        # FIX: use filter().first() instead of .get() — if the run row is
        # missing, .get() would raise DoesNotExist here and mask the
        # original pipeline error.
        run = AutomationRun.objects.filter(run_id=run_id).first()
        if run is not None:
            run.status = 'failed'
            run.error_message = str(e)
            run.completed_at = timezone.now()
            run.save()

            # Release the per-site automation lock
            from django.core.cache import cache
            cache.delete(f'automation_lock_{run.site.id}')

        raise
@shared_task(name='automation.resume_automation_task', bind=True, max_retries=0)
def resume_automation_task(self, run_id: str):
    """
    Resume a paused automation run from its current stage.

    Re-hydrates the service from run_id and executes the remaining stage
    methods (run.current_stage .. 7). On failure the run is marked
    'failed', the per-site lock is released, and the exception re-raised.
    """
    logger.info(f"[AutomationTask] Resuming automation run: {run_id}")

    try:
        service = AutomationService.from_run_id(run_id)
        run = service.run

        # Stage methods indexed 0-6 ↔ pipeline stages 1-7
        stage_methods = [
            service.run_stage_1,
            service.run_stage_2,
            service.run_stage_3,
            service.run_stage_4,
            service.run_stage_5,
            service.run_stage_6,
            service.run_stage_7,
        ]

        # Run from current_stage to the end
        for stage in range(run.current_stage - 1, 7):
            stage_methods[stage]()

        logger.info(f"[AutomationTask] Resumed automation run: {run_id}")

    except Exception as e:
        logger.error(f"[AutomationTask] Failed to resume automation run {run_id}: {e}")

        # FIX: use filter().first() instead of .get() — if the run row is
        # missing, .get() would raise DoesNotExist here and mask the
        # original error (same guard as run_automation_task).
        run = AutomationRun.objects.filter(run_id=run_id).first()
        if run is not None:
            run.status = 'failed'
            run.error_message = str(e)
            run.completed_at = timezone.now()
            run.save()

            # Release the per-site automation lock
            from django.core.cache import cache
            cache.delete(f'automation_lock_{run.site.id}')

        raise
def _apply_scheduled_time(dt: datetime, config: AutomationConfig) -> datetime:
    """Pin a datetime to the config's scheduled wall-clock time."""
    return dt.replace(
        hour=config.scheduled_time.hour,
        minute=config.scheduled_time.minute,
        second=0,
        microsecond=0
    )


def _calculate_next_run(config: AutomationConfig, now: datetime) -> datetime:
    """Calculate the next run time based on the config's frequency.

    daily   → tomorrow at scheduled_time
    weekly  → next Monday at scheduled_time
    monthly → 1st of next month at scheduled_time
    other   → tomorrow, time not pinned (matches original fallback)

    The triplicated ``replace(hour=…, minute=…, …)`` call in the original
    is extracted into ``_apply_scheduled_time``.
    """
    if config.frequency == 'daily':
        next_run = _apply_scheduled_time(now + timedelta(days=1), config)
    elif config.frequency == 'weekly':
        # Next Monday; if today is Monday, schedule a full week out
        days_until_monday = (7 - now.weekday()) % 7
        if days_until_monday == 0:
            days_until_monday = 7
        next_run = _apply_scheduled_time(now + timedelta(days=days_until_monday), config)
    elif config.frequency == 'monthly':
        # 1st of next month (day=1 first, so month rollover is safe)
        if now.month == 12:
            next_run = now.replace(year=now.year + 1, month=1, day=1)
        else:
            next_run = now.replace(month=now.month + 1, day=1)
        next_run = _apply_scheduled_time(next_run, config)
    else:
        # Unknown frequency: best-effort daily fallback (time not pinned)
        next_run = now + timedelta(days=1)

    return next_run
13
backend/igny8_core/business/automation/urls.py
Normal file
13
backend/igny8_core/business/automation/urls.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""
Automation URLs

Registers AutomationViewSet at the root of this urlconf; DefaultRouter
exposes the viewset's @action endpoints (config, update_config, run_now,
current_run, pause, resume, history, logs).
"""
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from igny8_core.business.automation.views import AutomationViewSet

# Empty prefix: routes live directly under wherever this module is mounted
router = DefaultRouter()
router.register(r'', AutomationViewSet, basename='automation')

urlpatterns = [
    path('', include(router.urls)),
]
|
||||
313
backend/igny8_core/business/automation/views.py
Normal file
313
backend/igny8_core/business/automation/views.py
Normal file
@@ -0,0 +1,313 @@
|
||||
"""
|
||||
Automation API Views
|
||||
REST API endpoints for automation management
|
||||
"""
|
||||
from rest_framework import viewsets, status
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone
|
||||
|
||||
from igny8_core.business.automation.models import AutomationConfig, AutomationRun
|
||||
from igny8_core.business.automation.services import AutomationService
|
||||
from igny8_core.modules.system.models import Account, Site
|
||||
|
||||
|
||||
class AutomationViewSet(viewsets.ViewSet):
|
||||
"""API endpoints for automation"""
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def _get_site(self, request):
|
||||
"""Get site from request"""
|
||||
site_id = request.query_params.get('site_id')
|
||||
if not site_id:
|
||||
return None, Response(
|
||||
{'error': 'site_id required'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
site = get_object_or_404(Site, id=site_id, account__user=request.user)
|
||||
return site, None
|
||||
|
||||
    @action(detail=False, methods=['get'])
    def config(self, request):
        """
        GET /api/v1/automation/config/?site_id=123
        Get automation configuration for site.

        Lazily creates a disabled default config (daily at 02:00) the
        first time a site is queried, so this endpoint never 404s for a
        valid site.
        """
        site, error_response = self._get_site(request)
        if error_response:
            return error_response

        config, _ = AutomationConfig.objects.get_or_create(
            account=site.account,
            site=site,
            defaults={
                'is_enabled': False,
                'frequency': 'daily',
                'scheduled_time': '02:00',
            }
        )

        return Response({
            'is_enabled': config.is_enabled,
            'frequency': config.frequency,
            'scheduled_time': str(config.scheduled_time),
            'stage_1_batch_size': config.stage_1_batch_size,
            'stage_2_batch_size': config.stage_2_batch_size,
            'stage_3_batch_size': config.stage_3_batch_size,
            'stage_4_batch_size': config.stage_4_batch_size,
            'stage_5_batch_size': config.stage_5_batch_size,
            'stage_6_batch_size': config.stage_6_batch_size,
            'last_run_at': config.last_run_at,
            'next_run_at': config.next_run_at,
        })
@action(detail=False, methods=['put'])
|
||||
def update_config(self, request):
|
||||
"""
|
||||
PUT /api/v1/automation/update_config/?site_id=123
|
||||
Update automation configuration
|
||||
|
||||
Body:
|
||||
{
|
||||
"is_enabled": true,
|
||||
"frequency": "daily",
|
||||
"scheduled_time": "02:00",
|
||||
"stage_1_batch_size": 20,
|
||||
...
|
||||
}
|
||||
"""
|
||||
site, error_response = self._get_site(request)
|
||||
if error_response:
|
||||
return error_response
|
||||
|
||||
config, _ = AutomationConfig.objects.get_or_create(
|
||||
account=site.account,
|
||||
site=site
|
||||
)
|
||||
|
||||
# Update fields
|
||||
if 'is_enabled' in request.data:
|
||||
config.is_enabled = request.data['is_enabled']
|
||||
if 'frequency' in request.data:
|
||||
config.frequency = request.data['frequency']
|
||||
if 'scheduled_time' in request.data:
|
||||
config.scheduled_time = request.data['scheduled_time']
|
||||
if 'stage_1_batch_size' in request.data:
|
||||
config.stage_1_batch_size = request.data['stage_1_batch_size']
|
||||
if 'stage_2_batch_size' in request.data:
|
||||
config.stage_2_batch_size = request.data['stage_2_batch_size']
|
||||
if 'stage_3_batch_size' in request.data:
|
||||
config.stage_3_batch_size = request.data['stage_3_batch_size']
|
||||
if 'stage_4_batch_size' in request.data:
|
||||
config.stage_4_batch_size = request.data['stage_4_batch_size']
|
||||
if 'stage_5_batch_size' in request.data:
|
||||
config.stage_5_batch_size = request.data['stage_5_batch_size']
|
||||
if 'stage_6_batch_size' in request.data:
|
||||
config.stage_6_batch_size = request.data['stage_6_batch_size']
|
||||
|
||||
config.save()
|
||||
|
||||
return Response({'message': 'Config updated'})
|
||||
|
||||
    @action(detail=False, methods=['post'])
    def run_now(self, request):
        """
        POST /api/v1/automation/run_now/?site_id=123
        Trigger automation run immediately.

        Creates the run record synchronously, then hands the pipeline to
        a Celery worker. ValueError from start_automation (e.g. a run is
        already in progress) maps to 400; any other failure to 500.
        """
        site, error_response = self._get_site(request)
        if error_response:
            return error_response

        try:
            service = AutomationService(site.account, site)
            run_id = service.start_automation(trigger_type='manual')

            # Start async processing (local import avoids a circular
            # import between views and tasks at module load time)
            from igny8_core.business.automation.tasks import run_automation_task
            run_automation_task.delay(run_id)

            return Response({
                'run_id': run_id,
                'message': 'Automation started'
            })
        except ValueError as e:
            return Response(
                {'error': str(e)},
                status=status.HTTP_400_BAD_REQUEST
            )
        except Exception as e:
            return Response(
                {'error': f'Failed to start automation: {str(e)}'},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )
@action(detail=False, methods=['get'])
|
||||
def current_run(self, request):
|
||||
"""
|
||||
GET /api/v1/automation/current_run/?site_id=123
|
||||
Get current automation run status
|
||||
"""
|
||||
site, error_response = self._get_site(request)
|
||||
if error_response:
|
||||
return error_response
|
||||
|
||||
run = AutomationRun.objects.filter(
|
||||
site=site,
|
||||
status__in=['running', 'paused']
|
||||
).order_by('-started_at').first()
|
||||
|
||||
if not run:
|
||||
return Response({'run': None})
|
||||
|
||||
return Response({
|
||||
'run': {
|
||||
'run_id': run.run_id,
|
||||
'status': run.status,
|
||||
'current_stage': run.current_stage,
|
||||
'trigger_type': run.trigger_type,
|
||||
'started_at': run.started_at,
|
||||
'total_credits_used': run.total_credits_used,
|
||||
'stage_1_result': run.stage_1_result,
|
||||
'stage_2_result': run.stage_2_result,
|
||||
'stage_3_result': run.stage_3_result,
|
||||
'stage_4_result': run.stage_4_result,
|
||||
'stage_5_result': run.stage_5_result,
|
||||
'stage_6_result': run.stage_6_result,
|
||||
'stage_7_result': run.stage_7_result,
|
||||
}
|
||||
})
|
||||
|
||||
@action(detail=False, methods=['post'])
|
||||
def pause(self, request):
|
||||
"""
|
||||
POST /api/v1/automation/pause/?run_id=abc123
|
||||
Pause automation run
|
||||
"""
|
||||
run_id = request.query_params.get('run_id')
|
||||
if not run_id:
|
||||
return Response(
|
||||
{'error': 'run_id required'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
try:
|
||||
service = AutomationService.from_run_id(run_id)
|
||||
service.pause_automation()
|
||||
return Response({'message': 'Automation paused'})
|
||||
except AutomationRun.DoesNotExist:
|
||||
return Response(
|
||||
{'error': 'Run not found'},
|
||||
status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
|
||||
@action(detail=False, methods=['post'])
def resume(self, request):
    """
    POST /api/v1/automation/resume/?run_id=abc123

    Resume a paused automation run and re-queue its asynchronous
    stage processing via Celery.

    Returns 400 when run_id is missing, 404 when no such run exists.
    """
    run_id = request.query_params.get('run_id')
    if not run_id:
        return Response(
            {'error': 'run_id required'},
            status=status.HTTP_400_BAD_REQUEST,
        )

    try:
        automation = AutomationService.from_run_id(run_id)
        automation.resume_automation()

        # Hand the run back to the async pipeline.
        from igny8_core.business.automation.tasks import resume_automation_task
        resume_automation_task.delay(run_id)

        return Response({'message': 'Automation resumed'})
    except AutomationRun.DoesNotExist:
        return Response(
            {'error': 'Run not found'},
            status=status.HTTP_404_NOT_FOUND,
        )
|
||||
|
||||
@action(detail=False, methods=['get'])
def history(self, request):
    """
    GET /api/v1/automation/history/?site_id=123

    Return the 20 most recent automation runs for the site, newest first.
    """
    site, error_response = self._get_site(request)
    if error_response:
        return error_response

    recent_runs = (
        AutomationRun.objects
        .filter(site=site)
        .order_by('-started_at')[:20]
    )

    def summarize(run):
        # Compact per-run summary used by the history listing.
        return {
            'run_id': run.run_id,
            'status': run.status,
            'trigger_type': run.trigger_type,
            'started_at': run.started_at,
            'completed_at': run.completed_at,
            'total_credits_used': run.total_credits_used,
            'current_stage': run.current_stage,
        }

    return Response({'runs': [summarize(run) for run in recent_runs]})
|
||||
|
||||
@action(detail=False, methods=['get'])
def logs(self, request):
    """
    GET /api/v1/automation/logs/?run_id=abc123&lines=100

    Return the tail of the activity log for an automation run.

    Query params:
        run_id: required run identifier.
        lines:  optional number of log lines to return (default 100).

    Responses:
        200 with {'run_id', 'log'} on success,
        400 when run_id is missing or lines is not a valid integer,
        404 when the run does not exist.
    """
    run_id = request.query_params.get('run_id')
    if not run_id:
        return Response(
            {'error': 'run_id required'},
            status=status.HTTP_400_BAD_REQUEST
        )

    # Bug fix: a non-numeric 'lines' value previously raised an unhandled
    # ValueError (HTTP 500); reject it explicitly as a client error.
    try:
        lines = int(request.query_params.get('lines', 100))
    except (TypeError, ValueError):
        return Response(
            {'error': 'lines must be an integer'},
            status=status.HTTP_400_BAD_REQUEST
        )

    try:
        run = AutomationRun.objects.get(run_id=run_id)
        service = AutomationService(run.account, run.site)

        log_text = service.logger.get_activity_log(
            run_id, run.account.id, run.site.id, lines
        )

        return Response({
            'run_id': run_id,
            'log': log_text
        })
    except AutomationRun.DoesNotExist:
        return Response(
            {'error': 'Run not found'},
            status=status.HTTP_404_NOT_FOUND
        )
|
||||
|
||||
@action(detail=False, methods=['get'])
def estimate(self, request):
    """
    GET /api/v1/automation/estimate/?site_id=123

    Estimate the credits an automation run would consume for this site and
    report whether the account balance covers it.
    """
    site, error_response = self._get_site(request)
    if error_response:
        return error_response

    estimated = AutomationService(site.account, site).estimate_credits()
    balance = site.account.credits_balance

    # 'sufficient' requires the balance to cover the estimate plus a
    # 20% safety buffer (estimate * 1.2).
    return Response({
        'estimated_credits': estimated,
        'current_balance': balance,
        'sufficient': balance >= (estimated * 1.2)
    })
|
||||
@@ -38,6 +38,11 @@ app.conf.beat_schedule = {
|
||||
'task': 'igny8_core.tasks.wordpress_publishing.retry_failed_wordpress_publications',
|
||||
'schedule': crontab(hour='*/6', minute=30), # Every 6 hours at :30
|
||||
},
|
||||
# Automation Tasks
|
||||
'check-scheduled-automations': {
|
||||
'task': 'automation.check_scheduled_automations',
|
||||
'schedule': crontab(minute=0), # Every hour at :00
|
||||
},
|
||||
}
|
||||
|
||||
@app.task(bind=True, ignore_result=True)
|
||||
|
||||
@@ -42,7 +42,7 @@ urlpatterns = [
|
||||
# Site Builder module removed - legacy blueprint functionality deprecated
|
||||
path('api/v1/system/', include('igny8_core.modules.system.urls')),
|
||||
path('api/v1/billing/', include('igny8_core.modules.billing.urls')), # Billing endpoints
|
||||
# path('api/v1/automation/', include('igny8_core.modules.automation.urls')), # Automation endpoints - REMOVED
|
||||
path('api/v1/automation/', include('igny8_core.business.automation.urls')), # Automation endpoints
|
||||
path('api/v1/linker/', include('igny8_core.modules.linker.urls')), # Linker endpoints
|
||||
path('api/v1/optimizer/', include('igny8_core.modules.optimizer.urls')), # Optimizer endpoints
|
||||
path('api/v1/publisher/', include('igny8_core.modules.publisher.urls')), # Publisher endpoints
|
||||
|
||||
Reference in New Issue
Block a user