From f0066b6e7d2b70a9ea045a247008c1fd7ab9b9c7 Mon Sep 17 00:00:00 2001 From: "IGNY8 VPS (Salman)" Date: Sun, 7 Dec 2025 17:40:07 +0000 Subject: [PATCH] copy --- tenant/backend/igny8_core/ai/ai_core.py | 868 +++++++++ tenant/backend/igny8_core/api/base.py | 433 +++++ tenant/backend/igny8_core/api/permissions.py | 180 ++ tenant/backend/igny8_core/api/throttles.py | 146 ++ tenant/backend/igny8_core/auth/middleware.py | 174 ++ tenant/backend/igny8_core/auth/models.py | 647 +++++++ tenant/backend/igny8_core/auth/permissions.py | 77 + tenant/backend/igny8_core/auth/views.py | 1536 +++++++++++++++ .../igny8_core/middleware/request_id.py | 43 + .../igny8_core/middleware/resource_tracker.py | 135 ++ .../igny8_core/modules/billing/views.py | 581 ++++++ .../igny8_core/modules/planner/views.py | 1182 ++++++++++++ .../modules/system/integration_views.py | 1392 ++++++++++++++ .../igny8_core/modules/system/views.py | 782 ++++++++ .../igny8_core/modules/writer/views.py | 1710 +++++++++++++++++ tenant/backend/igny8_core/settings.py | 616 ++++++ tenant/frontend/src/App.tsx | 843 ++++++++ .../src/components/auth/AdminGuard.tsx | 25 + .../src/components/auth/ProtectedRoute.tsx | 183 ++ .../src/components/common/ModuleGuard.tsx | 41 + tenant/frontend/src/layout/AppSidebar.tsx | 636 ++++++ .../07-MULTITENANCY-ACCESS-REFERENCE.md | 104 + 22 files changed, 12334 insertions(+) create mode 100644 tenant/backend/igny8_core/ai/ai_core.py create mode 100644 tenant/backend/igny8_core/api/base.py create mode 100644 tenant/backend/igny8_core/api/permissions.py create mode 100644 tenant/backend/igny8_core/api/throttles.py create mode 100644 tenant/backend/igny8_core/auth/middleware.py create mode 100644 tenant/backend/igny8_core/auth/models.py create mode 100644 tenant/backend/igny8_core/auth/permissions.py create mode 100644 tenant/backend/igny8_core/auth/views.py create mode 100644 tenant/backend/igny8_core/middleware/request_id.py create mode 100644 
tenant/backend/igny8_core/middleware/resource_tracker.py create mode 100644 tenant/backend/igny8_core/modules/billing/views.py create mode 100644 tenant/backend/igny8_core/modules/planner/views.py create mode 100644 tenant/backend/igny8_core/modules/system/integration_views.py create mode 100644 tenant/backend/igny8_core/modules/system/views.py create mode 100644 tenant/backend/igny8_core/modules/writer/views.py create mode 100644 tenant/backend/igny8_core/settings.py create mode 100644 tenant/frontend/src/App.tsx create mode 100644 tenant/frontend/src/components/auth/AdminGuard.tsx create mode 100644 tenant/frontend/src/components/auth/ProtectedRoute.tsx create mode 100644 tenant/frontend/src/components/common/ModuleGuard.tsx create mode 100644 tenant/frontend/src/layout/AppSidebar.tsx create mode 100644 tenant/master-docs/00-system/07-MULTITENANCY-ACCESS-REFERENCE.md diff --git a/tenant/backend/igny8_core/ai/ai_core.py b/tenant/backend/igny8_core/ai/ai_core.py new file mode 100644 index 00000000..df9f166b --- /dev/null +++ b/tenant/backend/igny8_core/ai/ai_core.py @@ -0,0 +1,868 @@ +""" +AI Core - Centralized execution and logging layer for all AI requests +Handles API calls, model selection, response parsing, and console logging +""" +import logging +import json +import re +import requests +import time +from typing import Dict, Any, Optional, List +from django.conf import settings + +from .constants import ( + DEFAULT_AI_MODEL, + JSON_MODE_MODELS, + MODEL_RATES, + IMAGE_MODEL_RATES, + VALID_OPENAI_IMAGE_MODELS, + VALID_SIZES_BY_MODEL, + DEBUG_MODE, +) +from .tracker import ConsoleStepTracker + +logger = logging.getLogger(__name__) + + +class AICore: + """ + Centralized AI operations handler with console logging. + All AI requests go through run_ai_request() for consistent execution and logging. + """ + + def __init__(self, account=None): + """ + Initialize AICore with account context. 
+ + Args: + account: Optional account object for API key/model loading + """ + self.account = account + self._openai_api_key = None + self._runware_api_key = None + self._load_account_settings() + + def _load_account_settings(self): + """Load API keys from IntegrationSettings with fallbacks (account -> system account -> Django settings)""" + def get_system_account(): + try: + from igny8_core.auth.models import Account + for slug in ['aws-admin', 'default-account', 'default']: + acct = Account.objects.filter(slug=slug).first() + if acct: + return acct + except Exception: + return None + return None + + def get_integration_key(integration_type: str, account): + if not account: + return None + try: + from igny8_core.modules.system.models import IntegrationSettings + settings_obj = IntegrationSettings.objects.filter( + integration_type=integration_type, + account=account, + is_active=True + ).first() + if settings_obj and settings_obj.config: + return settings_obj.config.get('apiKey') + except Exception as e: + logger.warning(f"Could not load {integration_type} settings for account {getattr(account, 'id', None)}: {e}", exc_info=True) + return None + + # 1) Account-specific keys + if self.account: + self._openai_api_key = get_integration_key('openai', self.account) + self._runware_api_key = get_integration_key('runware', self.account) + + # 2) Fallback to system account keys (shared across tenants) + if not self._openai_api_key or not self._runware_api_key: + system_account = get_system_account() + if not self._openai_api_key: + self._openai_api_key = get_integration_key('openai', system_account) + if not self._runware_api_key: + self._runware_api_key = get_integration_key('runware', system_account) + + # 3) Fallback to Django settings + if not self._openai_api_key: + self._openai_api_key = getattr(settings, 'OPENAI_API_KEY', None) + if not self._runware_api_key: + self._runware_api_key = getattr(settings, 'RUNWARE_API_KEY', None) + + def get_api_key(self, 
integration_type: str = 'openai') -> Optional[str]: + """Get API key for integration type""" + if integration_type == 'openai': + return self._openai_api_key + elif integration_type == 'runware': + return self._runware_api_key + return None + + def get_model(self, integration_type: str = 'openai') -> str: + """ + Get model for integration type. + DEPRECATED: Model should be passed directly to run_ai_request(). + This method is kept for backward compatibility but raises an error. + """ + raise ValueError( + "get_model() is deprecated. Model must be passed directly to run_ai_request(). " + "Use get_model_config() from settings.py to get model from IntegrationSettings." + ) + + def run_ai_request( + self, + prompt: str, + model: str, + max_tokens: int = 4000, + temperature: float = 0.7, + response_format: Optional[Dict] = None, + api_key: Optional[str] = None, + function_name: str = 'ai_request', + function_id: Optional[str] = None, + tracker: Optional[ConsoleStepTracker] = None + ) -> Dict[str, Any]: + """ + Centralized AI request handler with console logging. + All AI text generation requests go through this method. + + Args: + prompt: Prompt text + model: Model name (required - must be provided from IntegrationSettings) + max_tokens: Maximum tokens + temperature: Temperature (0-1) + response_format: Optional response format dict (for JSON mode) + api_key: Optional API key override + function_name: Function name for logging (e.g., 'cluster_keywords') + tracker: Optional ConsoleStepTracker instance for logging + + Returns: + Dict with 'content', 'input_tokens', 'output_tokens', 'total_tokens', + 'model', 'cost', 'error', 'api_id' + + Raises: + ValueError: If model is not provided + """ + # Use provided tracker or create a new one + if tracker is None: + tracker = ConsoleStepTracker(function_name) + + tracker.ai_call("Preparing request...") + + # Step 1: Validate model is provided + if not model: + error_msg = "Model is required. 
Ensure IntegrationSettings is configured for the account." + tracker.error('ConfigurationError', error_msg) + logger.error(f"[AICore] {error_msg}") + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': None, + 'cost': 0.0, + 'api_id': None, + } + + # Step 2: Validate API key + api_key = api_key or self._openai_api_key + if not api_key: + error_msg = 'OpenAI API key not configured' + tracker.error('ConfigurationError', error_msg) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': model, + 'cost': 0.0, + 'api_id': None, + } + + # Step 3: Use provided model (no fallback) + active_model = model + + # Debug logging: Show model used + logger.info(f"[AICore] Model Configuration:") + logger.info(f" - Model parameter passed: {model}") + logger.info(f" - Model used in request: {active_model}") + tracker.ai_call(f"Using model: {active_model}") + + if active_model not in MODEL_RATES: + error_msg = f"Model '{active_model}' is not supported. 
Supported models: {list(MODEL_RATES.keys())}" + logger.error(f"[AICore] {error_msg}") + tracker.error('ConfigurationError', error_msg) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': None, + } + + tracker.ai_call(f"Using model: {active_model}") + + # Step 3: Auto-enable JSON mode for supported models + if response_format is None and active_model in JSON_MODE_MODELS: + response_format = {'type': 'json_object'} + tracker.ai_call(f"Auto-enabled JSON mode for {active_model}") + elif response_format: + tracker.ai_call(f"Using custom response format: {response_format}") + else: + tracker.ai_call("Using text response format") + + # Step 4: Validate prompt length and add function_id + prompt_length = len(prompt) + tracker.ai_call(f"Prompt length: {prompt_length} characters") + + # Add function_id to prompt if provided (for tracking) + final_prompt = prompt + if function_id: + function_id_prefix = f'function_id: "{function_id}"\n\n' + final_prompt = function_id_prefix + prompt + tracker.ai_call(f"Added function_id to prompt: {function_id}") + + # Step 5: Build request payload + url = 'https://api.openai.com/v1/chat/completions' + headers = { + 'Authorization': f'Bearer {api_key}', + 'Content-Type': 'application/json', + } + + body_data = { + 'model': active_model, + 'messages': [{'role': 'user', 'content': final_prompt}], + 'temperature': temperature, + } + + if max_tokens: + body_data['max_tokens'] = max_tokens + + if response_format: + body_data['response_format'] = response_format + + tracker.ai_call(f"Request payload prepared (model={active_model}, max_tokens={max_tokens}, temp={temperature})") + + # Step 6: Send request + tracker.ai_call("Sending request to OpenAI API...") + request_start = time.time() + + try: + response = requests.post(url, headers=headers, json=body_data, timeout=60) + request_duration = time.time() - request_start + 
tracker.ai_call(f"Received response in {request_duration:.2f}s (status={response.status_code})") + + # Step 7: Validate HTTP response + if response.status_code != 200: + error_data = response.json() if response.headers.get('content-type', '').startswith('application/json') else {} + error_message = f"HTTP {response.status_code} error" + + if isinstance(error_data, dict) and 'error' in error_data: + if isinstance(error_data['error'], dict) and 'message' in error_data['error']: + error_message += f": {error_data['error']['message']}" + + # Check for rate limit + if response.status_code == 429: + retry_after = response.headers.get('retry-after', '60') + tracker.rate_limit(retry_after) + error_message += f" (Rate limit - retry after {retry_after}s)" + else: + tracker.error('HTTPError', error_message) + + logger.error(f"OpenAI API HTTP error {response.status_code}: {error_message}") + + return { + 'content': None, + 'error': error_message, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': None, + } + + # Step 8: Parse response JSON + try: + data = response.json() + except json.JSONDecodeError as e: + error_msg = f'Failed to parse JSON response: {str(e)}' + tracker.malformed_json(str(e)) + logger.error(error_msg) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': None, + } + + api_id = data.get('id') + + # Step 9: Extract content + if 'choices' in data and len(data['choices']) > 0: + content = data['choices'][0]['message']['content'] + usage = data.get('usage', {}) + input_tokens = usage.get('prompt_tokens', 0) + output_tokens = usage.get('completion_tokens', 0) + total_tokens = usage.get('total_tokens', 0) + + tracker.parse(f"Received {total_tokens} tokens (input: {input_tokens}, output: {output_tokens})") + tracker.parse(f"Content length: {len(content)} characters") + + # Step 10: 
Calculate cost + rates = MODEL_RATES.get(active_model, {'input': 2.00, 'output': 8.00}) + cost = (input_tokens * rates['input'] + output_tokens * rates['output']) / 1_000_000 + tracker.parse(f"Cost calculated: ${cost:.6f}") + + tracker.done("Request completed successfully") + + return { + 'content': content, + 'input_tokens': input_tokens, + 'output_tokens': output_tokens, + 'total_tokens': total_tokens, + 'model': active_model, + 'cost': cost, + 'error': None, + 'api_id': api_id, + 'duration': request_duration, # Add duration tracking + } + else: + error_msg = 'No content in OpenAI response' + tracker.error('EmptyResponse', error_msg) + logger.error(error_msg) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': api_id, + } + + except requests.exceptions.Timeout: + error_msg = 'Request timeout (60s exceeded)' + tracker.timeout(60) + logger.error(error_msg) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': None, + } + except requests.exceptions.RequestException as e: + error_msg = f'Request exception: {str(e)}' + tracker.error('RequestException', error_msg, e) + logger.error(f"OpenAI API error: {error_msg}", exc_info=True) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': None, + } + except Exception as e: + error_msg = f'Unexpected error: {str(e)}' + logger.error(f"[AI][{function_name}][Error] {error_msg}", exc_info=True) + if tracker: + tracker.error('UnexpectedError', error_msg, e) + return { + 'content': None, + 'error': error_msg, + 'input_tokens': 0, + 'output_tokens': 0, + 'total_tokens': 0, + 'model': active_model, + 'cost': 0.0, + 'api_id': None, + } + + def extract_json(self, response_text: str) -> 
Optional[Dict]: + """ + Extract JSON from response text. + Handles markdown code blocks, multiline JSON, etc. + + Args: + response_text: Raw response text from AI + + Returns: + Parsed JSON dict or None + """ + if not response_text or not response_text.strip(): + return None + + # Try direct JSON parse first + try: + return json.loads(response_text.strip()) + except json.JSONDecodeError: + pass + + # Try to extract JSON from markdown code blocks + json_block_pattern = r'```(?:json)?\s*(\{.*?\}|\[.*?\])\s*```' + matches = re.findall(json_block_pattern, response_text, re.DOTALL) + if matches: + try: + return json.loads(matches[0]) + except json.JSONDecodeError: + pass + + # Try to find JSON object/array in text + json_pattern = r'(\{.*\}|\[.*\])' + matches = re.findall(json_pattern, response_text, re.DOTALL) + for match in matches: + try: + return json.loads(match) + except json.JSONDecodeError: + continue + + return None + + def generate_image( + self, + prompt: str, + provider: str = 'openai', + model: Optional[str] = None, + size: str = '1024x1024', + n: int = 1, + api_key: Optional[str] = None, + negative_prompt: Optional[str] = None, + function_name: str = 'generate_image' + ) -> Dict[str, Any]: + """ + Generate image using AI with console logging. + + Args: + prompt: Image prompt + provider: 'openai' or 'runware' + model: Model name + size: Image size + n: Number of images + api_key: Optional API key override + negative_prompt: Optional negative prompt + function_name: Function name for logging + + Returns: + Dict with 'url', 'revised_prompt', 'cost', 'error', etc. 
+ """ + print(f"[AI][{function_name}] Step 1: Preparing image generation request...") + + if provider == 'openai': + return self._generate_image_openai(prompt, model, size, n, api_key, negative_prompt, function_name) + elif provider == 'runware': + return self._generate_image_runware(prompt, model, size, n, api_key, negative_prompt, function_name) + else: + error_msg = f'Unknown provider: {provider}' + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': provider, + 'cost': 0.0, + 'error': error_msg, + } + + def _generate_image_openai( + self, + prompt: str, + model: Optional[str], + size: str, + n: int, + api_key: Optional[str], + negative_prompt: Optional[str], + function_name: str + ) -> Dict[str, Any]: + """Generate image using OpenAI DALL-E""" + print(f"[AI][{function_name}] Provider: OpenAI") + + # Determine character limit based on model + # DALL-E 2: 1000 chars, DALL-E 3: 4000 chars + model = model or 'dall-e-3' + if model == 'dall-e-2': + max_length = 1000 + elif model == 'dall-e-3': + max_length = 4000 + else: + # Default to 1000 for safety + max_length = 1000 + + # CRITICAL: Truncate prompt to model-specific limit BEFORE any processing + if len(prompt) > max_length: + print(f"[AI][{function_name}][Warning] Prompt too long ({len(prompt)} chars), truncating to {max_length} for {model}") + # Try word-aware truncation, but fallback to hard truncate if no space found + truncated = prompt[:max_length - 3] + last_space = truncated.rfind(' ') + if last_space > max_length * 0.9: # Only use word-aware if we have a reasonable space + prompt = truncated[:last_space] + "..." 
+ else: + prompt = prompt[:max_length] # Hard truncate if no good space found + print(f"[AI][{function_name}] Truncated prompt length: {len(prompt)}") + # Final safety check + if len(prompt) > max_length: + prompt = prompt[:max_length] + print(f"[AI][{function_name}][Error] Had to hard truncate to exactly {max_length} chars") + + api_key = api_key or self._openai_api_key + if not api_key: + error_msg = 'OpenAI API key not configured' + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_msg, + } + + model = model or 'dall-e-3' + print(f"[AI][{function_name}] Step 2: Using model: {model}, size: {size}") + + # Validate model + if model not in VALID_OPENAI_IMAGE_MODELS: + error_msg = f"Model '{model}' is not valid for OpenAI image generation. Only {', '.join(VALID_OPENAI_IMAGE_MODELS)} are supported." + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_msg, + } + + # Validate size + valid_sizes = VALID_SIZES_BY_MODEL.get(model, []) + if size not in valid_sizes: + error_msg = f"Image size '{size}' is not valid for model '{model}'. 
Valid sizes: {', '.join(valid_sizes)}" + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_msg, + } + + url = 'https://api.openai.com/v1/images/generations' + print(f"[AI][{function_name}] Step 3: Sending request to OpenAI Images API...") + + headers = { + 'Authorization': f'Bearer {api_key}', + 'Content-Type': 'application/json', + } + + data = { + 'model': model, + 'prompt': prompt, + 'n': n, + 'size': size + } + + if negative_prompt: + # Note: OpenAI DALL-E doesn't support negative_prompt in API, but we log it + print(f"[AI][{function_name}] Note: Negative prompt provided but OpenAI DALL-E doesn't support it") + + request_start = time.time() + try: + response = requests.post(url, headers=headers, json=data, timeout=150) + request_duration = time.time() - request_start + print(f"[AI][{function_name}] Step 4: Received response in {request_duration:.2f}s (status={response.status_code})") + + if response.status_code != 200: + error_data = response.json() if response.headers.get('content-type', '').startswith('application/json') else {} + error_message = f"HTTP {response.status_code} error" + if isinstance(error_data, dict) and 'error' in error_data: + if isinstance(error_data['error'], dict) and 'message' in error_data['error']: + error_message += f": {error_data['error']['message']}" + + print(f"[AI][{function_name}][Error] {error_message}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_message, + } + + body = response.json() + if 'data' in body and len(body['data']) > 0: + image_data = body['data'][0] + image_url = image_data.get('url') + revised_prompt = image_data.get('revised_prompt') + + cost = IMAGE_MODEL_RATES.get(model, 0.040) * n + print(f"[AI][{function_name}] Step 5: Image generated successfully") + print(f"[AI][{function_name}] Step 6: Cost: ${cost:.4f}") + 
print(f"[AI][{function_name}][Success] Image generation completed") + + return { + 'url': image_url, + 'revised_prompt': revised_prompt, + 'provider': 'openai', + 'cost': cost, + 'error': None, + } + else: + error_msg = 'No image data in response' + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_msg, + } + + except requests.exceptions.Timeout: + error_msg = 'Request timeout (150s exceeded)' + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_msg, + } + except Exception as e: + error_msg = f'Unexpected error: {str(e)}' + print(f"[AI][{function_name}][Error] {error_msg}") + logger.error(error_msg, exc_info=True) + return { + 'url': None, + 'revised_prompt': None, + 'provider': 'openai', + 'cost': 0.0, + 'error': error_msg, + } + + def _generate_image_runware( + self, + prompt: str, + model: Optional[str], + size: str, + n: int, + api_key: Optional[str], + negative_prompt: Optional[str], + function_name: str + ) -> Dict[str, Any]: + """Generate image using Runware""" + print(f"[AI][{function_name}] Provider: Runware") + + api_key = api_key or self._runware_api_key + if not api_key: + error_msg = 'Runware API key not configured' + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'provider': 'runware', + 'cost': 0.0, + 'error': error_msg, + } + + runware_model = model or 'runware:97@1' + print(f"[AI][{function_name}] Step 2: Using model: {runware_model}, size: {size}") + + # Parse size + try: + width, height = map(int, size.split('x')) + except ValueError: + error_msg = f"Invalid size format: {size}. 
Expected format: WIDTHxHEIGHT" + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'provider': 'runware', + 'cost': 0.0, + 'error': error_msg, + } + + url = 'https://api.runware.ai/v1' + print(f"[AI][{function_name}] Step 3: Sending request to Runware API...") + print(f"[AI][{function_name}] Runware API key check: has_key={bool(api_key)}, key_length={len(api_key) if api_key else 0}") + + # Runware uses array payload with authentication task first, then imageInference + # Reference: image-generation.php lines 79-97 + import uuid + payload = [ + { + 'taskType': 'authentication', + 'apiKey': api_key + }, + { + 'taskType': 'imageInference', + 'taskUUID': str(uuid.uuid4()), + 'positivePrompt': prompt, + 'negativePrompt': negative_prompt or '', + 'model': runware_model, + 'width': width, + 'height': height, + 'steps': 30, + 'CFGScale': 7.5, + 'numberResults': 1, + 'outputFormat': 'webp' + } + ] + + request_start = time.time() + try: + response = requests.post(url, json=payload, timeout=150) + request_duration = time.time() - request_start + print(f"[AI][{function_name}] Step 4: Received response in {request_duration:.2f}s (status={response.status_code})") + + if response.status_code != 200: + error_msg = f"HTTP {response.status_code} error" + print(f"[AI][{function_name}][Error] {error_msg}") + return { + 'url': None, + 'provider': 'runware', + 'cost': 0.0, + 'error': error_msg, + } + + body = response.json() + print(f"[AI][{function_name}] Runware response type: {type(body)}, length: {len(body) if isinstance(body, list) else 'N/A'}") + logger.info(f"[AI][{function_name}] Runware response body (first 1000 chars): {str(body)[:1000]}") + + # Runware returns array: [auth_result, image_result] + # image_result has 'data' array with image objects containing 'imageURL' + # Reference: AIProcessor has more robust parsing - match that logic + image_url = None + error_msg = None + + if isinstance(body, list): + # Case 1: Array response - find the 
imageInference result + print(f"[AI][{function_name}] Response is array with {len(body)} elements") + for idx, item in enumerate(body): + print(f"[AI][{function_name}] Array element {idx}: {type(item)}, keys: {list(item.keys()) if isinstance(item, dict) else 'N/A'}") + if isinstance(item, dict): + # Check if this is the image result with 'data' key + if 'data' in item: + data = item['data'] + print(f"[AI][{function_name}] Found 'data' key, type: {type(data)}") + if isinstance(data, list) and len(data) > 0: + first_item = data[0] + print(f"[AI][{function_name}] First data item keys: {list(first_item.keys()) if isinstance(first_item, dict) else 'N/A'}") + image_url = first_item.get('imageURL') or first_item.get('image_url') + if image_url: + print(f"[AI][{function_name}] Found imageURL: {image_url[:50]}...") + break + # Check for errors + if 'errors' in item: + errors = item['errors'] + print(f"[AI][{function_name}] Found 'errors' key, type: {type(errors)}") + if isinstance(errors, list) and len(errors) > 0: + error_obj = errors[0] + error_msg = error_obj.get('message') or error_obj.get('error') or str(error_obj) + print(f"[AI][{function_name}][Error] Error in response: {error_msg}") + break + # Check for error at root level + if 'error' in item: + error_msg = item['error'] + print(f"[AI][{function_name}][Error] Error at root level: {error_msg}") + break + elif isinstance(body, dict): + # Case 2: Direct dict response + print(f"[AI][{function_name}] Response is dict with keys: {list(body.keys())}") + if 'data' in body: + data = body['data'] + print(f"[AI][{function_name}] Found 'data' key, type: {type(data)}") + if isinstance(data, list) and len(data) > 0: + first_item = data[0] + print(f"[AI][{function_name}] First data item keys: {list(first_item.keys()) if isinstance(first_item, dict) else 'N/A'}") + image_url = first_item.get('imageURL') or first_item.get('image_url') + elif 'errors' in body: + errors = body['errors'] + print(f"[AI][{function_name}] Found 'errors' 
key, type: {type(errors)}") + if isinstance(errors, list) and len(errors) > 0: + error_obj = errors[0] + error_msg = error_obj.get('message') or error_obj.get('error') or str(error_obj) + print(f"[AI][{function_name}][Error] Error in response: {error_msg}") + elif 'error' in body: + error_msg = body['error'] + print(f"[AI][{function_name}][Error] Error at root level: {error_msg}") + + if error_msg: + print(f"[AI][{function_name}][Error] Runware API error: {error_msg}") + return { + 'url': None, + 'provider': 'runware', + 'cost': 0.0, + 'error': error_msg, + } + + if image_url: + + cost = 0.009 * n # Runware pricing + print(f"[AI][{function_name}] Step 5: Image generated successfully") + print(f"[AI][{function_name}] Step 6: Cost: ${cost:.4f}") + print(f"[AI][{function_name}][Success] Image generation completed") + + return { + 'url': image_url, + 'provider': 'runware', + 'cost': cost, + 'error': None, + } + else: + # If we get here, we couldn't parse the response + error_msg = f'No image data in Runware response. 
Response type: {type(body).__name__}' + print(f"[AI][{function_name}][Error] {error_msg}") + logger.error(f"[AI][{function_name}] Full Runware response: {json.dumps(body, indent=2) if isinstance(body, (dict, list)) else str(body)}") + return { + 'url': None, + 'provider': 'runware', + 'cost': 0.0, + 'error': error_msg, + } + + except Exception as e: + error_msg = f'Unexpected error: {str(e)}' + print(f"[AI][{function_name}][Error] {error_msg}") + logger.error(error_msg, exc_info=True) + return { + 'url': None, + 'provider': 'runware', + 'cost': 0.0, + 'error': error_msg, + } + + def calculate_cost(self, model: str, input_tokens: int, output_tokens: int, model_type: str = 'text') -> float: + """Calculate cost for API call""" + if model_type == 'text': + rates = MODEL_RATES.get(model, {'input': 2.00, 'output': 8.00}) + input_cost = (input_tokens / 1_000_000) * rates['input'] + output_cost = (output_tokens / 1_000_000) * rates['output'] + return input_cost + output_cost + elif model_type == 'image': + rate = IMAGE_MODEL_RATES.get(model, 0.040) + return rate * 1 + return 0.0 + + # Legacy method names for backward compatibility + def call_openai(self, prompt: str, model: Optional[str] = None, max_tokens: int = 4000, + temperature: float = 0.7, response_format: Optional[Dict] = None, + api_key: Optional[str] = None) -> Dict[str, Any]: + """Legacy method - redirects to run_ai_request()""" + return self.run_ai_request( + prompt=prompt, + model=model, + max_tokens=max_tokens, + temperature=temperature, + response_format=response_format, + api_key=api_key, + function_name='call_openai' + ) diff --git a/tenant/backend/igny8_core/api/base.py b/tenant/backend/igny8_core/api/base.py new file mode 100644 index 00000000..223cb99a --- /dev/null +++ b/tenant/backend/igny8_core/api/base.py @@ -0,0 +1,433 @@ +""" +Base ViewSet with account filtering support +Unified API Standard v1.0 compliant +""" +from rest_framework import viewsets, status +from rest_framework.response import 
class AccountModelViewSet(viewsets.ModelViewSet):
    """
    Base ViewSet that automatically filters by account.

    All module ViewSets should inherit from this.  Responsibilities:
    - queryset scoping to the request's account (tenant isolation), with an
      override for admins/developers and system-account users
    - stamping the account on newly created objects
    - wrapping all CRUD responses in the unified success/error envelope
    """

    def get_queryset(self):
        """Return the base queryset scoped to the caller's account.

        Admins, developers and system-account users see every account's
        rows; everyone else is limited to ``request.account`` (set by the
        middleware) or, failing that, their own ``user.account``.
        Unauthenticated callers get an empty queryset.
        """
        queryset = super().get_queryset()
        # Only account-scoped models are filtered.
        if not hasattr(queryset.model, 'account'):
            return queryset

        user = getattr(self.request, 'user', None)
        if not (user and hasattr(user, 'is_authenticated') and user.is_authenticated):
            # Require authentication - empty queryset for anonymous callers.
            return queryset.none()

        try:
            # ADMIN/DEV/SYSTEM ACCOUNT OVERRIDE: skip account filtering for
            # admins/developers and users in system accounts.
            is_admin_or_dev = hasattr(user, 'is_admin_or_developer') and user.is_admin_or_developer()
            is_system_user = hasattr(user, 'is_system_account_user') and user.is_system_account_user()
            if is_admin_or_dev or is_system_user:
                return queryset

            # Account from request (set by middleware).
            account = getattr(self.request, 'account', None)
            if account:
                return queryset.filter(account=account)

            # Fallback to the user's own account.
            try:
                user_account = getattr(user, 'account', None)
                if user_account:
                    return queryset.filter(account=user_account)
            except Exception:
                # If account access fails (e.g. column mismatch), skip
                # account filtering.
                pass
        except (AttributeError, TypeError):
            # Malformed user object - fail closed.
            return queryset.none()
        # NOTE(review): a regular user with no resolvable account falls
        # through unfiltered here (original behavior) - confirm intended.
        return queryset

    def perform_create(self, serializer):
        """Save the new object, stamping the request's account when possible."""
        # Set account from request (set by middleware)
        account = getattr(self.request, 'account', None)
        if not account and hasattr(self.request, 'user') and self.request.user and self.request.user.is_authenticated:
            try:
                account = getattr(self.request.user, 'account', None)
            except Exception:
                # If account access fails (e.g. column mismatch), leave unset.
                account = None

        if account and hasattr(serializer.Meta.model, 'account'):
            serializer.save(account=account)
        else:
            serializer.save()

    def get_serializer_context(self):
        """Expose the current account to serializers via the context."""
        context = super().get_serializer_context()
        account = getattr(self.request, 'account', None)
        if account:
            context['account'] = account
        return context

    def _error_from_exception(self, request, exc, action):
        """Map an unexpected create/update exception to a unified response.

        Heuristically treats messages mentioning required/invalid/validation
        as 400s; everything else becomes a 500.
        """
        import logging
        logger = logging.getLogger(__name__)
        logger.error("Error in %s method: %s", action, exc, exc_info=True)
        message = str(exc).lower()
        if 'required' in message or 'invalid' in message or 'validation' in message:
            return error_response(
                error='Validation error',
                errors=str(exc),
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request,
            )
        return error_response(
            error=f'Internal server error: {str(exc)}',
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            request=request,
        )

    def retrieve(self, request, *args, **kwargs):
        """Fetch a single object in the unified response format."""
        try:
            instance = self.get_object()
            serializer = self.get_serializer(instance)
            return success_response(data=serializer.data, request=request)
        except Exception as e:
            return error_response(
                error=str(e),
                status_code=status.HTTP_404_NOT_FOUND,
                request=request,
            )

    def create(self, request, *args, **kwargs):
        """Create an object, returning the unified response envelope."""
        serializer = self.get_serializer(data=request.data)
        try:
            serializer.is_valid(raise_exception=True)
            self.perform_create(serializer)
            return success_response(
                data=serializer.data,
                message='Created successfully',
                request=request,
                status_code=status.HTTP_201_CREATED,
            )
        except DRFValidationError as e:
            return error_response(
                error='Validation error',
                errors=e.detail if hasattr(e, 'detail') else str(e),
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request,
            )
        except Exception as e:
            return self._error_from_exception(request, e, 'create')

    def update(self, request, *args, **kwargs):
        """Update an object (full or partial) in the unified format."""
        partial = kwargs.pop('partial', False)
        instance = self.get_object()
        serializer = self.get_serializer(instance, data=request.data, partial=partial)
        try:
            serializer.is_valid(raise_exception=True)
            self.perform_update(serializer)
            return success_response(
                data=serializer.data,
                message='Updated successfully',
                request=request,
            )
        except DRFValidationError as e:
            return error_response(
                error='Validation error',
                errors=e.detail if hasattr(e, 'detail') else str(e),
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request,
            )
        except Exception as e:
            # Bug fix: this used to log "Error in create method".
            return self._error_from_exception(request, e, 'update')

    def destroy(self, request, *args, **kwargs):
        """Delete (or soft-delete) an object in the unified format.

        Uses the model's ``soft_delete`` when available, honouring the
        account-level retention window.  The ``aws-admin`` system account
        is protected from deletion.
        """
        try:
            instance = self.get_object()
            # Protect system account
            if hasattr(instance, 'slug') and getattr(instance, 'slug', '') == 'aws-admin':
                raise PermissionDenied("System account cannot be deleted.")

            if hasattr(instance, 'soft_delete'):
                user = getattr(request, 'user', None)
                retention_days = None
                account = getattr(instance, 'account', None)
                if account and hasattr(account, 'deletion_retention_days'):
                    retention_days = account.deletion_retention_days
                elif hasattr(instance, 'deletion_retention_days'):
                    retention_days = getattr(instance, 'deletion_retention_days', None)
                instance.soft_delete(
                    user=user if getattr(user, 'is_authenticated', False) else None,
                    retention_days=retention_days,
                    reason='api_delete',
                )
            else:
                self.perform_destroy(instance)
            return success_response(
                data=None,
                message='Deleted successfully',
                request=request,
                status_code=status.HTTP_204_NO_CONTENT,
            )
        except PermissionDenied as e:
            # Bug fix: permission failures were previously reported as 404.
            return error_response(
                error=str(e),
                status_code=status.HTTP_403_FORBIDDEN,
                request=request,
            )
        except Exception as e:
            return error_response(
                error=str(e),
                status_code=status.HTTP_404_NOT_FOUND,
                request=request,
            )

    def list(self, request, *args, **kwargs):
        """List objects in the unified format, honouring pagination."""
        queryset = self.filter_queryset(self.get_queryset())

        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            # The paginator already emits the unified envelope.
            return self.get_paginated_response(serializer.data)

        serializer = self.get_serializer(queryset, many=True)
        return success_response(data=serializer.data, request=request)
Optional site/sector query parameters + """ + def get_queryset(self): + queryset = super().get_queryset() + + # Check if model has site and sector fields (SiteSectorBaseModel) + if hasattr(queryset.model, 'site') and hasattr(queryset.model, 'sector'): + user = getattr(self.request, 'user', None) + + # Check if user is authenticated and is a proper User instance (not AnonymousUser) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated and hasattr(user, 'get_accessible_sites'): + try: + # ADMIN/DEV/SYSTEM ACCOUNT OVERRIDE: Developers, admins, and system account users + # can see all data regardless of site/sector + if (hasattr(user, 'is_admin_or_developer') and user.is_admin_or_developer()) or \ + (hasattr(user, 'is_system_account_user') and user.is_system_account_user()): + # Skip site/sector filtering for admins, developers, and system account users + # But still respect optional query params if provided + pass + else: + # Get user's accessible sites + accessible_sites = user.get_accessible_sites() + + # If no accessible sites, return empty queryset (unless admin/developer/system account) + if not accessible_sites.exists(): + queryset = queryset.none() + else: + # Filter by accessible sites + queryset = queryset.filter(site__in=accessible_sites) + except (AttributeError, TypeError) as e: + # If there's an error accessing user attributes, return empty queryset + queryset = queryset.none() + else: + # Require authentication - return empty queryset for unauthenticated users + queryset = queryset.none() + + # Optional: Filter by specific site (from query params) + # Safely access query_params (DRF wraps request with Request class) + try: + query_params = getattr(self.request, 'query_params', None) + if query_params is None: + # Fallback for non-DRF requests + query_params = getattr(self.request, 'GET', {}) + site_id = query_params.get('site_id') or query_params.get('site') + else: + site_id = query_params.get('site_id') or query_params.get('site') + 
except AttributeError: + site_id = None + + if site_id: + try: + # Convert site_id to int if it's a string + site_id_int = int(site_id) if site_id else None + if site_id_int: + # ADMIN/DEV/SYSTEM ACCOUNT OVERRIDE: Admins, developers, and system account users + # can filter by any site, others must verify access + if user and hasattr(user, 'is_authenticated') and user.is_authenticated and hasattr(user, 'get_accessible_sites'): + try: + if (hasattr(user, 'is_admin_or_developer') and user.is_admin_or_developer()) or \ + (hasattr(user, 'is_system_account_user') and user.is_system_account_user()): + # Admin/Developer/System Account User can filter by any site + queryset = queryset.filter(site_id=site_id_int) + else: + accessible_sites = user.get_accessible_sites() + if accessible_sites.filter(id=site_id_int).exists(): + queryset = queryset.filter(site_id=site_id_int) + else: + queryset = queryset.none() # Site not accessible + except (AttributeError, TypeError) as e: + # If there's an error accessing user attributes, return empty queryset + queryset = queryset.none() + else: + # Require authentication for site filtering + queryset = queryset.none() + except (ValueError, TypeError): + # Invalid site_id, return empty queryset + queryset = queryset.none() + + # Optional: Filter by specific sector (from query params) + # Safely access query_params (DRF wraps request with Request class) + try: + query_params = getattr(self.request, 'query_params', None) + if query_params is None: + # Fallback for non-DRF requests + query_params = getattr(self.request, 'GET', {}) + sector_id = query_params.get('sector_id') + else: + sector_id = query_params.get('sector_id') + except AttributeError: + sector_id = None + + if sector_id: + try: + # Convert sector_id to int if it's a string + sector_id_int = int(sector_id) if sector_id else None + if sector_id_int: + queryset = queryset.filter(sector_id=sector_id_int) + # If site_id also provided, ensure sector belongs to that site + if site_id: 
+ try: + site_id_int = int(site_id) if site_id else None + if site_id_int: + queryset = queryset.filter(site_id=site_id_int) + except (ValueError, TypeError): + pass + except (ValueError, TypeError): + # Invalid sector_id, return empty queryset + queryset = queryset.none() + + # Always exclude records where site is null (orphaned records) + # This prevents showing keywords/clusters/ideas/tasks that aren't associated with any site + # Only skip this if explicitly requested (e.g., for admin cleanup operations) + queryset = queryset.exclude(site__isnull=True) + + return queryset + + def perform_create(self, serializer): + # First call parent to set account + super().perform_create(serializer) + + # If model has site and sector fields, validate access + if hasattr(serializer.Meta.model, 'site') and hasattr(serializer.Meta.model, 'sector'): + user = getattr(self.request, 'user', None) + site = serializer.validated_data.get('site') + sector = serializer.validated_data.get('sector') + + if user and hasattr(user, 'is_authenticated') and user.is_authenticated and site: + try: + # ADMIN/DEV/SYSTEM ACCOUNT OVERRIDE: Admins, developers, and system account users + # can create in any site, others must verify access + if not ((hasattr(user, 'is_admin_or_developer') and user.is_admin_or_developer()) or + (hasattr(user, 'is_system_account_user') and user.is_system_account_user())): + if hasattr(user, 'get_accessible_sites'): + accessible_sites = user.get_accessible_sites() + if not accessible_sites.filter(id=site.id).exists(): + raise PermissionDenied("You do not have access to this site") + + # Verify sector belongs to site + if sector and hasattr(sector, 'site') and sector.site != site: + raise PermissionDenied("Sector must belong to the selected site") + except (AttributeError, TypeError) as e: + # If there's an error accessing user attributes, raise permission denied + raise PermissionDenied("Unable to verify access permissions") + + def get_serializer_context(self): + context 
= super().get_serializer_context() + user = getattr(self.request, 'user', None) + + # Add accessible sites to context for serializer (e.g., for dropdown choices) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated and hasattr(user, 'get_accessible_sites'): + try: + context['accessible_sites'] = user.get_accessible_sites() + # Get accessible sectors from accessible sites + from igny8_core.auth.models import Sector + context['accessible_sectors'] = Sector.objects.filter( + site__in=context['accessible_sites'], + is_active=True + ) + except (AttributeError, TypeError): + # If there's an error, set empty querysets + from igny8_core.auth.models import Site, Sector + context['accessible_sites'] = Site.objects.none() + context['accessible_sectors'] = Sector.objects.none() + else: + # Set empty querysets for unauthenticated users + from igny8_core.auth.models import Site, Sector + context['accessible_sites'] = Site.objects.none() + context['accessible_sectors'] = Sector.objects.none() + + return context + + +class StandardResponseMixin: + """ + Mixin for standard API response format. 
+ """ + def get_response(self, data, message=None, status_code=200): + return Response({ + 'success': True, + 'message': message, + 'data': data + }, status=status_code) + + def get_error_response(self, message, errors=None, status_code=400): + return Response({ + 'success': False, + 'message': message, + 'errors': errors + }, status=status_code) + diff --git a/tenant/backend/igny8_core/api/permissions.py b/tenant/backend/igny8_core/api/permissions.py new file mode 100644 index 00000000..be1a13e9 --- /dev/null +++ b/tenant/backend/igny8_core/api/permissions.py @@ -0,0 +1,180 @@ +""" +Standardized Permission Classes +Provides consistent permission checking across all endpoints +""" +from rest_framework import permissions +from rest_framework.exceptions import PermissionDenied + + +class IsAuthenticatedAndActive(permissions.BasePermission): + """ + Permission class that requires user to be authenticated and active + Base permission for most endpoints + """ + def has_permission(self, request, view): + if not request.user or not request.user.is_authenticated: + return False + + # Check if user is active + if hasattr(request.user, 'is_active'): + return request.user.is_active + + return True + + +class HasTenantAccess(permissions.BasePermission): + """ + Permission class that requires user to belong to the tenant/account + Ensures tenant isolation + """ + def has_permission(self, request, view): + if not request.user or not request.user.is_authenticated: + return False + + # Get account from request (set by middleware) + account = getattr(request, 'account', None) + + # If no account in request, try to get from user + if not account and hasattr(request.user, 'account'): + try: + account = request.user.account + except (AttributeError, Exception): + pass + + # Admin/Developer/System account users bypass tenant check + if request.user and hasattr(request.user, 'is_authenticated') and request.user.is_authenticated: + try: + is_admin_or_dev = (hasattr(request.user, 
def _has_global_override(user):
    """True when *user* is an admin/developer or a system-account user.

    These users bypass role checks everywhere.  Attribute errors are
    swallowed so a malformed user object simply gets no override.
    Factored out of the role permission classes, which previously
    duplicated this block verbatim.
    """
    try:
        if hasattr(user, 'is_admin_or_developer') and user.is_admin_or_developer():
            return True
        if hasattr(user, 'is_system_account_user') and user.is_system_account_user():
            return True
    except (AttributeError, TypeError):
        pass
    return False


class IsViewerOrAbove(permissions.BasePermission):
    """
    Permission class that requires viewer, editor, admin, or owner role.
    For read-only operations.
    """

    def has_permission(self, request, view):
        if not request.user or not request.user.is_authenticated:
            return False
        # Admin/Developer/System account users always have access.
        if _has_global_override(request.user):
            return True
        if hasattr(request.user, 'role'):
            return request.user.role in ['viewer', 'editor', 'admin', 'owner']
        # If no role system, allow authenticated users.
        return True


class IsEditorOrAbove(permissions.BasePermission):
    """
    Permission class that requires editor, admin, or owner role.
    For content operations.
    """

    def has_permission(self, request, view):
        if not request.user or not request.user.is_authenticated:
            return False
        # Admin/Developer/System account users always have access.
        if _has_global_override(request.user):
            return True
        if hasattr(request.user, 'role'):
            return request.user.role in ['editor', 'admin', 'owner']
        # If no role system, allow authenticated users.
        return True


class IsAdminOrOwner(permissions.BasePermission):
    """
    Permission class that requires admin or owner role only.
    For settings, keys, billing operations.
    """

    def has_permission(self, request, view):
        if not request.user or not request.user.is_authenticated:
            return False
        # Admin/Developer/System account users always have access.
        if _has_global_override(request.user):
            return True
        if hasattr(request.user, 'role'):
            return request.user.role in ['admin', 'owner']
        # If no role system, deny by default for security.
        return False
+ """ + def has_permission(self, request, view): + user = getattr(request, "user", None) + if not user or not user.is_authenticated: + return False + + account_slug = getattr(getattr(user, "account", None), "slug", None) + if user.role == "developer": + return True + if account_slug in ["aws-admin", "default-account", "default"]: + return True + return False + + diff --git a/tenant/backend/igny8_core/api/throttles.py b/tenant/backend/igny8_core/api/throttles.py new file mode 100644 index 00000000..3eb8e195 --- /dev/null +++ b/tenant/backend/igny8_core/api/throttles.py @@ -0,0 +1,146 @@ +""" +Scoped Rate Throttling +Provides rate limiting with different scopes for different operation types +""" +from rest_framework.throttling import ScopedRateThrottle +from django.conf import settings +import logging + +logger = logging.getLogger(__name__) + + +class DebugScopedRateThrottle(ScopedRateThrottle): + """ + Scoped rate throttle that can be bypassed in debug mode + + Usage: + class MyViewSet(viewsets.ModelViewSet): + throttle_scope = 'planner' + throttle_classes = [DebugScopedRateThrottle] + """ + + def allow_request(self, request, view): + """ + Check if request should be throttled + + Bypasses throttling if: + - DEBUG mode is True + - IGNY8_DEBUG_THROTTLE environment variable is True + - User belongs to aws-admin or other system accounts + - User is admin/developer role + - Public blueprint list request with site filter (for Sites Renderer) + """ + # Check if throttling should be bypassed + debug_bypass = getattr(settings, 'DEBUG', False) + env_bypass = getattr(settings, 'IGNY8_DEBUG_THROTTLE', False) + + # Bypass for public blueprint list requests (Sites Renderer fallback) + public_blueprint_bypass = False + if hasattr(view, 'action') and view.action == 'list': + if hasattr(request, 'query_params') and request.query_params.get('site'): + if not request.user or not hasattr(request.user, 'is_authenticated') or not request.user.is_authenticated: + public_blueprint_bypass 
= True + + # Bypass for authenticated users (avoid user-facing 429s) and system accounts + system_account_bypass = False + authenticated_bypass = False + if hasattr(request, 'user') and request.user and hasattr(request.user, 'is_authenticated') and request.user.is_authenticated: + authenticated_bypass = True # Do not throttle logged-in users + try: + # Check if user is in system account (aws-admin, default-account, default) + if hasattr(request.user, 'is_system_account_user') and request.user.is_system_account_user(): + system_account_bypass = True + # Also bypass for admin/developer roles + elif hasattr(request.user, 'is_admin_or_developer') and request.user.is_admin_or_developer(): + system_account_bypass = True + except (AttributeError, Exception): + # If checking fails, continue with normal throttling + pass + + if debug_bypass or env_bypass or system_account_bypass or public_blueprint_bypass or authenticated_bypass: + # In debug mode or for system accounts, still set throttle headers but don't actually throttle + # This allows testing throttle headers without blocking requests + if hasattr(self, 'get_rate'): + # Set headers for debugging + self.scope = getattr(view, 'throttle_scope', None) + if self.scope: + # Get rate for this scope + rate = self.get_rate() + if rate: + # Parse rate (e.g., "10/min") + num_requests, duration = self.parse_rate(rate) + # Set headers + request._throttle_debug_info = { + 'scope': self.scope, + 'rate': rate, + 'limit': num_requests, + 'duration': duration + } + return True + + # Normal throttling behavior + return super().allow_request(request, view) + + def get_rate(self): + """ + Get rate for the current scope + """ + if not self.scope: + return None + + # Get throttle rates from settings + throttle_rates = getattr(settings, 'REST_FRAMEWORK', {}).get('DEFAULT_THROTTLE_RATES', {}) + + # Get rate for this scope + rate = throttle_rates.get(self.scope) + + # Fallback to default if scope not found + if not rate: + rate = 
throttle_rates.get('default', '100/min') + + return rate + + def parse_rate(self, rate): + """ + Parse rate string (e.g., "10/min") into (num_requests, duration) + + Returns: + tuple: (num_requests, duration_in_seconds) + """ + if not rate: + return None, None + + try: + num, period = rate.split('/') + num_requests = int(num) + + # Parse duration + period = period.strip().lower() + if period == 'sec' or period == 's': + duration = 1 + elif period == 'min' or period == 'm': + duration = 60 + elif period == 'hour' or period == 'h': + duration = 3600 + elif period == 'day' or period == 'd': + duration = 86400 + else: + # Default to seconds + duration = 1 + + return num_requests, duration + except (ValueError, AttributeError): + # Invalid rate format, default to 100/min + logger.warning(f"Invalid rate format: {rate}, defaulting to 100/min") + return 100, 60 + + def throttle_success(self): + """ + Called when request is allowed + Sets throttle headers on response + """ + # This is called by DRF after allow_request returns True + # Headers are set automatically by ScopedRateThrottle + pass + + diff --git a/tenant/backend/igny8_core/auth/middleware.py b/tenant/backend/igny8_core/auth/middleware.py new file mode 100644 index 00000000..9628dc47 --- /dev/null +++ b/tenant/backend/igny8_core/auth/middleware.py @@ -0,0 +1,174 @@ +""" +Multi-Account Middleware +Extracts account from JWT token and injects into request context +""" +from django.utils.deprecation import MiddlewareMixin +from django.http import JsonResponse +from django.contrib.auth import logout +from rest_framework import status + +try: + import jwt + JWT_AVAILABLE = True +except ImportError: + JWT_AVAILABLE = False + +from django.conf import settings + + +class AccountContextMiddleware(MiddlewareMixin): + """ + Middleware that extracts account information from JWT token + and adds it to request context for account isolation. 
+ """ + + def process_request(self, request): + """Extract account from JWT token in Authorization header or session.""" + # Skip for admin and auth endpoints + if request.path.startswith('/admin/') or request.path.startswith('/api/v1/auth/'): + return None + + # First, try to get user from Django session (cookie-based auth) + # This handles cases where frontend uses credentials: 'include' with session cookies + if hasattr(request, 'user') and request.user and request.user.is_authenticated: + # User is authenticated via session - refresh from DB to get latest account/plan data + # This ensures changes to account/plan are reflected immediately without re-login + try: + from .models import User as UserModel + # Refresh user from DB with account and plan relationships to get latest data + # This is important so account/plan changes are reflected immediately + user = UserModel.objects.select_related('account', 'account__plan').get(id=request.user.id) + # Update request.user with fresh data + request.user = user + # Get account from refreshed user + user_account = getattr(user, 'account', None) + validation_error = self._validate_account_and_plan(request, user) + if validation_error: + return validation_error + request.account = getattr(user, 'account', None) + return None + except (AttributeError, UserModel.DoesNotExist, Exception): + # If refresh fails, fallback to cached account + try: + user_account = getattr(request.user, 'account', None) + if user_account: + validation_error = self._validate_account_and_plan(request, request.user) + if validation_error: + return validation_error + request.account = user_account + return None + except (AttributeError, Exception): + pass + # If account access fails (e.g., column mismatch), set to None + request.account = None + return None + + # Get token from Authorization header (JWT auth - for future implementation) + auth_header = request.META.get('HTTP_AUTHORIZATION', '') + if not auth_header.startswith('Bearer '): + # No JWT 
token - if session auth didn't work, set account to None + # But don't set request.user to None - it might be set by Django's auth middleware + if not hasattr(request, 'account'): + request.account = None + return None + + token = auth_header.split(' ')[1] if len(auth_header.split(' ')) > 1 else None + if not token: + if not hasattr(request, 'account'): + request.account = None + return None + + try: + if not JWT_AVAILABLE: + # JWT library not installed yet - skip for now + request.account = None + return None + + # Decode JWT token with signature verification + # Use JWT_SECRET_KEY from settings (falls back to SECRET_KEY if not set) + jwt_secret = getattr(settings, 'JWT_SECRET_KEY', getattr(settings, 'SECRET_KEY', None)) + if not jwt_secret: + raise ValueError("JWT_SECRET_KEY or SECRET_KEY must be set in settings") + + decoded = jwt.decode(token, jwt_secret, algorithms=[getattr(settings, 'JWT_ALGORITHM', 'HS256')]) + + # Extract user and account info from token + user_id = decoded.get('user_id') + account_id = decoded.get('account_id') + + if user_id: + from .models import User, Account + try: + # Get user from DB (but don't set request.user - let DRF authentication handle that) + # Only set request.account for account context + user = User.objects.select_related('account', 'account__plan').get(id=user_id) + validation_error = self._validate_account_and_plan(request, user) + if validation_error: + return validation_error + if account_id: + # Verify account still exists + try: + account = Account.objects.get(id=account_id) + request.account = account + except Account.DoesNotExist: + # Account from token doesn't exist - don't fallback, set to None + request.account = None + else: + # No account_id in token - set to None (don't fallback to user.account) + request.account = None + except (User.DoesNotExist, Account.DoesNotExist): + request.account = None + else: + request.account = None + + except jwt.InvalidTokenError: + request.account = None + except Exception: + # 
Fail silently for now - allow unauthenticated access + request.account = None + + return None + + def _validate_account_and_plan(self, request, user): + """ + Ensure the authenticated user has an account and an active plan. + If not, logout the user (for session auth) and block the request. + """ + try: + account = getattr(user, 'account', None) + except Exception: + account = None + + if not account: + return self._deny_request( + request, + error='Account not configured for this user. Please contact support.', + status_code=status.HTTP_403_FORBIDDEN, + ) + + plan = getattr(account, 'plan', None) + if plan is None or getattr(plan, 'is_active', False) is False: + return self._deny_request( + request, + error='Active subscription required. Visit igny8.com/pricing to subscribe.', + status_code=status.HTTP_402_PAYMENT_REQUIRED, + ) + + return None + + def _deny_request(self, request, error, status_code): + """Logout session users (if any) and return a consistent JSON error.""" + try: + if hasattr(request, 'user') and request.user and request.user.is_authenticated: + logout(request) + except Exception: + pass + + return JsonResponse( + { + 'success': False, + 'error': error, + }, + status=status_code, + ) + diff --git a/tenant/backend/igny8_core/auth/models.py b/tenant/backend/igny8_core/auth/models.py new file mode 100644 index 00000000..50cc3274 --- /dev/null +++ b/tenant/backend/igny8_core/auth/models.py @@ -0,0 +1,647 @@ +""" +Multi-Account and Authentication Models +""" +from django.db import models +from django.contrib.auth.models import AbstractUser +from django.utils.translation import gettext_lazy as _ +from django.core.validators import MinValueValidator, MaxValueValidator +from igny8_core.common.soft_delete import SoftDeletableModel, SoftDeleteManager + + +class AccountBaseModel(models.Model): + """ + Abstract base model for all account-isolated models. + All models should inherit from this to ensure account isolation. 
+ """ + account = models.ForeignKey('igny8_core_auth.Account', on_delete=models.CASCADE, related_name='%(class)s_set', db_index=True, db_column='tenant_id') + created_at = models.DateTimeField(auto_now_add=True, db_index=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + abstract = True + indexes = [ + models.Index(fields=['account', 'created_at']), + ] + + + +class SiteSectorBaseModel(AccountBaseModel): + """ + Abstract base model for models that belong to a Site and Sector. + Provides automatic filtering by site/sector based on user access. + Models like Keywords and Clusters should inherit from this. + """ + site = models.ForeignKey('igny8_core_auth.Site', on_delete=models.CASCADE, related_name='%(class)s_set', db_index=True) + sector = models.ForeignKey('igny8_core_auth.Sector', on_delete=models.CASCADE, related_name='%(class)s_set', db_index=True) + + class Meta: + abstract = True + indexes = [ + models.Index(fields=['account', 'site', 'sector']), + models.Index(fields=['site', 'sector']), + ] + + def save(self, *args, **kwargs): + """Ensure site and sector belong to same account.""" + # Set account from site + if self.site: + self.account = self.site.account + # Ensure sector belongs to site + if self.sector and self.sector.site != self.site: + from django.core.exceptions import ValidationError + raise ValidationError("Sector must belong to the same site") + super().save(*args, **kwargs) + + +class Account(SoftDeletableModel): + """ + Account/Organization model for multi-account support. 
+ """ + STATUS_CHOICES = [ + ('active', 'Active'), + ('suspended', 'Suspended'), + ('trial', 'Trial'), + ('cancelled', 'Cancelled'), + ] + + name = models.CharField(max_length=255) + slug = models.SlugField(unique=True, max_length=255) + owner = models.ForeignKey( + 'igny8_core_auth.User', + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name='owned_accounts', + ) + stripe_customer_id = models.CharField(max_length=255, blank=True, null=True) + plan = models.ForeignKey('igny8_core_auth.Plan', on_delete=models.PROTECT, related_name='accounts') + credits = models.IntegerField(default=0, validators=[MinValueValidator(0)]) + status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='trial') + deletion_retention_days = models.PositiveIntegerField( + default=14, + validators=[MinValueValidator(1), MaxValueValidator(365)], + help_text="Retention window (days) before soft-deleted items are purged", + ) + + # Billing information + billing_email = models.EmailField(blank=True, null=True, help_text="Email for billing notifications") + billing_address_line1 = models.CharField(max_length=255, blank=True, help_text="Street address") + billing_address_line2 = models.CharField(max_length=255, blank=True, help_text="Apt, suite, etc.") + billing_city = models.CharField(max_length=100, blank=True) + billing_state = models.CharField(max_length=100, blank=True, help_text="State/Province/Region") + billing_postal_code = models.CharField(max_length=20, blank=True) + billing_country = models.CharField(max_length=2, blank=True, help_text="ISO 2-letter country code") + tax_id = models.CharField(max_length=100, blank=True, help_text="VAT/Tax ID number") + + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = 'igny8_tenants' + verbose_name = 'Account' + verbose_name_plural = 'Accounts' + indexes = [ + models.Index(fields=['slug']), + models.Index(fields=['status']), + ] + + objects 
= SoftDeleteManager() + all_objects = models.Manager() + + def __str__(self): + return self.name + + def is_system_account(self): + """Check if this account is a system account with highest access level.""" + # System accounts bypass all filtering restrictions + return self.slug in ['aws-admin', 'default-account', 'default'] + + def soft_delete(self, user=None, reason=None, retention_days=None): + if self.is_system_account(): + from django.core.exceptions import PermissionDenied + raise PermissionDenied("System account cannot be deleted.") + return super().soft_delete(user=user, reason=reason, retention_days=retention_days) + + def delete(self, using=None, keep_parents=False): + return self.soft_delete() + + +class Plan(models.Model): + """ + Subscription plan model - Phase 0: Credit-only system. + Plans define credits, billing, and account management limits only. + """ + BILLING_CYCLE_CHOICES = [ + ('monthly', 'Monthly'), + ('annual', 'Annual'), + ] + + # Plan Info + name = models.CharField(max_length=255) + slug = models.SlugField(unique=True, max_length=255) + price = models.DecimalField(max_digits=10, decimal_places=2) + billing_cycle = models.CharField(max_length=20, choices=BILLING_CYCLE_CHOICES, default='monthly') + features = models.JSONField(default=list, blank=True, help_text="Plan features as JSON array (e.g., ['ai_writer', 'image_gen', 'auto_publish'])") + is_active = models.BooleanField(default=True) + created_at = models.DateTimeField(auto_now_add=True) + + # Account Management Limits (kept - not operation limits) + max_users = models.IntegerField(default=1, validators=[MinValueValidator(1)], help_text="Total users allowed per account") + max_sites = models.IntegerField( + default=1, + validators=[MinValueValidator(1)], + help_text="Maximum number of sites allowed" + ) + max_industries = models.IntegerField(default=None, null=True, blank=True, validators=[MinValueValidator(1)], help_text="Optional limit for industries/sectors") + max_author_profiles = 
class Plan(models.Model):
    """
    Subscription plan model - Phase 0: Credit-only system.
    Plans define credits, billing, and account management limits only.
    """
    BILLING_CYCLE_CHOICES = [
        ('monthly', 'Monthly'),
        ('annual', 'Annual'),
    ]

    # Plan Info
    name = models.CharField(max_length=255)
    slug = models.SlugField(unique=True, max_length=255)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    billing_cycle = models.CharField(max_length=20, choices=BILLING_CYCLE_CHOICES, default='monthly')
    features = models.JSONField(default=list, blank=True, help_text="Plan features as JSON array (e.g., ['ai_writer', 'image_gen', 'auto_publish'])")
    is_active = models.BooleanField(default=True)
    created_at = models.DateTimeField(auto_now_add=True)

    # Account Management Limits (kept - not operation limits)
    max_users = models.IntegerField(default=1, validators=[MinValueValidator(1)], help_text="Total users allowed per account")
    max_sites = models.IntegerField(
        default=1,
        validators=[MinValueValidator(1)],
        help_text="Maximum number of sites allowed"
    )
    # NULL means "no limit configured"; Site.get_max_sectors_limit falls
    # back to a default of 5 in that case.
    max_industries = models.IntegerField(default=None, null=True, blank=True, validators=[MinValueValidator(1)], help_text="Optional limit for industries/sectors")
    max_author_profiles = models.IntegerField(default=5, validators=[MinValueValidator(0)], help_text="Limit for saved writing styles")

    # Billing & Credits (Phase 0: Credit-only system)
    included_credits = models.IntegerField(default=0, validators=[MinValueValidator(0)], help_text="Monthly credits included")
    extra_credit_price = models.DecimalField(max_digits=10, decimal_places=2, default=0.01, help_text="Price per additional credit")
    allow_credit_topup = models.BooleanField(default=True, help_text="Can user purchase more credits?")
    auto_credit_topup_threshold = models.IntegerField(default=None, null=True, blank=True, validators=[MinValueValidator(0)], help_text="Auto top-up trigger point (optional)")
    auto_credit_topup_amount = models.IntegerField(default=None, null=True, blank=True, validators=[MinValueValidator(1)], help_text="How many credits to auto-buy")

    # Stripe Integration
    stripe_product_id = models.CharField(max_length=255, blank=True, null=True, help_text="For Stripe plan sync")
    stripe_price_id = models.CharField(max_length=255, blank=True, null=True, help_text="Monthly price ID for Stripe")

    # Legacy field for backward compatibility
    credits_per_month = models.IntegerField(default=0, validators=[MinValueValidator(0)], help_text="DEPRECATED: Use included_credits instead")

    class Meta:
        db_table = 'igny8_plans'
        ordering = ['price']

    def __str__(self):
        return self.name

    def clean(self):
        """Validate plan limits."""
        # NOTE(review): these checks duplicate the MinValueValidator()s on
        # the fields above, and clean() only runs via full_clean()/forms,
        # not on plain save() — confirm the duplication is intentional.
        from django.core.exceptions import ValidationError
        if self.max_sites < 1:
            raise ValidationError("max_sites must be >= 1")
        if self.included_credits < 0:
            raise ValidationError("included_credits must be >= 0")

    def get_effective_credits_per_month(self):
        """Get effective credits per month (use included_credits if set, otherwise credits_per_month for backward compatibility)."""
        return self.included_credits if self.included_credits > 0 else self.credits_per_month
class Subscription(models.Model):
    """
    Account subscription model linking to Stripe.

    One-to-one with Account (one live subscription per account); status
    values mirror Stripe's subscription statuses.
    """
    STATUS_CHOICES = [
        ('active', 'Active'),
        ('past_due', 'Past Due'),
        ('canceled', 'Canceled'),
        ('trialing', 'Trialing'),
    ]

    # db_column='tenant_id' keeps the legacy column name on igny8_subscriptions.
    account = models.OneToOneField('igny8_core_auth.Account', on_delete=models.CASCADE, related_name='subscription', db_column='tenant_id')
    stripe_subscription_id = models.CharField(max_length=255, unique=True)
    status = models.CharField(max_length=20, choices=STATUS_CHOICES)
    current_period_start = models.DateTimeField()
    current_period_end = models.DateTimeField()
    cancel_at_period_end = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'igny8_subscriptions'
        indexes = [
            models.Index(fields=['status']),
        ]

    def __str__(self):
        return f"{self.account.name} - {self.status}"
+ """ + STATUS_CHOICES = [ + ('active', 'Active'), + ('inactive', 'Inactive'), + ('suspended', 'Suspended'), + ] + + name = models.CharField(max_length=255) + slug = models.SlugField(max_length=255) + domain = models.URLField(blank=True, null=True, help_text="Primary domain URL") + description = models.TextField(blank=True, null=True) + industry = models.ForeignKey( + 'igny8_core_auth.Industry', + on_delete=models.PROTECT, + related_name='sites', + null=True, + blank=True, + help_text="Industry this site belongs to" + ) + is_active = models.BooleanField(default=True, db_index=True) + status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='active') + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + # WordPress integration fields (legacy - use SiteIntegration instead) + wp_url = models.URLField(blank=True, null=True, help_text="WordPress site URL (legacy - use SiteIntegration)") + wp_username = models.CharField(max_length=255, blank=True, null=True) + wp_app_password = models.CharField(max_length=255, blank=True, null=True) + wp_api_key = models.CharField(max_length=255, blank=True, null=True, help_text="API key for WordPress integration via IGNY8 WP Bridge plugin") + + # Site type and hosting (Phase 6) + SITE_TYPE_CHOICES = [ + ('marketing', 'Marketing Site'), + ('ecommerce', 'Ecommerce Site'), + ('blog', 'Blog'), + ('portfolio', 'Portfolio'), + ('corporate', 'Corporate'), + ] + + HOSTING_TYPE_CHOICES = [ + ('igny8_sites', 'IGNY8 Sites'), + ('wordpress', 'WordPress'), + ('shopify', 'Shopify'), + ('multi', 'Multi-Destination'), + ] + + site_type = models.CharField( + max_length=50, + choices=SITE_TYPE_CHOICES, + default='marketing', + db_index=True, + help_text="Type of site" + ) + + hosting_type = models.CharField( + max_length=50, + choices=HOSTING_TYPE_CHOICES, + default='igny8_sites', + db_index=True, + help_text="Target hosting platform" + ) + + # SEO metadata (Phase 7) + seo_metadata = 
models.JSONField( + default=dict, + blank=True, + help_text="SEO metadata: meta tags, Open Graph, Schema.org" + ) + + objects = SoftDeleteManager() + all_objects = models.Manager() + + class Meta: + db_table = 'igny8_sites' + unique_together = [['account', 'slug']] # Slug unique per account + ordering = ['-created_at'] # Order by creation date for consistent pagination + indexes = [ + models.Index(fields=['account', 'is_active']), + models.Index(fields=['account', 'status']), + models.Index(fields=['industry']), + models.Index(fields=['site_type']), + models.Index(fields=['hosting_type']), + ] + + def __str__(self): + return f"{self.account.name} - {self.name}" + + + def get_active_sectors_count(self): + """Get count of active sectors for this site.""" + return self.sectors.filter(is_active=True).count() + + def get_max_sectors_limit(self): + """Get the maximum sectors allowed for this site based on plan, defaulting to 5 if not set.""" + try: + if self.account and self.account.plan and self.account.plan.max_industries is not None: + return self.account.plan.max_industries + except (AttributeError, Exception): + pass + # Default limit: 5 sectors per site + return 5 + + def can_add_sector(self): + """Check if site can add another sector based on plan limits.""" + return self.get_active_sectors_count() < self.get_max_sectors_limit() + + +class Industry(models.Model): + """ + Industry model - Global industry templates. + These are predefined industry definitions that sites can reference. 
+ """ + name = models.CharField(max_length=255, unique=True) + slug = models.SlugField(unique=True, max_length=255, db_index=True) + description = models.TextField(blank=True, null=True) + is_active = models.BooleanField(default=True, db_index=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = 'igny8_industries' + ordering = ['name'] + verbose_name = 'Industry' + verbose_name_plural = 'Industries' + indexes = [ + models.Index(fields=['slug']), + models.Index(fields=['is_active']), + ] + + def __str__(self): + return self.name + + +class IndustrySector(models.Model): + """ + Industry Sector model - Sector templates within industries. + These define the available sectors for each industry. + """ + industry = models.ForeignKey('igny8_core_auth.Industry', on_delete=models.CASCADE, related_name='sectors') + name = models.CharField(max_length=255) + slug = models.SlugField(max_length=255, db_index=True) + description = models.TextField(blank=True, null=True) + suggested_keywords = models.JSONField(default=list, help_text='List of suggested keywords for this sector template') + is_active = models.BooleanField(default=True, db_index=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = 'igny8_industry_sectors' + unique_together = [['industry', 'slug']] # Slug unique per industry + verbose_name = 'Industry Sector' + verbose_name_plural = 'Industry Sectors' + indexes = [ + models.Index(fields=['industry', 'is_active']), + models.Index(fields=['slug']), + ] + ordering = ['industry', 'name'] + + def __str__(self): + return f"{self.industry.name} - {self.name}" + + +class SeedKeyword(models.Model): + """ + Global, permanent keyword suggestions scoped by industry + sector. + These are canonical keywords that can be imported into account-specific Keywords. + Non-deletable global reference data. 
+ """ + INTENT_CHOICES = [ + ('informational', 'Informational'), + ('navigational', 'Navigational'), + ('commercial', 'Commercial'), + ('transactional', 'Transactional'), + ] + + keyword = models.CharField(max_length=255, db_index=True) + industry = models.ForeignKey('igny8_core_auth.Industry', on_delete=models.CASCADE, related_name='seed_keywords') + sector = models.ForeignKey('igny8_core_auth.IndustrySector', on_delete=models.CASCADE, related_name='seed_keywords') + volume = models.IntegerField(default=0, help_text='Search volume estimate') + difficulty = models.IntegerField( + default=0, + validators=[MinValueValidator(0), MaxValueValidator(100)], + help_text='Keyword difficulty (0-100)' + ) + intent = models.CharField(max_length=50, choices=INTENT_CHOICES, default='informational') + is_active = models.BooleanField(default=True, db_index=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = 'igny8_seed_keywords' + unique_together = [['keyword', 'industry', 'sector']] + verbose_name = 'Seed Keyword' + verbose_name_plural = 'Global Keywords Database' + indexes = [ + models.Index(fields=['keyword']), + models.Index(fields=['industry', 'sector']), + models.Index(fields=['industry', 'sector', 'is_active']), + models.Index(fields=['intent']), + ] + ordering = ['keyword'] + + def __str__(self): + return f"{self.keyword} ({self.industry.name} - {self.sector.name})" + + +class Sector(SoftDeletableModel, AccountBaseModel): + """ + Sector model - Each site can have 1-5 sectors. + Sectors are site-specific instances that reference an IndustrySector template. + Sectors contain keywords and clusters. 
+ """ + STATUS_CHOICES = [ + ('active', 'Active'), + ('inactive', 'Inactive'), + ] + + site = models.ForeignKey('igny8_core_auth.Site', on_delete=models.CASCADE, related_name='sectors') + industry_sector = models.ForeignKey( + 'igny8_core_auth.IndustrySector', + on_delete=models.PROTECT, + related_name='site_sectors', + null=True, + blank=True, + help_text="Reference to the industry sector template" + ) + name = models.CharField(max_length=255) + slug = models.SlugField(max_length=255) + description = models.TextField(blank=True, null=True) + is_active = models.BooleanField(default=True, db_index=True) + status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='active') + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + objects = SoftDeleteManager() + all_objects = models.Manager() + + class Meta: + db_table = 'igny8_sectors' + unique_together = [['site', 'slug']] # Slug unique per site + indexes = [ + models.Index(fields=['site', 'is_active']), + models.Index(fields=['account', 'site']), + models.Index(fields=['industry_sector']), + ] + + def __str__(self): + return f"{self.site.name} - {self.name}" + + @property + def industry(self): + """Get the industry for this sector.""" + return self.industry_sector.industry if self.industry_sector else None + + def save(self, *args, **kwargs): + """Ensure site belongs to same account, validate sector limit, and industry match.""" + # Set account from site + if self.site: + self.account = self.site.account + + # Validate that sector's industry_sector belongs to site's industry + if self.site and self.site.industry and self.industry_sector: + if self.industry_sector.industry != self.site.industry: + from django.core.exceptions import ValidationError + raise ValidationError( + f"Sector must belong to site's industry ({self.site.industry.name}). " + f"Selected sector belongs to {self.industry_sector.industry.name}." 
class SiteUserAccess(models.Model):
    """
    Many-to-many relationship between Users and Sites.
    Controls which users can access which sites.
    Owners and Admins have access to all sites automatically.

    Rows here matter only for roles below admin (e.g. editor/viewer) —
    see User.get_accessible_sites, which filters on user_access for them.
    """
    user = models.ForeignKey('igny8_core_auth.User', on_delete=models.CASCADE, related_name='site_access')
    site = models.ForeignKey('igny8_core_auth.Site', on_delete=models.CASCADE, related_name='user_access')
    granted_at = models.DateTimeField(auto_now_add=True)
    # Audit trail: who granted the access. Nullable so grants survive
    # deletion of the granting user (SET_NULL).
    granted_by = models.ForeignKey(
        'igny8_core_auth.User',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='granted_site_accesses'
    )

    class Meta:
        db_table = 'igny8_site_user_access'
        unique_together = [['user', 'site']]
        verbose_name = 'Site User Access'
        verbose_name_plural = 'Site User Access'
        indexes = [
            models.Index(fields=['user', 'site']),
        ]

    def __str__(self):
        return f"{self.user.email} -> {self.site.name}"
f"Password reset token for {self.user.email}" + + def is_valid(self): + """Check if token is valid (not used and not expired)""" + from django.utils import timezone + return not self.used and self.expires_at > timezone.now() + + +class User(AbstractUser): + """ + Custom user model with account relationship and role support. + """ + ROLE_CHOICES = [ + ('developer', 'Developer / Super Admin'), + ('owner', 'Owner'), + ('admin', 'Admin'), + ('editor', 'Editor'), + ('viewer', 'Viewer'), + ('system_bot', 'System Bot'), + ] + + account = models.ForeignKey('igny8_core_auth.Account', on_delete=models.CASCADE, related_name='users', null=True, blank=True, db_column='tenant_id') + role = models.CharField(max_length=20, choices=ROLE_CHOICES, default='viewer') + email = models.EmailField(_('email address'), unique=True) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + + USERNAME_FIELD = 'email' + REQUIRED_FIELDS = ['username'] + + class Meta: + db_table = 'igny8_users' + indexes = [ + models.Index(fields=['account', 'role']), + models.Index(fields=['email']), + ] + + def __str__(self): + return self.email + + def has_role(self, *roles): + """Check if user has any of the specified roles.""" + return self.role in roles + + def is_owner_or_admin(self): + """Check if user is owner or admin.""" + return self.role in ['owner', 'admin'] + + def is_developer(self): + """Check if user is a developer/super admin with full access.""" + return self.role == 'developer' or self.is_superuser + + def is_admin_or_developer(self): + """Check if user is admin or developer with override privileges.""" + # ADMIN/DEV OVERRIDE: Both admin and developer roles bypass account/site/sector restrictions + return self.role in ['admin', 'developer'] or self.is_superuser + + def is_system_account_user(self): + """Check if user belongs to a system account with highest access level.""" + try: + return self.account and self.account.is_system_account() + 
except (AttributeError, Exception): + # If account access fails (e.g., column mismatch), return False + return False + + def get_accessible_sites(self): + """Get all sites the user can access.""" + # System account users can access all sites across all accounts + if self.is_system_account_user(): + return Site.objects.filter(is_active=True).distinct() + + # Developers/super admins can access all sites across all accounts + # ADMIN/DEV OVERRIDE: Admins also bypass account restrictions (see is_admin_or_developer) + if self.is_developer(): + return Site.objects.filter(is_active=True).distinct() + + try: + if not self.account: + return Site.objects.none() + + # Owners and admins can access all sites in their account + if self.role in ['owner', 'admin']: + return Site.objects.filter(account=self.account, is_active=True) + + # Other users can only access sites explicitly granted via SiteUserAccess + return Site.objects.filter( + account=self.account, + is_active=True, + user_access__user=self + ).distinct() + except (AttributeError, Exception): + # If account access fails (e.g., column mismatch), return empty queryset + return Site.objects.none() + diff --git a/tenant/backend/igny8_core/auth/permissions.py b/tenant/backend/igny8_core/auth/permissions.py new file mode 100644 index 00000000..a81932e5 --- /dev/null +++ b/tenant/backend/igny8_core/auth/permissions.py @@ -0,0 +1,77 @@ +""" +Role-Based Access Control (RBAC) Permissions +""" +from rest_framework import permissions + + +class IsOwnerOrAdmin(permissions.BasePermission): + """Allow access only to owners and admins.""" + + def has_permission(self, request, view): + user = getattr(request, "user", None) + if not user or not user.is_authenticated: + return False + if getattr(user, "is_superuser", False): + return True + return user.role in ['owner', 'admin', 'developer'] + + +class IsEditorOrAbove(permissions.BasePermission): + """Allow access to editors, admins, and owners.""" + + def has_permission(self, request, 
class AccountPermission(permissions.BasePermission):
    """Ensure the requesting user belongs to the account being accessed.

    View-level: any authenticated user with an account (system bots are
    exempt). Object-level: the object's account, when present, must match
    the user's account.
    """

    def has_permission(self, request, view):
        user = request.user
        # Anonymous or missing users are always rejected.
        if not (user and user.is_authenticated):
            return False
        # System bots operate across every account.
        if user.role == 'system_bot':
            return True
        # Everyone else must at least be attached to an account; finer
        # checks happen at the object level.
        return bool(getattr(user, 'account', None))

    def has_object_permission(self, request, view, obj):
        user = request.user
        if not (user and user.is_authenticated):
            return False
        if user.role == 'system_bot':
            return True
        target_account = getattr(obj, 'account', None)
        if not target_account:
            # Objects without an account are not restricted here.
            return True
        return target_account == getattr(user, 'account', None)
@extend_schema_view(
    list=extend_schema(tags=['Authentication']),
    retrieve=extend_schema(tags=['Authentication']),
)
class GroupsViewSet(viewsets.ViewSet):
    """
    ViewSet for managing user roles and permissions (Groups).
    Groups are defined by the User.ROLE_CHOICES.
    Unified API Standard v1.0 compliant

    Read-only metadata endpoints: both actions serve static role data,
    nothing is persisted. Restricted to owners/admins/developers.
    """
    permission_classes = [IsOwnerOrAdmin]
    throttle_scope = 'auth'
    throttle_classes = [DebugScopedRateThrottle]

    def list(self, request):
        """List all available roles/groups."""
        # Static catalogue mirroring User.ROLE_CHOICES plus descriptions.
        roles = [
            {
                'id': 'developer',
                'name': 'Developer / Super Admin',
                'description': 'Full access across all accounts (bypasses all filters)',
                'permissions': ['full_access', 'bypass_filters', 'all_modules']
            },
            {
                'id': 'owner',
                'name': 'Owner',
                'description': 'Full account access, billing, automation',
                'permissions': ['account_management', 'billing', 'automation', 'all_sites']
            },
            {
                'id': 'admin',
                'name': 'Admin',
                'description': 'Manage content modules, view billing (no edit)',
                'permissions': ['content_management', 'view_billing', 'all_sites']
            },
            {
                'id': 'editor',
                'name': 'Editor',
                'description': 'Generate AI content, manage clusters/tasks',
                'permissions': ['ai_content', 'manage_clusters', 'manage_tasks', 'assigned_sites']
            },
            {
                'id': 'viewer',
                'name': 'Viewer',
                'description': 'Read-only dashboards',
                'permissions': ['read_only', 'assigned_sites']
            },
            {
                'id': 'system_bot',
                'name': 'System Bot',
                'description': 'System automation user',
                'permissions': ['automation_only']
            }
        ]
        return success_response(data={'groups': roles}, request=request)

    @action(detail=False, methods=['get'], url_path='permissions')
    def permissions(self, request):
        """Get permissions for a specific role."""
        role = request.query_params.get('role')
        if not role:
            return error_response(
                error='role parameter is required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # NOTE(review): these lists differ slightly from the ones embedded
        # in list() above (e.g. developer gains 'all_accounts', owner/admin
        # gain 'user_management') — confirm the divergence is intentional
        # rather than drift between two copies of the same data.
        role_permissions = {
            'developer': ['full_access', 'bypass_filters', 'all_modules', 'all_accounts'],
            'owner': ['account_management', 'billing', 'automation', 'all_sites', 'user_management'],
            'admin': ['content_management', 'view_billing', 'all_sites', 'user_management'],
            'editor': ['ai_content', 'manage_clusters', 'manage_tasks', 'assigned_sites'],
            'viewer': ['read_only', 'assigned_sites'],
            'system_bot': ['automation_only']
        }

        # Unknown roles fall back to an empty permission list (no 404).
        permissions_list = role_permissions.get(role, [])
        return success_response(
            data={
                'role': role,
                'permissions': permissions_list
            },
            request=request
        )
class UsersViewSet(AccountModelViewSet):
    """
    ViewSet for managing global user records and credentials.
    Users are global, but belong to accounts.
    Unified API Standard v1.0 compliant
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsOwnerOrAdmin]
    pagination_class = CustomPageNumberPagination
    throttle_scope = 'auth'
    throttle_classes = [DebugScopedRateThrottle]

    def get_queryset(self):
        """Return users based on access level."""
        user = self.request.user
        if not user or not user.is_authenticated:
            return User.objects.none()

        # Developers can see all users
        if user.is_developer():
            return User.objects.all()

        # Owners/Admins can see users in their account
        if user.role in ['owner', 'admin'] and user.account:
            return User.objects.filter(account=user.account)

        # Others can only see themselves
        return User.objects.filter(id=user.id)

    @action(detail=False, methods=['post'])
    def create_user(self, request):
        """Create a new user (separate from registration).

        Expects email/username/password (required) plus optional role and
        account_id; defaults the account to the caller's own.
        """
        from django.contrib.auth.password_validation import validate_password

        email = request.data.get('email')
        username = request.data.get('username')
        password = request.data.get('password')
        role = request.data.get('role', 'viewer')
        account_id = request.data.get('account_id')

        if not email or not username or not password:
            return error_response(
                error='email, username, and password are required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # Validate password
        try:
            validate_password(password)
        except Exception as e:
            return error_response(
                error=str(e),
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # Get account
        # NOTE(review): account_id is not checked against the caller's own
        # account here, so an owner/admin could attach the new user to an
        # arbitrary account — confirm HasTenantAccess (or similar) closes
        # this, otherwise it is a cross-tenant escalation path.
        account = None
        if account_id:
            try:
                account = Account.objects.get(id=account_id)
            except Account.DoesNotExist:
                return error_response(
                    error=f'Account with id {account_id} does not exist',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )
        else:
            # Use current user's account
            if request.user.account:
                account = request.user.account

        # Create user
        try:
            user = User.objects.create_user(
                username=username,
                email=email,
                password=password,
                role=role,
                account=account
            )
            serializer = UserSerializer(user)
            return success_response(
                data={'user': serializer.data},
                status_code=status.HTTP_201_CREATED,
                request=request
            )
        except Exception as e:
            # Broad by design: surfaces any create_user failure (duplicate
            # email/username, DB errors) as a 400 with the message.
            return error_response(
                error=str(e),
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

    @action(detail=True, methods=['post'])
    def update_role(self, request, pk=None):
        """Update user role."""
        # get_object() applies get_queryset(), so callers can only change
        # roles of users they are allowed to see.
        user = self.get_object()
        new_role = request.data.get('role')

        if not new_role:
            return error_response(
                error='role is required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        if new_role not in [choice[0] for choice in User.ROLE_CHOICES]:
            return error_response(
                error=f'Invalid role. Must be one of: {[c[0] for c in User.ROLE_CHOICES]}',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        user.role = new_role
        user.save()

        serializer = UserSerializer(user)
        return success_response(data={'user': serializer.data}, request=request)
+ Unified API Standard v1.0 compliant + """ + queryset = Account.objects.all() + serializer_class = AccountSerializer + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsOwnerOrAdmin] + pagination_class = CustomPageNumberPagination + throttle_scope = 'auth' + throttle_classes = [DebugScopedRateThrottle] + + def get_queryset(self): + """Return accounts based on access level.""" + user = self.request.user + if not user or not user.is_authenticated: + return Account.objects.none() + + # Developers can see all accounts + if user.is_developer(): + return Account.objects.all() + + # Owners can see their own accounts + if user.role == 'owner': + return Account.objects.filter(owner=user) + + # Admins can see their account + if user.role == 'admin' and user.account: + return Account.objects.filter(id=user.account.id) + + return Account.objects.none() + + def perform_create(self, serializer): + """Create account with owner.""" + user = self.request.user + + # plan_id is mapped to plan in serializer (source='plan') + plan = serializer.validated_data.get('plan') + + if not plan: + from rest_framework.exceptions import ValidationError + raise ValidationError("plan_id is required") + + # Set owner to current user if not provided + owner = serializer.validated_data.get('owner') + if not owner: + owner = user + + account = serializer.save(plan=plan, owner=owner) + return account + + + +# ============================================================================ +# 4. 
SUBSCRIPTIONS - Control plan level, limits, and billing per account +# ============================================================================ + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + create=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), + update=extend_schema(tags=['Authentication']), + partial_update=extend_schema(tags=['Authentication']), + destroy=extend_schema(tags=['Authentication']), +) +class SubscriptionsViewSet(AccountModelViewSet): + """ + ViewSet for managing subscriptions (plan level, limits, billing per account). + Unified API Standard v1.0 compliant + """ + queryset = Subscription.objects.all() + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsOwnerOrAdmin] + pagination_class = CustomPageNumberPagination + # Use relaxed auth throttle to avoid 429s during onboarding plan fetches + throttle_scope = 'auth_read' + throttle_classes = [DebugScopedRateThrottle] + + def get_queryset(self): + """Return subscriptions based on access level.""" + user = self.request.user + if not user or not user.is_authenticated: + return Subscription.objects.none() + + # Developers can see all subscriptions + if user.is_developer(): + return Subscription.objects.all() + + # Owners/Admins can see subscriptions for their account + if user.role in ['owner', 'admin'] and user.account: + return Subscription.objects.filter(account=user.account) + + return Subscription.objects.none() + + def get_serializer_class(self): + """Return appropriate serializer.""" + return SubscriptionSerializer + + @action(detail=False, methods=['get'], url_path='by-account/(?P[^/.]+)') + def by_account(self, request, account_id=None): + """Get subscription for a specific account.""" + try: + subscription = Subscription.objects.get(account_id=account_id) + serializer = self.get_serializer(subscription) + return success_response( + data={'subscription': serializer.data}, + request=request + ) + except 
Subscription.DoesNotExist: + return error_response( + error='Subscription not found for this account', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + +# ============================================================================ +# 5. SITE USER ACCESS - Assign users access to specific sites within account +# ============================================================================ + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + create=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), + update=extend_schema(tags=['Authentication']), + partial_update=extend_schema(tags=['Authentication']), + destroy=extend_schema(tags=['Authentication']), +) +class SiteUserAccessViewSet(AccountModelViewSet): + """ + ViewSet for managing Site-User access permissions. + Assign users access to specific sites within their account. + Unified API Standard v1.0 compliant + """ + serializer_class = SiteUserAccessSerializer + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsOwnerOrAdmin] + pagination_class = CustomPageNumberPagination + throttle_scope = 'auth' + throttle_classes = [DebugScopedRateThrottle] + + def get_queryset(self): + """Return access records for sites in user's account.""" + user = self.request.user + if not user or not user.is_authenticated: + return SiteUserAccess.objects.none() + + # Developers can see all access records + if user.is_developer(): + return SiteUserAccess.objects.all() + + if not user.account: + return SiteUserAccess.objects.none() + + # Return access records for sites in user's account + return SiteUserAccess.objects.filter(site__account=user.account) + + def perform_create(self, serializer): + """Create site user access with granted_by.""" + user = self.request.user + serializer.save(granted_by=user) + + +# ============================================================================ +# SUPPORTING VIEWSETS (Sites, Sectors, Industries, Plans, Auth) 
+# ============================================================================ + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), +) +class PlanViewSet(viewsets.ReadOnlyModelViewSet): + """ + ViewSet for listing active subscription plans. + Unified API Standard v1.0 compliant + """ + queryset = Plan.objects.filter(is_active=True) + serializer_class = PlanSerializer + permission_classes = [permissions.AllowAny] + pagination_class = CustomPageNumberPagination + # Plans are public and should not throttle aggressively to avoid blocking signup/onboarding + throttle_scope = None + throttle_classes: list = [] + + def retrieve(self, request, *args, **kwargs): + """Override retrieve to return unified format""" + try: + instance = self.get_object() + serializer = self.get_serializer(instance) + return success_response(data=serializer.data, request=request) + except Exception as e: + return error_response( + error=str(e), + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + create=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), + update=extend_schema(tags=['Authentication']), + partial_update=extend_schema(tags=['Authentication']), + destroy=extend_schema(tags=['Authentication']), +) +class SiteViewSet(AccountModelViewSet): + """ViewSet for managing Sites.""" + serializer_class = SiteSerializer + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsEditorOrAbove] + authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication] + + def get_permissions(self): + """Allow normal users (viewer) to create sites, but require editor+ for other operations.""" + # Allow public read access for list requests with slug filter (used by Sites Renderer) + if self.action == 'list' and self.request.query_params.get('slug'): + from rest_framework.permissions import 
AllowAny + return [AllowAny()] + if self.action == 'create': + return [permissions.IsAuthenticated()] + return [IsEditorOrAbove()] + + def get_queryset(self): + """Return sites accessible to the current user.""" + # If this is a public request (no auth) with slug filter, return site by slug + if not self.request.user or not self.request.user.is_authenticated: + slug = self.request.query_params.get('slug') + if slug: + # Return queryset directly from model (bypassing base class account filtering) + return Site.objects.filter(slug=slug, is_active=True) + return Site.objects.none() + + user = self.request.user + + # ADMIN/DEV OVERRIDE: Both admins and developers can see all sites + if user.is_admin_or_developer(): + return Site.objects.all().distinct() + + # Get account from user + account = getattr(user, 'account', None) + if not account: + return Site.objects.none() + + if user.role in ['owner', 'admin']: + return Site.objects.filter(account=account) + + return Site.objects.filter( + account=account, + user_access__user=user + ).distinct() + + def perform_create(self, serializer): + """Create site with account.""" + account = getattr(self.request, 'account', None) + if not account: + user = self.request.user + if user and user.is_authenticated: + account = getattr(user, 'account', None) + + # Multiple sites can be active simultaneously - no constraint + serializer.save(account=account) + + def perform_update(self, serializer): + """Update site.""" + account = getattr(self.request, 'account', None) + if not account: + account = getattr(serializer.instance, 'account', None) + + # Multiple sites can be active simultaneously - no constraint + serializer.save() + + @action(detail=True, methods=['get']) + def sectors(self, request, pk=None): + """Get all sectors for this site.""" + site = self.get_object() + sectors = site.sectors.filter(is_active=True) + serializer = SectorSerializer(sectors, many=True) + return success_response( + data=serializer.data, + request=request 
+ ) + + @action(detail=True, methods=['post'], url_path='set_active') + def set_active(self, request, pk=None): + """Set this site as active (multiple sites can be active simultaneously).""" + site = self.get_object() + + # Simply activate this site - no need to deactivate others + site.is_active = True + site.status = 'active' + site.save() + + serializer = self.get_serializer(site) + return success_response( + data={'site': serializer.data}, + message=f'Site "{site.name}" is now active', + request=request + ) + + @action(detail=True, methods=['post'], url_path='select_sectors') + def select_sectors(self, request, pk=None): + """Select industry and sectors for this site.""" + import logging + logger = logging.getLogger(__name__) + + try: + site = self.get_object() + except Exception as e: + logger.error(f"Error getting site object: {str(e)}", exc_info=True) + return error_response( + error=f'Site not found: {str(e)}', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + sector_slugs = request.data.get('sector_slugs', []) + industry_slug = request.data.get('industry_slug') + + if not industry_slug: + return error_response( + error='Industry slug is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + try: + industry = Industry.objects.get(slug=industry_slug, is_active=True) + except Industry.DoesNotExist: + return error_response( + error=f'Industry with slug "{industry_slug}" not found', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + site.industry = industry + site.save() + + if not sector_slugs: + return success_response( + data={ + 'site': SiteSerializer(site).data, + 'sectors': [] + }, + message=f'Industry "{industry.name}" set for site. 
No sectors selected.', + request=request + ) + + # Get plan's max_industries limit (if set), otherwise default to 5 + max_sectors = site.get_max_sectors_limit() + + if len(sector_slugs) > max_sectors: + return error_response( + error=f'Maximum {max_sectors} sectors allowed per site for this plan', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + created_sectors = [] + updated_sectors = [] + + existing_sector_slugs = set(sector_slugs) + site.sectors.exclude(slug__in=existing_sector_slugs).update(is_active=False) + + industry_sectors_map = {} + for sector_slug in sector_slugs: + industry_sector = IndustrySector.objects.filter( + industry=industry, + slug=sector_slug, + is_active=True + ).first() + + if not industry_sector: + return error_response( + error=f'Sector "{sector_slug}" not found in industry "{industry.name}"', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + industry_sectors_map[sector_slug] = industry_sector + + for sector_slug, industry_sector in industry_sectors_map.items(): + try: + # Check if site has account before proceeding + if not site.account: + logger.error(f"Site {site.id} has no account assigned") + return error_response( + error=f'Site "{site.name}" has no account assigned. 
Please contact support.', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + # Create or get sector - account will be set automatically in save() method + # But we need to pass it in defaults for get_or_create to work + sector, created = Sector.objects.get_or_create( + site=site, + slug=sector_slug, + defaults={ + 'industry_sector': industry_sector, + 'name': industry_sector.name, + 'description': industry_sector.description or '', + 'is_active': True, + 'status': 'active', + 'account': site.account # Pass the account object, not the ID + } + ) + + if not created: + # Update existing sector + sector.industry_sector = industry_sector + sector.name = industry_sector.name + sector.description = industry_sector.description or '' + sector.is_active = True + sector.status = 'active' + # Ensure account is set (save() will also set it, but be explicit) + if not sector.account: + sector.account = site.account + sector.save() + updated_sectors.append(sector) + else: + created_sectors.append(sector) + except Exception as e: + logger.error(f"Error creating/updating sector {sector_slug}: {str(e)}", exc_info=True) + return error_response( + error=f'Failed to create/update sector "{sector_slug}": {str(e)}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + # Get plan's max_industries limit (if set), otherwise default to 5 + max_sectors = site.get_max_sectors_limit() + + if site.get_active_sectors_count() > max_sectors: + return error_response( + error=f'Maximum {max_sectors} sectors allowed per site for this plan', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + serializer = SectorSerializer(site.sectors.filter(is_active=True), many=True) + return success_response( + data={ + 'created_count': len(created_sectors), + 'updated_count': len(updated_sectors), + 'sectors': serializer.data, + 'site': SiteSerializer(site).data + }, + message=f'Selected {len(sector_slugs)} sectors from industry 
"{industry.name}".', + request=request + ) + + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + create=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), + update=extend_schema(tags=['Authentication']), + partial_update=extend_schema(tags=['Authentication']), + destroy=extend_schema(tags=['Authentication']), +) +class SectorViewSet(AccountModelViewSet): + """ViewSet for managing Sectors.""" + serializer_class = SectorSerializer + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsEditorOrAbove] + authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication] + + def get_queryset(self): + """Return sectors from sites accessible to the current user.""" + user = self.request.user + if not user or not user.is_authenticated: + return Sector.objects.none() + + # ADMIN/DEV OVERRIDE: Both admins and developers can see all sectors across all sites + if user.is_admin_or_developer(): + return Sector.objects.all().distinct() + + accessible_sites = user.get_accessible_sites() + return Sector.objects.filter(site__in=accessible_sites) + + def get_queryset_with_site_filter(self): + """Get queryset, optionally filtered by site_id.""" + queryset = self.get_queryset() + site_id = self.request.query_params.get('site_id') + if site_id: + queryset = queryset.filter(site_id=site_id) + return queryset + + def list(self, request, *args, **kwargs): + """Override list to apply site filter.""" + queryset = self.get_queryset_with_site_filter() + serializer = self.get_serializer(queryset, many=True) + return success_response( + data=serializer.data, + request=request + ) + + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), +) +class IndustryViewSet(viewsets.ReadOnlyModelViewSet): + """ + ViewSet for industry templates. 
+ Unified API Standard v1.0 compliant + """ + queryset = Industry.objects.filter(is_active=True).prefetch_related('sectors') + serializer_class = IndustrySerializer + permission_classes = [permissions.AllowAny] + pagination_class = CustomPageNumberPagination + throttle_scope = 'auth' + throttle_classes = [DebugScopedRateThrottle] + + def list(self, request): + """Get all industries with their sectors.""" + industries = self.get_queryset() + serializer = self.get_serializer(industries, many=True) + return success_response( + data={'industries': serializer.data}, + request=request + ) + + def retrieve(self, request, *args, **kwargs): + """Override retrieve to return unified format""" + try: + instance = self.get_object() + serializer = self.get_serializer(instance) + return success_response(data=serializer.data, request=request) + except Exception as e: + return error_response( + error=str(e), + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + +@extend_schema_view( + list=extend_schema(tags=['Authentication']), + retrieve=extend_schema(tags=['Authentication']), +) +class SeedKeywordViewSet(viewsets.ReadOnlyModelViewSet): + """ + ViewSet for SeedKeyword - Global reference data (read-only for non-admins). 
+ Unified API Standard v1.0 compliant + """ + queryset = SeedKeyword.objects.filter(is_active=True).select_related('industry', 'sector') + serializer_class = SeedKeywordSerializer + permission_classes = [permissions.AllowAny] # Read-only, allow any authenticated user + pagination_class = CustomPageNumberPagination + throttle_scope = 'auth' + throttle_classes = [DebugScopedRateThrottle] + + filter_backends = [filters.SearchFilter, filters.OrderingFilter, DjangoFilterBackend] + search_fields = ['keyword'] + ordering_fields = ['keyword', 'volume', 'difficulty', 'created_at'] + ordering = ['keyword'] + filterset_fields = ['industry', 'sector', 'intent', 'is_active'] + + def retrieve(self, request, *args, **kwargs): + """Override retrieve to return unified format""" + try: + instance = self.get_object() + serializer = self.get_serializer(instance) + return success_response(data=serializer.data, request=request) + except Exception as e: + return error_response( + error=str(e), + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + def get_queryset(self): + """Filter by industry and sector if provided.""" + queryset = super().get_queryset() + industry_id = self.request.query_params.get('industry_id') + industry_name = self.request.query_params.get('industry_name') + sector_id = self.request.query_params.get('sector_id') + sector_name = self.request.query_params.get('sector_name') + + if industry_id: + queryset = queryset.filter(industry_id=industry_id) + if industry_name: + queryset = queryset.filter(industry__name__icontains=industry_name) + if sector_id: + queryset = queryset.filter(sector_id=sector_id) + if sector_name: + queryset = queryset.filter(sector__name__icontains=sector_name) + + return queryset + + @action(detail=False, methods=['post'], url_path='import_seed_keywords', url_name='import_seed_keywords') + def import_seed_keywords(self, request): + """ + Import seed keywords from CSV (Admin/Superuser only). 
+ Expected columns: keyword, industry_name, sector_name, volume, difficulty, intent + """ + import csv + from django.db import transaction + + # Check admin/superuser permission + if not (request.user.is_staff or request.user.is_superuser): + return error_response( + error='Admin or superuser access required', + status_code=status.HTTP_403_FORBIDDEN, + request=request + ) + + if 'file' not in request.FILES: + return error_response( + error='No file provided', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + file = request.FILES['file'] + if not file.name.endswith('.csv'): + return error_response( + error='File must be a CSV', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + try: + # Parse CSV + decoded_file = file.read().decode('utf-8') + csv_reader = csv.DictReader(decoded_file.splitlines()) + + imported_count = 0 + skipped_count = 0 + errors = [] + + with transaction.atomic(): + for row_num, row in enumerate(csv_reader, start=2): # Start at 2 (header is row 1) + try: + keyword_text = row.get('keyword', '').strip() + industry_name = row.get('industry_name', '').strip() + sector_name = row.get('sector_name', '').strip() + + if not all([keyword_text, industry_name, sector_name]): + skipped_count += 1 + continue + + # Get or create industry + industry = Industry.objects.filter(name=industry_name).first() + if not industry: + errors.append(f"Row {row_num}: Industry '{industry_name}' not found") + skipped_count += 1 + continue + + # Get or create industry sector + sector = IndustrySector.objects.filter( + industry=industry, + name=sector_name + ).first() + if not sector: + errors.append(f"Row {row_num}: Sector '{sector_name}' not found for industry '{industry_name}'") + skipped_count += 1 + continue + + # Check if keyword already exists + existing = SeedKeyword.objects.filter( + keyword=keyword_text, + industry=industry, + sector=sector + ).first() + + if existing: + skipped_count += 1 + continue + + # Create seed keyword + 
SeedKeyword.objects.create( + keyword=keyword_text, + industry=industry, + sector=sector, + volume=int(row.get('volume', 0) or 0), + difficulty=int(row.get('difficulty', 0) or 0), + intent=row.get('intent', 'informational') or 'informational', + is_active=True + ) + imported_count += 1 + + except Exception as e: + errors.append(f"Row {row_num}: {str(e)}") + skipped_count += 1 + + return success_response( + data={ + 'imported': imported_count, + 'skipped': skipped_count, + 'errors': errors[:10] if errors else [] # Limit errors to first 10 + }, + message=f'Import completed: {imported_count} keywords imported, {skipped_count} skipped', + request=request + ) + + except Exception as e: + return error_response( + error=f'Failed to import keywords: {str(e)}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + +# ============================================================================ +# AUTHENTICATION ENDPOINTS (Register, Login, Change Password, Me) +# ============================================================================ + +@extend_schema_view( + register=extend_schema(tags=['Authentication']), + login=extend_schema(tags=['Authentication']), + change_password=extend_schema(tags=['Authentication']), + refresh_token=extend_schema(tags=['Authentication']), +) +class AuthViewSet(viewsets.GenericViewSet): + """Authentication endpoints. 
+ Unified API Standard v1.0 compliant + """ + permission_classes = [permissions.AllowAny] + throttle_scope = 'auth_strict' + throttle_classes = [DebugScopedRateThrottle] + + @action(detail=False, methods=['post']) + def register(self, request): + """User registration endpoint.""" + serializer = RegisterSerializer(data=request.data) + if serializer.is_valid(): + user = serializer.save() + + # Log the user in (create session for session authentication) + from django.contrib.auth import login + login(request, user) + + # Get account from user + account = getattr(user, 'account', None) + + # Generate JWT tokens + access_token = generate_access_token(user, account) + refresh_token = generate_refresh_token(user, account) + access_expires_at = get_token_expiry('access') + refresh_expires_at = get_token_expiry('refresh') + + user_serializer = UserSerializer(user) + return success_response( + data={ + 'user': user_serializer.data, + 'tokens': { + 'access': access_token, + 'refresh': refresh_token, + 'access_expires_at': access_expires_at.isoformat(), + 'refresh_expires_at': refresh_expires_at.isoformat(), + } + }, + message='Registration successful', + status_code=status.HTTP_201_CREATED, + request=request + ) + return error_response( + error='Validation failed', + errors=serializer.errors, + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + @action(detail=False, methods=['post']) + def login(self, request): + """User login endpoint.""" + serializer = LoginSerializer(data=request.data) + if serializer.is_valid(): + email = serializer.validated_data['email'] + password = serializer.validated_data['password'] + + try: + user = User.objects.select_related('account', 'account__plan').get(email=email) + except User.DoesNotExist: + return error_response( + error='Invalid credentials', + status_code=status.HTTP_401_UNAUTHORIZED, + request=request + ) + + if user.check_password(password): + # Ensure user has an account + account = getattr(user, 'account', None) + 
if account is None: + return error_response( + error='Account not configured for this user. Please contact support.', + status_code=status.HTTP_403_FORBIDDEN, + request=request, + ) + + # Ensure account has an active plan + plan = getattr(account, 'plan', None) + if plan is None or getattr(plan, 'is_active', False) is False: + return error_response( + error='Active subscription required. Visit igny8.com/pricing to subscribe.', + status_code=status.HTTP_402_PAYMENT_REQUIRED, + request=request, + ) + + # Log the user in (create session for session authentication) + from django.contrib.auth import login + login(request, user) + + # Generate JWT tokens + access_token = generate_access_token(user, account) + refresh_token = generate_refresh_token(user, account) + access_expires_at = get_token_expiry('access') + refresh_expires_at = get_token_expiry('refresh') + + user_serializer = UserSerializer(user) + return success_response( + data={ + 'user': user_serializer.data, + 'access': access_token, + 'refresh': refresh_token, + 'access_expires_at': access_expires_at.isoformat(), + 'refresh_expires_at': refresh_expires_at.isoformat(), + }, + message='Login successful', + request=request + ) + + return error_response( + error='Invalid credentials', + status_code=status.HTTP_401_UNAUTHORIZED, + request=request + ) + + return error_response( + error='Validation failed', + errors=serializer.errors, + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + @action(detail=False, methods=['post'], permission_classes=[permissions.IsAuthenticated]) + def change_password(self, request): + """Change password endpoint.""" + serializer = ChangePasswordSerializer(data=request.data, context={'request': request}) + if serializer.is_valid(): + user = request.user + if not user.check_password(serializer.validated_data['old_password']): + return error_response( + error='Current password is incorrect', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + 
user.set_password(serializer.validated_data['new_password']) + user.save() + + return success_response( + message='Password changed successfully', + request=request + ) + + return error_response( + error='Validation failed', + errors=serializer.errors, + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + @action(detail=False, methods=['get'], permission_classes=[permissions.IsAuthenticated]) + def me(self, request): + """Get current user information.""" + # Refresh user from DB to get latest account/plan data + # This ensures account/plan changes are reflected immediately + user = User.objects.select_related('account', 'account__plan').get(id=request.user.id) + serializer = UserSerializer(user) + return success_response( + data={'user': serializer.data}, + request=request + ) + + @action(detail=False, methods=['post'], permission_classes=[permissions.AllowAny]) + def refresh(self, request): + """Refresh access token using refresh token.""" + serializer = RefreshTokenSerializer(data=request.data) + if not serializer.is_valid(): + return error_response( + error='Validation failed', + errors=serializer.errors, + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + refresh_token = serializer.validated_data['refresh'] + + try: + # Decode and validate refresh token + payload = decode_token(refresh_token) + + # Verify it's a refresh token + if payload.get('type') != 'refresh': + return error_response( + error='Invalid token type', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Get user + user_id = payload.get('user_id') + account_id = payload.get('account_id') + + try: + user = User.objects.get(id=user_id) + except User.DoesNotExist: + return error_response( + error='User not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + # Get account + account_id = payload.get('account_id') + account = None + if account_id: + try: + account = Account.objects.get(id=account_id) + except 
Account.DoesNotExist: + pass + + if not account: + account = getattr(user, 'account', None) + + # Generate new access token + access_token = generate_access_token(user, account) + access_expires_at = get_token_expiry('access') + + return success_response( + data={ + 'access': access_token, + 'access_expires_at': access_expires_at.isoformat() + }, + request=request + ) + + except jwt.InvalidTokenError as e: + return error_response( + error='Invalid or expired refresh token', + status_code=status.HTTP_401_UNAUTHORIZED, + request=request + ) + + @action(detail=False, methods=['post'], permission_classes=[permissions.AllowAny]) + def request_reset(self, request): + """Request password reset - sends email with reset token.""" + serializer = RequestPasswordResetSerializer(data=request.data) + if not serializer.is_valid(): + return error_response( + error='Validation failed', + errors=serializer.errors, + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + email = serializer.validated_data['email'] + + try: + user = User.objects.get(email=email) + except User.DoesNotExist: + # Don't reveal if email exists - return success anyway + return success_response( + message='If an account with that email exists, a password reset link has been sent.', + request=request + ) + + # Generate secure token + import secrets + token = secrets.token_urlsafe(32) + + # Create reset token (expires in 1 hour) + from django.utils import timezone + from datetime import timedelta + expires_at = timezone.now() + timedelta(hours=1) + + PasswordResetToken.objects.create( + user=user, + token=token, + expires_at=expires_at + ) + + # Send email (async via Celery if available, otherwise sync) + try: + from igny8_core.modules.system.tasks import send_password_reset_email + send_password_reset_email.delay(user.id, token) + except: + # Fallback to sync email sending + from django.core.mail import send_mail + from django.conf import settings + + reset_url = 
f"{request.scheme}://{request.get_host()}/reset-password?token={token}" + + send_mail( + subject='Reset Your IGNY8 Password', + message=f'Click the following link to reset your password: {reset_url}\n\nThis link expires in 1 hour.', + from_email=getattr(settings, 'DEFAULT_FROM_EMAIL', 'noreply@igny8.com'), + recipient_list=[user.email], + fail_silently=False, + ) + + return success_response( + message='If an account with that email exists, a password reset link has been sent.', + request=request + ) + + @action(detail=False, methods=['post'], permission_classes=[permissions.AllowAny]) + def reset_password(self, request): + """Reset password using reset token.""" + serializer = ResetPasswordSerializer(data=request.data) + if not serializer.is_valid(): + return error_response( + error='Validation failed', + errors=serializer.errors, + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + token = serializer.validated_data['token'] + new_password = serializer.validated_data['new_password'] + + try: + reset_token = PasswordResetToken.objects.get(token=token) + except PasswordResetToken.DoesNotExist: + return error_response( + error='Invalid reset token', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Check if token is valid + if not reset_token.is_valid(): + return error_response( + error='Reset token has expired or has already been used', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Update password + user = reset_token.user + user.set_password(new_password) + user.save() + + # Mark token as used + reset_token.used = True + reset_token.save() + + return success_response( + message='Password has been reset successfully', + request=request + ) + + +# ============================================================================ +# CSV Import/Export Views for Admin +# ============================================================================ + +from django.http import HttpResponse, JsonResponse +from 
@staff_member_required
@require_http_methods(["GET"])
def industry_csv_template(request):
    """Download a sample CSV showing the expected Industry import columns."""
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="industry_template.csv"'

    writer = csv.writer(response)
    writer.writerow(['name', 'description', 'is_active'])
    writer.writerow(['Technology', 'Technology industry', 'true'])
    writer.writerow(['Healthcare', 'Healthcare and medical services', 'true'])

    return response


@staff_member_required
@require_http_methods(["POST"])
def industry_csv_import(request):
    """Upsert Industry rows from an uploaded CSV (keyed on name).

    Collects per-row errors instead of aborting the whole import; returns
    counts of created/updated rows plus the error list.
    """
    if not request.FILES.get('csv_file'):
        return JsonResponse({'success': False, 'error': 'No CSV file provided'}, status=400)

    csv_file = request.FILES['csv_file']
    # BUG FIX: decode with utf-8-sig so a UTF-8 BOM (typical of Excel
    # exports) does not corrupt the first header into '\ufeffname',
    # which made row['name'] raise KeyError for every row.
    decoded_file = csv_file.read().decode('utf-8-sig')
    io_string = io.StringIO(decoded_file)
    reader = csv.DictReader(io_string)

    created = 0
    updated = 0
    errors = []

    from django.utils.text import slugify

    # start=2: row 1 of the file is the header line.
    for row_num, row in enumerate(reader, start=2):
        try:
            is_active = row.get('is_active', 'true').lower() in ['true', '1', 'yes']
            slug = slugify(row['name'])

            industry, created_flag = Industry.objects.update_or_create(
                name=row['name'],
                defaults={
                    'slug': slug,
                    'description': row.get('description', ''),
                    'is_active': is_active
                }
            )
            if created_flag:
                created += 1
            else:
                updated += 1
        except Exception as e:
            errors.append(f"Row {row_num}: {str(e)}")

    return JsonResponse({
        'success': True,
        'created': created,
        'updated': updated,
        'errors': errors
    })
@staff_member_required
@require_http_methods(["GET"])
def industrysector_csv_template(request):
    """Download a sample CSV showing the expected IndustrySector columns."""
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="industrysector_template.csv"'

    writer = csv.writer(response)
    writer.writerow(['name', 'industry', 'description', 'is_active'])
    writer.writerow(['Software Development', 'Technology', 'Software and app development', 'true'])
    writer.writerow(['Healthcare IT', 'Healthcare', 'Healthcare information technology', 'true'])

    return response


@staff_member_required
@require_http_methods(["POST"])
def industrysector_csv_import(request):
    """Upsert IndustrySector rows from an uploaded CSV.

    Each row references its parent Industry by name; rows whose industry
    cannot be resolved are reported in `errors` and skipped.
    """
    if not request.FILES.get('csv_file'):
        return JsonResponse({'success': False, 'error': 'No CSV file provided'}, status=400)

    csv_file = request.FILES['csv_file']
    # BUG FIX: utf-8-sig strips a leading BOM that would otherwise break
    # the 'name' header lookup on Excel-exported files.
    decoded_file = csv_file.read().decode('utf-8-sig')
    io_string = io.StringIO(decoded_file)
    reader = csv.DictReader(io_string)

    created = 0
    updated = 0
    errors = []

    from django.utils.text import slugify

    # start=2: row 1 of the file is the header line.
    for row_num, row in enumerate(reader, start=2):
        try:
            is_active = row.get('is_active', 'true').lower() in ['true', '1', 'yes']
            slug = slugify(row['name'])

            # Resolve parent industry by name; skip the row if unknown.
            try:
                industry = Industry.objects.get(name=row['industry'])
            except Industry.DoesNotExist:
                errors.append(f"Row {row_num}: Industry '{row['industry']}' not found")
                continue

            sector, created_flag = IndustrySector.objects.update_or_create(
                name=row['name'],
                industry=industry,
                defaults={
                    'slug': slug,
                    'description': row.get('description', ''),
                    'is_active': is_active
                }
            )
            if created_flag:
                created += 1
            else:
                updated += 1
        except Exception as e:
            errors.append(f"Row {row_num}: {str(e)}")

    return JsonResponse({
        'success': True,
        'created': created,
        'updated': updated,
        'errors': errors
    })
@staff_member_required
@require_http_methods(["GET"])
def seedkeyword_csv_template(request):
    """Download a sample CSV showing the expected SeedKeyword columns."""
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="seedkeyword_template.csv"'

    writer = csv.writer(response)
    writer.writerow(['keyword', 'industry', 'sector', 'volume', 'difficulty', 'intent', 'is_active'])
    writer.writerow(['python programming', 'Technology', 'Software Development', '10000', '45', 'Informational', 'true'])
    writer.writerow(['medical software', 'Healthcare', 'Healthcare IT', '5000', '60', 'Commercial', 'true'])

    return response


@staff_member_required
@require_http_methods(["POST"])
def seedkeyword_csv_import(request):
    """Upsert SeedKeyword rows from an uploaded CSV.

    Rows reference Industry and IndustrySector by name; unresolved
    references are collected in `errors` and the row is skipped.
    """
    if not request.FILES.get('csv_file'):
        return JsonResponse({'success': False, 'error': 'No CSV file provided'}, status=400)

    csv_file = request.FILES['csv_file']
    # BUG FIX: utf-8-sig strips a leading BOM so the 'keyword' header is
    # parsed correctly on Excel-exported files.
    decoded_file = csv_file.read().decode('utf-8-sig')
    io_string = io.StringIO(decoded_file)
    reader = csv.DictReader(io_string)

    created = 0
    updated = 0
    errors = []

    # start=2: row 1 of the file is the header line.
    for row_num, row in enumerate(reader, start=2):
        try:
            is_active = row.get('is_active', 'true').lower() in ['true', '1', 'yes']

            # Resolve industry, then sector scoped to that industry.
            try:
                industry = Industry.objects.get(name=row['industry'])
            except Industry.DoesNotExist:
                errors.append(f"Row {row_num}: Industry '{row['industry']}' not found")
                continue

            try:
                sector = IndustrySector.objects.get(name=row['sector'], industry=industry)
            except IndustrySector.DoesNotExist:
                errors.append(f"Row {row_num}: Sector '{row['sector']}' not found in industry '{row['industry']}'")
                continue

            # BUG FIX: `int(row.get('volume', 0))` raised ValueError on
            # empty CSV cells (int('')); `or 0` treats blanks as zero.
            keyword, created_flag = SeedKeyword.objects.update_or_create(
                keyword=row['keyword'],
                industry=industry,
                sector=sector,
                defaults={
                    'volume': int(row.get('volume') or 0),
                    'difficulty': int(row.get('difficulty') or 0),
                    'intent': row.get('intent', 'Informational'),
                    'is_active': is_active
                }
            )
            if created_flag:
                created += 1
            else:
                updated += 1
        except Exception as e:
            errors.append(f"Row {row_num}: {str(e)}")

    return JsonResponse({
        'success': True,
        'created': created,
        'updated': updated,
        'errors': errors
    })
class RequestIDMiddleware(MiddlewareMixin):
    """
    Middleware that attaches a unique request ID to every request and
    echoes it back in the X-Request-ID response header.

    An inbound X-Request-ID header is honoured so IDs can be propagated
    across services; otherwise a fresh UUID4 is generated.
    """

    def process_request(self, request):
        """Reuse the client-supplied request ID or generate a new one."""
        # WSGI exposes HTTP headers under HTTP_-prefixed META keys, so
        # 'HTTP_X_REQUEST_ID' is the only key that can match here.
        # (The original's extra META.get('X-Request-ID') lookup was dead
        # code and has been removed.)
        request_id = request.META.get('HTTP_X_REQUEST_ID')

        if not request_id:
            request_id = str(uuid.uuid4())

        # Store in request for use in views/exception handlers
        request.request_id = request_id

        return None

    def process_response(self, request, response):
        """Copy the request ID (if present) into the response headers."""
        request_id = getattr(request, 'request_id', None)

        if request_id:
            response['X-Request-ID'] = request_id

        return response
+""" +import psutil +import time +import threading +import logging +from django.utils.deprecation import MiddlewareMixin +from django.core.cache import cache + +logger = logging.getLogger(__name__) + + +class ResourceTrackingMiddleware(MiddlewareMixin): + """ + Middleware to track resource usage per request. + Stores metrics in cache with request ID for retrieval. + Only tracks for authenticated admin/developer users. + """ + thread_local = threading.local() + + def process_request(self, request): + """Start tracking resources for this request""" + # Only track if user is authenticated + if not hasattr(request, 'user') or not request.user.is_authenticated: + return None + + # Check if user is admin/developer + if not (hasattr(request.user, 'is_admin_or_developer') and request.user.is_admin_or_developer()): + return None + + # Check if debug tracking is enabled via header (set by frontend) + debug_enabled = request.headers.get('X-Debug-Resource-Tracking', '').lower() == 'true' + if not debug_enabled: + return None + + try: + # Generate request ID + request_id = f"req_{int(time.time() * 1000000)}" + request.resource_tracking_id = request_id + + # Get initial process stats + process = psutil.Process() + initial_cpu_times = process.cpu_times() + initial_memory = process.memory_info() + initial_io = process.io_counters() if hasattr(process, 'io_counters') else None + + # Store initial state + self.thread_local.start_time = time.time() + self.thread_local.initial_cpu_times = initial_cpu_times + self.thread_local.initial_memory = initial_memory + self.thread_local.initial_io = initial_io + self.thread_local.process = process + self.thread_local.request_id = request_id + + except Exception as e: + logger.warning(f"Error starting resource tracking: {str(e)}") + # Don't break the request if tracking fails + + return None + + def process_response(self, request, response): + """Calculate and store resource usage for this request""" + if not hasattr(request, 
'resource_tracking_id'): + return response + + try: + # Calculate elapsed time + elapsed_time = time.time() - self.thread_local.start_time + + # Get final process stats + process = self.thread_local.process + final_cpu_times = process.cpu_times() + final_memory = process.memory_info() + final_io = process.io_counters() if hasattr(process, 'io_counters') else None + + # Calculate CPU usage (user + system time) + cpu_user_time = (final_cpu_times.user - self.thread_local.initial_cpu_times.user) * 1000 # ms + cpu_system_time = (final_cpu_times.system - self.thread_local.initial_cpu_times.system) * 1000 # ms + cpu_total_time = cpu_user_time + cpu_system_time + + # Calculate memory delta + memory_delta = final_memory.rss - self.thread_local.initial_memory.rss + + # Calculate I/O + io_read = 0 + io_write = 0 + if final_io and self.thread_local.initial_io: + io_read = final_io.read_bytes - self.thread_local.initial_io.read_bytes + io_write = final_io.write_bytes - self.thread_local.initial_io.write_bytes + + # Get system-wide stats + cpu_percent = psutil.cpu_percent(interval=0.1) + memory = psutil.virtual_memory() + + # Store metrics in cache (expire after 5 minutes) + metrics = { + 'request_id': request.resource_tracking_id, + 'path': request.path, + 'method': request.method, + 'elapsed_time_ms': round(elapsed_time * 1000, 2), + 'cpu': { + 'user_time_ms': round(cpu_user_time, 2), + 'system_time_ms': round(cpu_system_time, 2), + 'total_time_ms': round(cpu_total_time, 2), + 'system_percent': round(cpu_percent, 2), + }, + 'memory': { + 'delta_bytes': memory_delta, + 'delta_mb': round(memory_delta / (1024**2), 2), + 'final_rss_mb': round(final_memory.rss / (1024**2), 2), + 'system_used_percent': round(memory.percent, 2), + }, + 'io': { + 'read_bytes': io_read, + 'read_mb': round(io_read / (1024**2), 2), + 'write_bytes': io_write, + 'write_mb': round(io_write / (1024**2), 2), + }, + 'timestamp': time.time(), + } + + # Store in cache with 5 minute expiry + 
cache.set(f"resource_tracking_{request.resource_tracking_id}", metrics, 300) + + # Add request ID to response header + response['X-Resource-Tracking-ID'] = request.resource_tracking_id + + except Exception as e: + logger.warning(f"Error calculating resource tracking: {str(e)}") + # Don't break the response if tracking fails + + return response + diff --git a/tenant/backend/igny8_core/modules/billing/views.py b/tenant/backend/igny8_core/modules/billing/views.py new file mode 100644 index 00000000..acaa2ea2 --- /dev/null +++ b/tenant/backend/igny8_core/modules/billing/views.py @@ -0,0 +1,581 @@ +""" +ViewSets for Billing API +Unified API Standard v1.0 compliant +""" +from rest_framework import viewsets, status, permissions +from rest_framework.decorators import action +from rest_framework.response import Response +from django.db.models import Sum, Count, Q +from django.utils import timezone +from datetime import timedelta +from decimal import Decimal +from drf_spectacular.utils import extend_schema, extend_schema_view +from igny8_core.api.base import AccountModelViewSet +from igny8_core.api.pagination import CustomPageNumberPagination +from igny8_core.api.response import success_response, error_response +from igny8_core.api.throttles import DebugScopedRateThrottle +from igny8_core.api.authentication import JWTAuthentication, CSRFExemptSessionAuthentication +from igny8_core.api.permissions import IsAuthenticatedAndActive, HasTenantAccess, IsAdminOrOwner +from .models import CreditTransaction, CreditUsageLog +from .serializers import ( + CreditTransactionSerializer, CreditUsageLogSerializer, + CreditBalanceSerializer, UsageSummarySerializer, UsageLimitsSerializer +) +from .services import CreditService +from .exceptions import InsufficientCreditsError + + +@extend_schema_view( + list=extend_schema(tags=['Billing'], summary='Get credit balance'), +) +class CreditBalanceViewSet(viewsets.ViewSet): + """ + ViewSet for credit balance operations + Unified API Standard v1.0 
compliant + """ + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess] + authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication] + throttle_scope = 'billing' + throttle_classes = [DebugScopedRateThrottle] + + def list(self, request): + """Get current credit balance and usage""" + account = getattr(request, 'account', None) + if not account: + user = getattr(request, 'user', None) + if user and user.is_authenticated: + from igny8_core.auth.models import User as UserModel + user = UserModel.objects.select_related('account', 'account__plan').get(id=user.id) + account = user.account + request.account = account + + if not account: + return success_response(data={ + 'credits': 0, + 'plan_credits_per_month': 0, + 'credits_used_this_month': 0, + 'credits_remaining': 0, + }, request=request) + + # Get plan credits - plan is already associated + plan_credits_per_month = 0 + if account.plan: + plan_credits_per_month = account.plan.get_effective_credits_per_month() + + # Calculate credits used this month + now = timezone.now() + start_of_month = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + credits_used_this_month = CreditUsageLog.objects.filter( + account=account, + created_at__gte=start_of_month + ).aggregate(total=Sum('credits_used'))['total'] or 0 + + credits = account.credits or 0 + credits_remaining = credits + + data = { + 'credits': credits, + 'plan_credits_per_month': plan_credits_per_month, + 'credits_used_this_month': credits_used_this_month, + 'credits_remaining': credits_remaining, + } + + # Validate and serialize data + serializer = CreditBalanceSerializer(data=data) + serializer.is_valid(raise_exception=True) + return success_response(data=serializer.validated_data, request=request) + + +@extend_schema_view( + list=extend_schema(tags=['Billing']), + retrieve=extend_schema(tags=['Billing']), +) +class CreditUsageViewSet(AccountModelViewSet): + """ + ViewSet for credit usage logs + Unified API Standard v1.0 
    queryset = CreditUsageLog.objects.all()
    serializer_class = CreditUsageLogSerializer
    permission_classes = [IsAuthenticatedAndActive, HasTenantAccess]
    authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication]
    pagination_class = CustomPageNumberPagination
    throttle_scope = 'billing'
    throttle_classes = [DebugScopedRateThrottle]

    # No DRF filter backends; filtering is done manually in get_queryset().
    filter_backends = []

    def get_queryset(self):
        """Get usage logs for current account - base class handles account filtering"""
        queryset = super().get_queryset()

        # Filter by operation type
        operation_type = self.request.query_params.get('operation_type')
        if operation_type:
            queryset = queryset.filter(operation_type=operation_type)

        # Filter by date range (raw query-param strings are passed straight
        # to the ORM, which parses ISO-format date strings)
        start_date = self.request.query_params.get('start_date')
        end_date = self.request.query_params.get('end_date')
        if start_date:
            queryset = queryset.filter(created_at__gte=start_date)
        if end_date:
            queryset = queryset.filter(created_at__lte=end_date)

        return queryset.order_by('-created_at')

    @extend_schema(tags=['Billing'], summary='Get usage summary')
    @action(detail=False, methods=['get'])
    def summary(self, request):
        """Get usage summary for date range"""
        account = getattr(request, 'account', None)
        if not account:
            user = getattr(request, 'user', None)
            if user:
                account = getattr(user, 'account', None)

        if not account:
            return error_response(
                error='Account not found',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # Get date range from query params
        start_date = request.query_params.get('start_date')
        end_date = request.query_params.get('end_date')

        # Default to current month if not provided
        now = timezone.now()
        if not start_date:
            start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
        else:
            # Fall back to the raw string when parsing fails; the ORM will
            # attempt to interpret it in the filter below.
            from django.utils.dateparse import parse_datetime
            start_date = parse_datetime(start_date) or start_date

        if not end_date:
            end_date = now
        else:
            from django.utils.dateparse import parse_datetime
            end_date = parse_datetime(end_date) or end_date

        # Get usage logs in date range
        usage_logs = CreditUsageLog.objects.filter(
            account=account,
            created_at__gte=start_date,
            created_at__lte=end_date
        )

        # Calculate totals
        total_credits_used = usage_logs.aggregate(total=Sum('credits_used'))['total'] or 0
        total_cost_usd = usage_logs.aggregate(total=Sum('cost_usd'))['total'] or Decimal('0.00')

        # Group by operation type
        # NOTE(review): this loop issues ~3 queries per operation type;
        # fine at current scale, but a single values().annotate() would
        # collapse it if this endpoint ever gets hot.
        by_operation = {}
        for operation_type, _ in CreditUsageLog.OPERATION_TYPE_CHOICES:
            operation_logs = usage_logs.filter(operation_type=operation_type)
            credits = operation_logs.aggregate(total=Sum('credits_used'))['total'] or 0
            cost = operation_logs.aggregate(total=Sum('cost_usd'))['total'] or Decimal('0.00')
            count = operation_logs.count()

            # Only report operation types that saw any activity
            if credits > 0 or count > 0:
                by_operation[operation_type] = {
                    'credits': credits,
                    'cost': float(cost),
                    'count': count
                }

        # Group by model (single aggregated query, excluding blank models)
        by_model = {}
        model_stats = usage_logs.values('model_used').annotate(
            credits=Sum('credits_used'),
            cost=Sum('cost_usd'),
            count=Count('id')
        ).filter(model_used__isnull=False).exclude(model_used='')

        for stat in model_stats:
            model = stat['model_used']
            by_model[model] = {
                'credits': stat['credits'] or 0,
                'cost': float(stat['cost'] or Decimal('0.00'))
            }

        data = {
            'period': {
                # start/end may be datetimes or raw strings (see parsing above)
                'start': start_date.isoformat() if hasattr(start_date, 'isoformat') else str(start_date),
                'end': end_date.isoformat() if hasattr(end_date, 'isoformat') else str(end_date),
            },
            'total_credits_used': total_credits_used,
            'total_cost_usd': float(total_cost_usd),
            'by_operation': by_operation,
            'by_model': by_model,
        }

        serializer = UsageSummarySerializer(data)
        return success_response(data=serializer.data, request=request)
    def limits(self, request):
        """
        Get account limits and credit usage statistics (Phase 0: Credit-only system).
        Returns account management limits and credit usage only.
        """
        # Try multiple ways to get account
        account = getattr(request, 'account', None)

        if not account:
            user = getattr(request, 'user', None)
            if user and user.is_authenticated:
                # Try to get account from user - refresh from DB to ensure we have latest
                try:
                    from igny8_core.auth.models import User as UserModel
                    # Refresh user from DB to get account relationship
                    user = UserModel.objects.select_related('account', 'account__plan').get(id=user.id)
                    account = user.account
                    # Also set it on request for future use
                    request.account = account
                # NOTE(review): listing Exception alongside the narrower
                # types makes them redundant — any failure yields account=None
                except (AttributeError, UserModel.DoesNotExist, Exception) as e:
                    account = None

        if not account:
            # Return empty limits instead of error - frontend will show "no data" message
            return success_response(data={'limits': []}, request=request)

        plan = account.plan
        if not plan:
            # Return empty limits instead of error - allows frontend to show "no plan" message
            return success_response(data={'limits': []}, request=request)

        # Import models
        from igny8_core.auth.models import User, Site

        # Get current month boundaries
        now = timezone.now()
        start_of_month = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)

        # Calculate usage statistics
        limits_data = []

        # Credit Usage (Phase 0: Credit-only system) — total for the month
        credits_used_month = CreditUsageLog.objects.filter(
            account=account,
            created_at__gte=start_of_month
        ).aggregate(total=Sum('credits_used'))['total'] or 0

        # Per-operation breakdowns; each operation family may be logged
        # under several operation_type values, hence the __in lists.
        cluster_credits = CreditUsageLog.objects.filter(
            account=account,
            operation_type__in=['clustering'],
            created_at__gte=start_of_month
        ).aggregate(total=Sum('credits_used'))['total'] or 0

        content_credits = CreditUsageLog.objects.filter(
            account=account,
            operation_type__in=['content', 'content_generation'],
            created_at__gte=start_of_month
        ).aggregate(total=Sum('credits_used'))['total'] or 0

        image_credits = CreditUsageLog.objects.filter(
            account=account,
            operation_type__in=['images', 'image_generation', 'image_prompt_extraction'],
            created_at__gte=start_of_month
        ).aggregate(total=Sum('credits_used'))['total'] or 0

        idea_credits = CreditUsageLog.objects.filter(
            account=account,
            operation_type__in=['ideas', 'idea_generation'],
            created_at__gte=start_of_month
        ).aggregate(total=Sum('credits_used'))['total'] or 0

        # Use included_credits from plan (Phase 0: Credit-only); fall back
        # to the legacy credits_per_month field, then 0
        plan_credits = plan.included_credits or plan.credits_per_month or 0

        # Entries with limit=None are informational (balance / breakdowns)
        # and carry no percentage; only the monthly allowance has a cap.
        limits_data.extend([
            {
                'title': 'Monthly Credits',
                'limit': plan_credits,
                'used': credits_used_month,
                'available': max(0, plan_credits - credits_used_month),
                'unit': 'credits',
                'category': 'credits',
                'percentage': (credits_used_month / plan_credits * 100) if plan_credits else 0
            },
            {
                'title': 'Current Balance',
                'limit': None,  # No limit - shows current balance
                'used': None,
                'available': account.credits,
                'unit': 'credits',
                'category': 'credits',
                'percentage': None
            },
            {
                'title': 'Clustering Credits',
                'limit': None,
                'used': cluster_credits,
                'available': None,
                'unit': 'credits',
                'category': 'credits',
                'percentage': None
            },
            {
                'title': 'Content Generation Credits',
                'limit': None,
                'used': content_credits,
                'available': None,
                'unit': 'credits',
                'category': 'credits',
                'percentage': None
            },
            {
                'title': 'Image Generation Credits',
                'limit': None,
                'used': image_credits,
                'available': None,
                'unit': 'credits',
                'category': 'credits',
                'percentage': None
            },
            {
                'title': 'Idea Generation Credits',
                'limit': None,
                'used': idea_credits,
                'available': None,
                'unit': 'credits',
                'category': 'credits',
                'percentage': None
            },
        ])

        # Account Management Limits (kept - not operation limits)
        users_count = User.objects.filter(account=account).count()
        sites_count = Site.objects.filter(account=account).count()

        limits_data.extend([
            {
                'title': 'Users',
                'limit': plan.max_users or 0,
                'used': users_count,
                'available': max(0, (plan.max_users or 0) - users_count),
                'unit': 'users',
                'category': 'account',
                'percentage': (users_count / (plan.max_users or 1)) * 100 if plan.max_users else 0
            },
            {
                'title': 'Sites',
                'limit': plan.max_sites or 0,
                'used': sites_count,
                'available': max(0, (plan.max_sites or 0) - sites_count),
                'unit': 'sites',
                'category': 'account',
                'percentage': (sites_count / (plan.max_sites or 1)) * 100 if plan.max_sites else 0
            },
        ])

        # Return data directly - serializer validation not needed for read-only endpoint
        return success_response(data={'limits': limits_data}, request=request)
@extend_schema_view(
    list=extend_schema(tags=['Billing']),
    retrieve=extend_schema(tags=['Billing']),
)
class CreditTransactionViewSet(AccountModelViewSet):
    """
    Read endpoints for the account's credit transaction history.
    Unified API Standard v1.0 compliant.
    """
    queryset = CreditTransaction.objects.all()
    serializer_class = CreditTransactionSerializer
    permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsAdminOrOwner]
    authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication]
    pagination_class = CustomPageNumberPagination
    throttle_scope = 'billing'
    throttle_classes = [DebugScopedRateThrottle]

    def get_queryset(self):
        """Account-scoped transactions (base class filters by account),
        optionally narrowed by ?transaction_type=, newest first."""
        transactions = super().get_queryset()

        requested_type = self.request.query_params.get('transaction_type')
        if requested_type:
            transactions = transactions.filter(transaction_type=requested_type)

        return transactions.order_by('-created_at')
[IsAuthenticatedAndActive] + authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication] + + def account_balance(self, request): + """Get account balance with subscription info""" + account = getattr(request, 'account', None) or request.user.account + + # Get subscription plan + subscription_plan = 'Free' + monthly_credits_included = 0 + if account.plan: + subscription_plan = account.plan.name + monthly_credits_included = account.plan.get_effective_credits_per_month() + + # Calculate bonus credits (credits beyond monthly allowance) + bonus_credits = max(0, account.credits - monthly_credits_included) + + data = { + 'credits': account.credits or 0, + 'subscription_plan': subscription_plan, + 'monthly_credits_included': monthly_credits_included, + 'bonus_credits': bonus_credits, + } + + return Response(data) + + +@extend_schema_view( + stats=extend_schema(tags=['Admin Billing'], summary='Admin billing stats'), + list_users=extend_schema(tags=['Admin Billing'], summary='List users with credit info'), + adjust_credits=extend_schema(tags=['Admin Billing'], summary='Adjust user credits'), + list_credit_costs=extend_schema(tags=['Admin Billing'], summary='List credit cost configurations'), + update_credit_costs=extend_schema(tags=['Admin Billing'], summary='Update credit cost configurations'), +) +class AdminBillingViewSet(viewsets.ViewSet): + """Admin-only billing management API""" + permission_classes = [IsAuthenticatedAndActive, permissions.IsAdminUser] + authentication_classes = [JWTAuthentication, CSRFExemptSessionAuthentication] + + def stats(self, request): + """Get system-wide billing statistics""" + from igny8_core.auth.models import Account + + total_users = Account.objects.filter(status='active').count() + active_users = Account.objects.filter(status='active').exclude(users__last_login__isnull=True).count() + + total_credits_issued = Account.objects.aggregate( + total=Sum('credits') + )['total'] or 0 + + total_credits_used = 
CreditUsageLog.objects.aggregate( + total=Sum('credits_used') + )['total'] or 0 + + return Response({ + 'total_users': total_users, + 'active_users': active_users, + 'total_credits_issued': total_credits_issued, + 'total_credits_used': total_credits_used, + }) + + def list_users(self, request): + """List all users with credit information""" + from igny8_core.auth.models import Account + from django.db.models import Q + + # Get search query from request + search = request.query_params.get('search', '') + + queryset = Account.objects.filter(status='active').prefetch_related('users') + + # Apply search filter + if search: + queryset = queryset.filter( + Q(user__username__icontains=search) | + Q(user__email__icontains=search) + ) + + accounts = queryset[:100] + + data = [] + for acc in accounts: + user = acc.users.first() if acc.users.exists() else None + data.append({ + 'id': acc.id, + 'username': user.username if user else 'N/A', + 'email': user.email if user else 'N/A', + 'credits': acc.credits or 0, + 'subscription_plan': acc.plan.name if acc.plan else 'Free', + 'is_active': acc.status == 'active', + 'date_joined': acc.created_at + }) + + return Response({'results': data}) + + def adjust_credits(self, request, user_id): + """Adjust credits for a specific user""" + from igny8_core.auth.models import Account + + try: + account = Account.objects.get(id=user_id) + except Account.DoesNotExist: + return Response({'error': 'User not found'}, status=404) + + amount = request.data.get('amount', 0) + reason = request.data.get('reason', 'Admin adjustment') + + try: + amount = int(amount) + except (ValueError, TypeError): + return Response({'error': 'Invalid amount'}, status=400) + + # Adjust credits + old_balance = account.credits + account.credits = (account.credits or 0) + amount + account.save() + + # Log the adjustment + CreditUsageLog.objects.create( + account=account, + operation_type='admin_adjustment', + credits_used=-amount, # Negative for additions + 
credits_balance_after=account.credits, + details={'reason': reason, 'old_balance': old_balance, 'adjusted_by': request.user.id} + ) + + return Response({ + 'success': True, + 'new_balance': account.credits, + 'old_balance': old_balance, + 'adjustment': amount + }) + + def list_credit_costs(self, request): + """List credit cost configurations""" + from igny8_core.business.billing.models import CreditCostConfig + + configs = CreditCostConfig.objects.filter(is_active=True) + + data = [{ + 'id': c.id, + 'operation_type': c.operation_type, + 'display_name': c.display_name, + 'credits_cost': c.credits_cost, + 'unit': c.unit, + 'is_active': c.is_active, + 'created_at': c.created_at + } for c in configs] + + return Response({'results': data}) + + def update_credit_costs(self, request): + """Update credit cost configurations""" + from igny8_core.business.billing.models import CreditCostConfig + + updates = request.data.get('updates', []) + + for update in updates: + config_id = update.get('id') + new_cost = update.get('cost') + + if config_id and new_cost is not None: + try: + config = CreditCostConfig.objects.get(id=config_id) + config.cost = new_cost + config.save() + except CreditCostConfig.DoesNotExist: + continue + + return Response({'success': True}) + diff --git a/tenant/backend/igny8_core/modules/planner/views.py b/tenant/backend/igny8_core/modules/planner/views.py new file mode 100644 index 00000000..f90cb1f3 --- /dev/null +++ b/tenant/backend/igny8_core/modules/planner/views.py @@ -0,0 +1,1182 @@ +from rest_framework import viewsets, filters, status +from rest_framework.decorators import action +from rest_framework.response import Response +from django_filters.rest_framework import DjangoFilterBackend +from django.db import transaction +from django.db.models import Max, Count, Sum, Q +from django.http import HttpResponse +import csv +import json +import time +from drf_spectacular.utils import extend_schema, extend_schema_view +from igny8_core.api.base import 
from igny8_core.api.pagination import CustomPageNumberPagination
from igny8_core.api.response import success_response, error_response
from igny8_core.api.throttles import DebugScopedRateThrottle
from igny8_core.api.permissions import IsAuthenticatedAndActive, IsViewerOrAbove, IsEditorOrAbove
from .models import Keywords, Clusters, ContentIdeas
from .serializers import KeywordSerializer, ContentIdeasSerializer
from .cluster_serializers import ClusterSerializer
from igny8_core.business.planning.services.clustering_service import ClusteringService
from igny8_core.business.planning.services.ideas_service import IdeasService
from igny8_core.business.billing.exceptions import InsufficientCreditsError
import logging

# Module-level logger replaces the per-method `import logging;
# logger = logging.getLogger(__name__)` boilerplate the original repeated.
logger = logging.getLogger(__name__)


@extend_schema_view(
    list=extend_schema(tags=['Planner']),
    create=extend_schema(tags=['Planner']),
    retrieve=extend_schema(tags=['Planner']),
    update=extend_schema(tags=['Planner']),
    partial_update=extend_schema(tags=['Planner']),
    destroy=extend_schema(tags=['Planner']),
)
class KeywordViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing keywords with CRUD operations.

    Provides list, create, retrieve, update, and destroy actions plus bulk
    actions (delete / status update / add-from-seed), CSV import/export and
    AI auto-clustering.  Unified API Standard v1.0 compliant.
    """
    queryset = Keywords.objects.all()
    serializer_class = KeywordSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    pagination_class = CustomPageNumberPagination  # Explicitly use custom pagination
    throttle_scope = 'planner'
    throttle_classes = [DebugScopedRateThrottle]

    # DRF filtering configuration
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]

    # Search by the related seed keyword's text.
    search_fields = ['seed_keyword__keyword']

    # Ordering by creation date, or volume/difficulty from the seed keyword.
    ordering_fields = ['created_at', 'seed_keyword__volume', 'seed_keyword__difficulty']
    ordering = ['-created_at']  # Default: newest first

    # Exact-match filters.
    filterset_fields = ['status', 'cluster_id', 'seed_keyword__intent', 'seed_keyword_id']

    def _query_params(self):
        """Return the request's query params, tolerating non-DRF requests.

        (The original wrapped this in a dead `except AttributeError` —
        getattr with a default never raises AttributeError, so the fallback
        to request.GET is sufficient.)
        """
        params = getattr(self.request, 'query_params', None)
        if params is None:
            params = getattr(self.request, 'GET', {})
        return params

    def get_queryset(self):
        """Base queryset plus custom numeric range filters.

        Delegates site/sector/developer-role scoping to the parent, then
        applies difficulty_min/max and volume_min/max query params.  The
        *_override fields take precedence over the related seed_keyword's
        values.  On unexpected errors, logs and returns an empty queryset
        instead of surfacing a 500 from queryset construction.
        """
        try:
            queryset = super().get_queryset()
            params = self._query_params()

            # (query param, override lookup, seed_keyword fallback lookup);
            # the override-is-null arm is derived from the override lookup.
            range_filters = [
                ('difficulty_min', 'difficulty_override__gte', 'seed_keyword__difficulty__gte'),
                ('difficulty_max', 'difficulty_override__lte', 'seed_keyword__difficulty__lte'),
                ('volume_min', 'volume_override__gte', 'seed_keyword__volume__gte'),
                ('volume_max', 'volume_override__lte', 'seed_keyword__volume__lte'),
            ]
            for param, override_lookup, seed_lookup in range_filters:
                raw = params.get(param)
                if raw is None:
                    continue
                try:
                    value = int(raw)
                except (ValueError, TypeError):
                    # Non-numeric bound: ignore the filter, as before.
                    continue
                null_lookup = override_lookup.rsplit('__', 1)[0] + '__isnull'
                queryset = queryset.filter(
                    Q(**{override_lookup: value}) |
                    Q(**{null_lookup: True, seed_lookup: value})
                )

            return queryset
        except Exception as e:
            logger.error(f"Error in KeywordViewSet.get_queryset(): {type(e).__name__}: {str(e)}", exc_info=True)
            # Fail closed: empty queryset instead of raising.
            return Keywords.objects.none()

    def list(self, request, *args, **kwargs):
        """List keywords, wrapping unexpected failures in a unified 500 payload."""
        try:
            queryset = self.filter_queryset(self.get_queryset())
            page = self.paginate_queryset(queryset)
            if page is not None:
                serializer = self.get_serializer(page, many=True)
                return self.get_paginated_response(serializer.data)

            serializer = self.get_serializer(queryset, many=True)
            return success_response(
                data=serializer.data,
                request=request
            )
        except Exception as e:
            logger.error(f"Error in KeywordViewSet.list(): {type(e).__name__}: {str(e)}", exc_info=True)
            return error_response(
                error=f'Error loading keywords: {str(e)}',
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request
            )

    def perform_create(self, serializer):
        """Create a keyword.  site_id and sector_id are REQUIRED (no defaults).

        They may arrive via the serializer payload or query params; both are
        validated (existence + sector-belongs-to-site) and then stripped from
        validated_data because they are not model fields.  Raises
        ValidationError on any missing/invalid input.
        """
        # Import here to avoid circular imports
        from igny8_core.auth.models import Site, Sector
        from rest_framework.exceptions import ValidationError

        params = self._query_params()
        site_id = serializer.validated_data.get('site_id') or params.get('site_id')
        sector_id = serializer.validated_data.get('sector_id') or params.get('sector_id')

        if not site_id:
            raise ValidationError("site_id is required. Please select a site.")
        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            raise ValidationError(f"Site with id {site_id} does not exist")

        if not sector_id:
            raise ValidationError("sector_id is required. Please select a sector.")
        try:
            sector = Sector.objects.get(id=sector_id)
            # Verify sector belongs to the site
            if sector.site_id != site_id:
                raise ValidationError(f"Sector '{sector.name}' does not belong to the selected site")
        except Sector.DoesNotExist:
            raise ValidationError(f"Sector with id {sector_id} does not exist")

        # site_id/sector_id are request-level inputs, not Keywords fields.
        serializer.validated_data.pop('site_id', None)
        serializer.validated_data.pop('sector_id', None)

        # Account resolution order: request -> authenticated user -> site.
        user = getattr(self.request, 'user', None)
        account = getattr(self.request, 'account', None)
        if not account and user and user.is_authenticated:
            account = getattr(user, 'account', None)
        if not account:
            account = getattr(site, 'account', None)

        serializer.save(account=account, site=site, sector=sector)

    @action(detail=False, methods=['POST'], url_path='bulk_delete', url_name='bulk_delete')
    def bulk_delete(self, request):
        """Bulk delete keywords listed in request.data['ids'] (scoped to caller's queryset)."""
        ids = request.data.get('ids', [])
        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()
        deleted_count, _ = queryset.filter(id__in=ids).delete()

        return success_response(data={'deleted_count': deleted_count}, request=request)

    @action(detail=False, methods=['post'], url_path='bulk_update', url_name='bulk_update')
    def bulk_update(self, request):
        """Bulk update keyword status.

        BUG FIX: this action was defined TWICE with identical bodies under
        the same url_path/url_name (the first copy even had a stale
        "cluster status" docstring); the second definition silently shadowed
        the first.  Only a single definition is kept.
        """
        ids = request.data.get('ids', [])
        status_value = request.data.get('status')

        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        if not status_value:
            return error_response(
                error='No status provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()
        updated_count = queryset.filter(id__in=ids).update(status=status_value)

        return success_response(data={'updated_count': updated_count}, request=request)

    @action(detail=False, methods=['post'], url_path='bulk_add_from_seed', url_name='bulk_add_from_seed')
    def bulk_add_from_seed(self, request):
        """Bulk add SeedKeywords to the workflow by creating Keywords rows.

        Body: {seed_keyword_ids: [...], site_id, sector_id}.  Each seed is
        validated against the site's industry and the sector's
        industry_sector before a Keywords row is created with status 'new'.
        Returns created/skipped counts plus up to 10 error strings.
        """
        from igny8_core.auth.models import SeedKeyword, Site, Sector

        seed_keyword_ids = request.data.get('seed_keyword_ids', [])
        site_id = request.data.get('site_id')
        sector_id = request.data.get('sector_id')

        if not seed_keyword_ids:
            return error_response(
                error='No seed keyword IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        if not site_id:
            return error_response(
                error='site_id is required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        if not sector_id:
            return error_response(
                error='sector_id is required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        try:
            site = Site.objects.get(id=site_id)
            sector = Sector.objects.get(id=sector_id)
        except (Site.DoesNotExist, Sector.DoesNotExist) as e:
            return error_response(
                error=f'Invalid site or sector: {str(e)}',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # Validate sector belongs to site
        if sector.site != site:
            return error_response(
                error='Sector does not belong to the specified site',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        account = site.account
        if not account:
            return error_response(
                error='Site has no account assigned',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        seed_keywords = SeedKeyword.objects.filter(id__in=seed_keyword_ids, is_active=True)
        if not seed_keywords.exists():
            return error_response(
                error='No valid seed keywords found',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        created_count = 0
        skipped_count = 0
        errors = []

        with transaction.atomic():
            for seed_keyword in seed_keywords:
                try:
                    # The seed's industry must match the site's industry.
                    if site.industry != seed_keyword.industry:
                        errors.append(
                            f"Keyword '{seed_keyword.keyword}': industry mismatch "
                            f"(site={site.industry.name if site.industry else 'None'}, "
                            f"seed={seed_keyword.industry.name if seed_keyword.industry else 'None'})"
                        )
                        skipped_count += 1
                        continue

                    # The sector must reference an industry_sector at all...
                    if not sector.industry_sector:
                        errors.append(
                            f"Keyword '{seed_keyword.keyword}': sector '{sector.name}' has no industry_sector set. "
                            f"Please update the sector to reference an industry sector."
                        )
                        skipped_count += 1
                        continue

                    # ...and it must match the seed's sector.
                    if sector.industry_sector != seed_keyword.sector:
                        errors.append(
                            f"Keyword '{seed_keyword.keyword}': sector mismatch "
                            f"(sector={sector.industry_sector.name if sector.industry_sector else 'None'}, "
                            f"seed={seed_keyword.sector.name if seed_keyword.sector else 'None'})"
                        )
                        skipped_count += 1
                        continue

                    # New keywords default to status 'new' (per updated workflow plan).
                    keyword, created = Keywords.objects.get_or_create(
                        seed_keyword=seed_keyword,
                        site=site,
                        sector=sector,
                        defaults={
                            'status': 'new',
                            'account': account
                        }
                    )

                    if created:
                        # Belt-and-braces: force 'new' even if a model default
                        # or signal changed it during creation.
                        if getattr(keyword, 'status', None) != 'new':
                            keyword.status = 'new'
                            keyword.save(update_fields=['status'])
                        created_count += 1
                    else:
                        skipped_count += 1

                except Exception as e:
                    errors.append(f"Error adding '{seed_keyword.keyword}': {str(e)}")
                    skipped_count += 1

        return success_response(
            data={
                'created': created_count,
                'skipped': skipped_count,
                'errors': errors[:10]  # cap payload size
            },
            request=request
        )

    @action(detail=False, methods=['get'], url_path='export', url_name='export')
    def export(self, request):
        """Export keywords to CSV.

        Query params: search, status, cluster_id, ids (comma-separated).
        When 'ids' is present, ONLY those rows are exported and all other
        filters are ignored; otherwise normal DRF filtering applies.
        Always emits CSV; any 'format' parameter is ignored to avoid DRF
        format-suffix conflicts.
        """
        queryset = self.get_queryset()

        ids_param = request.query_params.get('ids', '')
        if ids_param:
            try:
                ids_list = [int(id_str.strip()) for id_str in ids_param.split(',') if id_str.strip()]
                if ids_list:
                    logger.debug("export: filtering by explicit ids %s", ids_list)
                    queryset = queryset.filter(id__in=ids_list)
            except (ValueError, TypeError):
                # Malformed ids parameter -> fall back to regular filtering.
                queryset = self.filter_queryset(queryset)
        else:
            queryset = self.filter_queryset(queryset)

        keywords = queryset.all()
        # BUG FIX (hygiene): the original print()ed the parsed id list, the
        # record count AND the entire raw CSV body to stdout on every export.
        # That debug output now goes to the module logger at DEBUG level only,
        # and the extra count() query is skipped unless DEBUG is enabled.
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("export: generating CSV with %d records", keywords.count())

        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="keywords.csv"'

        writer = csv.writer(response)
        writer.writerow(['ID', 'Keyword', 'Volume', 'Difficulty', 'Intent', 'Status', 'Cluster ID', 'Created At'])

        for keyword in keywords:
            # NOTE(review): keyword/volume/difficulty/intent are assumed to be
            # attributes or properties on Keywords that resolve override-vs-
            # seed_keyword values -- the model is not visible here; confirm.
            writer.writerow([
                keyword.id,
                keyword.keyword,
                keyword.volume,
                keyword.difficulty,
                keyword.intent,
                keyword.status,
                keyword.cluster_id or '',
                keyword.created_at.isoformat() if keyword.created_at else '',
            ])

        return response

    @action(detail=False, methods=['post'], url_path='import_keywords', url_name='import_keywords')
    def import_keywords(self, request):
        """Import keywords from an uploaded CSV file.

        Requires site_id and sector_id query params.  Each CSV row needs a
        'keyword' column; volume/difficulty/intent/status are optional.
        Rows already present for (keyword, site, sector, account) are skipped.
        Returns imported/skipped counts plus up to 10 per-row errors.
        """
        if 'file' not in request.FILES:
            return error_response(
                error='No file provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        file = request.FILES['file']
        if not file.name.endswith('.csv'):
            return error_response(
                error='File must be a CSV',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        user = getattr(request, 'user', None)
        params = self._query_params()
        site_id = params.get('site_id')
        sector_id = params.get('sector_id')

        # Import here to avoid circular imports
        from igny8_core.auth.models import Site, Sector

        if not site_id:
            return error_response(
                error='site_id is required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            return error_response(
                error=f'Site with id {site_id} does not exist',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        if not sector_id:
            return error_response(
                error='sector_id is required',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        try:
            sector = Sector.objects.get(id=sector_id)
            if sector.site_id != site_id:
                return error_response(
                    error='Sector does not belong to the selected site',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )
        except Sector.DoesNotExist:
            return error_response(
                error=f'Sector with id {sector_id} does not exist',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        account = getattr(request, 'account', None)
        if not account and user and user.is_authenticated:
            account = getattr(user, 'account', None)
        if not account:
            account = getattr(site, 'account', None)

        try:
            decoded_file = file.read().decode('utf-8')
            csv_reader = csv.DictReader(decoded_file.splitlines())

            imported_count = 0
            skipped_count = 0
            errors = []

            with transaction.atomic():
                # Row 1 is the header, so data rows start at 2.
                for row_num, row in enumerate(csv_reader, start=2):
                    try:
                        keyword_text = row.get('keyword', '').strip()
                        if not keyword_text:
                            skipped_count += 1
                            continue

                        # NOTE(review): this path filters/creates on a direct
                        # `keyword` field while bulk_add_from_seed uses a
                        # seed_keyword FK -- confirm Keywords supports both.
                        existing = Keywords.objects.filter(
                            keyword=keyword_text,
                            site=site,
                            sector=sector,
                            account=account
                        ).first()
                        if existing:
                            skipped_count += 1
                            continue

                        Keywords.objects.create(
                            keyword=keyword_text,
                            volume=int(row.get('volume', 0) or 0),
                            difficulty=int(row.get('difficulty', 0) or 0),
                            intent=row.get('intent', 'informational') or 'informational',
                            status=row.get('status', 'new') or 'new',
                            site=site,
                            sector=sector,
                            account=account
                        )
                        imported_count += 1
                    except Exception as e:
                        errors.append(f"Row {row_num}: {str(e)}")
                        continue

            return success_response(
                data={
                    'imported': imported_count,
                    'skipped': skipped_count,
                    'errors': errors[:10]  # cap payload size
                },
                request=request
            )

        except Exception as e:
            return error_response(
                error=f'Failed to parse CSV: {str(e)}',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

    @action(detail=False, methods=['post'], url_path='auto_cluster', url_name='auto_cluster')
    def auto_cluster(self, request):
        """Cluster the selected keywords via ClusteringService.

        Validates account and a minimum of 5 keywords BEFORE the task is
        queued (so no credits are spent on doomed runs).  Returns a task_id
        for async runs, the full result for sync runs, 402 on insufficient
        credits, and a unified 500 payload on unexpected failures.
        """
        from igny8_core.ai.validators.cluster_validators import validate_minimum_keywords

        try:
            keyword_ids = request.data.get('ids', [])
            sector_id = request.data.get('sector_id')

            account = getattr(request, 'account', None)
            if not account:
                return error_response(
                    error='Account is required',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )

            if not keyword_ids:
                return error_response(
                    error='No keyword IDs provided',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )

            validation = validate_minimum_keywords(
                keyword_ids=keyword_ids,
                account=account,
                min_required=5
            )
            if not validation['valid']:
                return error_response(
                    error=validation['error'],
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request,
                    extra_data={
                        'count': validation.get('count'),
                        'required': validation.get('required')
                    }
                )

            # Validation passed - proceed with clustering.
            service = ClusteringService()
            try:
                result = service.cluster_keywords(keyword_ids, account, sector_id)

                if result.get('success'):
                    if 'task_id' in result:
                        # Async task queued
                        return success_response(
                            data={'task_id': result['task_id']},
                            message=f'Clustering started with {validation["count"]} keywords',
                            request=request
                        )
                    # Synchronous execution
                    return success_response(data=result, request=request)

                return error_response(
                    error=result.get('error', 'Clustering failed'),
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    request=request
                )
            except InsufficientCreditsError as e:
                return error_response(
                    error=str(e),
                    status_code=status.HTTP_402_PAYMENT_REQUIRED,
                    request=request
                )
            except Exception as e:
                logger.error(f"Error in auto_cluster: {str(e)}", exc_info=True)
                return error_response(
                    error=str(e),
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    request=request
                )
        except Exception as e:
            logger.error(f"Unexpected error in auto_cluster: {str(e)}", exc_info=True)
            return error_response(
                error=f'Unexpected error: {str(e)}',
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request
            )
@extend_schema_view(
    list=extend_schema(tags=['Planner']),
    create=extend_schema(tags=['Planner']),
    retrieve=extend_schema(tags=['Planner']),
    update=extend_schema(tags=['Planner']),
    partial_update=extend_schema(tags=['Planner']),
    destroy=extend_schema(tags=['Planner']),
)
class ClusterViewSet(SiteSectorModelViewSet):
    """Cluster CRUD endpoints plus bulk delete and AI idea generation.

    Unified API Standard v1.0 compliant.  keywords_count / volume /
    difficulty are computed in the serializer (there is no FK from Clusters
    to Keywords), so list() bulk-prefetches those stats to avoid N+1 queries.
    """
    queryset = Clusters.objects.all()
    serializer_class = ClusterSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    pagination_class = CustomPageNumberPagination  # custom pagination, set explicitly
    throttle_scope = 'planner'
    throttle_classes = [DebugScopedRateThrottle]

    # DRF filtering configuration.
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['name']
    ordering_fields = ['name', 'created_at', 'keywords_count', 'volume', 'difficulty']
    ordering = ['name']  # default ordering
    filterset_fields = ['status']

    def get_queryset(self):
        """Scoped clusters annotated with aggregate volume/difficulty.

        volume and difficulty are Python properties, not DB columns, so
        filtering needs a database-side equivalent: per keyword, prefer the
        override field and fall back to the seed_keyword value (a COALESCE
        expressed via Case/When), aggregated per cluster with Sum/Avg.
        Optional volume_min/max and difficulty_min/max query params are then
        applied; non-numeric bounds are ignored.
        """
        from django.db.models import Sum, Avg, Case, When, F, IntegerField

        clusters = super().get_queryset().annotate(
            _annotated_volume=Sum(
                Case(
                    When(keywords__volume_override__isnull=False,
                         then=F('keywords__volume_override')),
                    default=F('keywords__seed_keyword__volume'),
                    output_field=IntegerField(),
                )
            ),
            _annotated_difficulty=Avg(
                Case(
                    When(keywords__difficulty_override__isnull=False,
                         then=F('keywords__difficulty_override')),
                    default=F('keywords__seed_keyword__difficulty'),
                    output_field=IntegerField(),
                )
            ),
        )

        params = getattr(self.request, 'query_params', {})
        bounds = (
            ('volume_min', '_annotated_volume__gte', int),
            ('volume_max', '_annotated_volume__lte', int),
            ('difficulty_min', '_annotated_difficulty__gte', float),
            ('difficulty_max', '_annotated_difficulty__lte', float),
        )
        for param, lookup, cast in bounds:
            raw = params.get(param)
            if raw is None:
                continue
            try:
                clusters = clusters.filter(**{lookup: cast(raw)})
            except (ValueError, TypeError):
                pass  # unparsable bound: skip this filter

        return clusters

    def perform_create(self, serializer):
        """Create a cluster; explicit site_id and sector_id are mandatory.

        Both ids may come from the serializer payload or the query string and
        are validated (existence, sector-belongs-to-site) before being
        stripped from validated_data, since neither is a Clusters model field.
        """
        # Deferred imports avoid a circular dependency with auth.models.
        from igny8_core.auth.models import Site, Sector
        from rest_framework.exceptions import ValidationError

        params = getattr(self.request, 'query_params', None)
        if params is None:
            params = getattr(self.request, 'GET', {})  # plain Django request fallback

        data = serializer.validated_data
        site_id = data.get('site_id') or params.get('site_id')
        sector_id = data.get('sector_id') or params.get('sector_id')

        if not site_id:
            raise ValidationError("site_id is required. Please select a site.")
        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            raise ValidationError(f"Site with id {site_id} does not exist")

        if not sector_id:
            raise ValidationError("sector_id is required. Please select a sector.")
        try:
            sector = Sector.objects.get(id=sector_id)
            if sector.site_id != site_id:
                raise ValidationError(f"Sector '{sector.name}' does not belong to the selected site")
        except Sector.DoesNotExist:
            raise ValidationError(f"Sector with id {sector_id} does not exist")

        # Request-level inputs only -- never persisted on the model.
        data.pop('site_id', None)
        data.pop('sector_id', None)

        # Account resolution: request -> authenticated user -> owning site.
        owner = getattr(self.request, 'account', None)
        requester = getattr(self.request, 'user', None)
        if not owner and requester and requester.is_authenticated:
            owner = getattr(requester, 'account', None)
        if not owner:
            owner = getattr(site, 'account', None)

        serializer.save(account=owner, site=site, sector=sector)

    @action(detail=False, methods=['POST'], url_path='bulk_delete', url_name='bulk_delete')
    def bulk_delete(self, request):
        """Delete every cluster in request.data['ids'] visible to the caller."""
        ids = request.data.get('ids', [])
        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request,
            )

        removed, _ = self.get_queryset().filter(id__in=ids).delete()
        return success_response(data={'deleted_count': removed}, request=request)

    @action(detail=False, methods=['post'], url_path='auto_generate_ideas', url_name='auto_generate_ideas')
    def auto_generate_ideas(self, request):
        """Kick off AI idea generation for the selected clusters.

        Delegates to IdeasService; async runs return a task_id, sync runs
        return the full result.  402 on insufficient credits, unified 500
        payloads on failures.
        """
        import logging
        log = logging.getLogger(__name__)

        try:
            cluster_ids = request.data.get('ids', [])

            account = getattr(request, 'account', None)
            if not account:
                return error_response(
                    error='Account is required',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request,
                )

            try:
                outcome = IdeasService().generate_ideas(cluster_ids, account)

                if outcome.get('success'):
                    if 'task_id' in outcome:
                        # Async path: a background task was queued.
                        return success_response(
                            data={'task_id': outcome['task_id']},
                            message=outcome.get('message', 'Idea generation started'),
                            request=request,
                        )
                    # Sync path: the result is already available.
                    return success_response(data=outcome, request=request)

                return error_response(
                    error=outcome.get('error', 'Idea generation failed'),
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    request=request,
                )
            except InsufficientCreditsError as e:
                return error_response(
                    error=str(e),
                    status_code=status.HTTP_402_PAYMENT_REQUIRED,
                    request=request,
                )
            except Exception as e:
                log.error(f"Error in auto_generate_ideas: {str(e)}", exc_info=True)
                return error_response(
                    error=str(e),
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    request=request,
                )
        except Exception as e:
            log.error(f"Unexpected error in auto_generate_ideas: {str(e)}", exc_info=True)
            return error_response(
                error=f'Unexpected error: {str(e)}',
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request,
            )

    def list(self, request, *args, **kwargs):
        """List clusters with keyword stats prefetched in bulk (avoids N+1)."""
        filtered = self.filter_queryset(self.get_queryset())

        page = self.paginate_queryset(filtered)
        if page is not None:
            clusters = list(page)
            ClusterSerializer.prefetch_keyword_stats(clusters)
            return self.get_paginated_response(self.get_serializer(clusters, many=True).data)

        clusters = list(filtered)
        ClusterSerializer.prefetch_keyword_stats(clusters)
        return success_response(
            data=self.get_serializer(clusters, many=True).data,
            request=request,
        )
@extend_schema_view(
    list=extend_schema(tags=['Planner']),
    create=extend_schema(tags=['Planner']),
    retrieve=extend_schema(tags=['Planner']),
    update=extend_schema(tags=['Planner']),
    partial_update=extend_schema(tags=['Planner']),
    destroy=extend_schema(tags=['Planner']),
)
class ContentIdeasViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing content ideas with CRUD operations.
    Unified API Standard v1.0 compliant.  Includes bulk delete and
    bulk queue-to-writer (creating writer Tasks from ideas).
    """
    queryset = ContentIdeas.objects.all()
    serializer_class = ContentIdeasSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    pagination_class = CustomPageNumberPagination  # Explicitly use custom pagination
    throttle_scope = 'planner'
    throttle_classes = [DebugScopedRateThrottle]

    # DRF filtering configuration
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]

    # Search by idea title.
    search_fields = ['idea_title']

    # Ordering configuration
    ordering_fields = ['idea_title', 'created_at', 'estimated_word_count']
    ordering = ['-created_at']  # Default ordering (newest first)

    # Filter configuration (updated for new structure)
    filterset_fields = ['status', 'keyword_cluster_id', 'content_type', 'content_structure']

    def perform_create(self, serializer):
        """Create a content idea; explicit site_id and sector_id are REQUIRED.

        Both may arrive via serializer payload or query params; both are
        validated and then removed from validated_data (not model fields).
        Raises ValidationError on missing/invalid input.
        """
        user = getattr(self.request, 'user', None)

        query_params = getattr(self.request, 'query_params', None)
        if query_params is None:
            query_params = getattr(self.request, 'GET', {})

        site_id = serializer.validated_data.get('site_id') or query_params.get('site_id')
        sector_id = serializer.validated_data.get('sector_id') or query_params.get('sector_id')

        # Import here to avoid circular imports
        from igny8_core.auth.models import Site, Sector
        from rest_framework.exceptions import ValidationError

        if not site_id:
            raise ValidationError("site_id is required. Please select a site.")
        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            raise ValidationError(f"Site with id {site_id} does not exist")

        if not sector_id:
            raise ValidationError("sector_id is required. Please select a sector.")
        try:
            sector = Sector.objects.get(id=sector_id)
            if sector.site_id != site_id:
                # FIX: was a pointless f-string with no placeholders.
                raise ValidationError("Sector does not belong to the selected site")
        except Sector.DoesNotExist:
            raise ValidationError(f"Sector with id {sector_id} does not exist")

        # Request-level inputs, not ContentIdeas model fields.
        serializer.validated_data.pop('site_id', None)
        serializer.validated_data.pop('sector_id', None)

        # Account resolution order: request -> authenticated user -> site.
        account = getattr(self.request, 'account', None)
        if not account and user and user.is_authenticated:
            account = getattr(user, 'account', None)
        if not account:
            account = getattr(site, 'account', None)

        serializer.save(account=account, site=site, sector=sector)

    @action(detail=False, methods=['POST'], url_path='bulk_delete', url_name='bulk_delete')
    def bulk_delete(self, request):
        """Bulk delete content ideas listed in request.data['ids']."""
        ids = request.data.get('ids', [])
        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()
        deleted_count, _ = queryset.filter(id__in=ids).delete()

        return success_response(data={'deleted_count': deleted_count}, request=request)

    @action(detail=False, methods=['post'], url_path='bulk_queue_to_writer', url_name='bulk_queue_to_writer')
    def bulk_queue_to_writer(self, request):
        """Queue ideas to the writer by creating writer Tasks.

        Only ideas with status 'new' are queueable; others are reported as
        skipped.  Ideas without a cluster are reported as errors.  Each
        successfully queued idea gets a Tasks row (status='queued') and its
        own status flipped to 'queued'.  The response distinguishes complete
        failure (400), partial success and complete success.
        """
        ids = request.data.get('ids', [])
        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()
        # Fetch ALL requested ideas first so non-'new' ones can be reported
        # as skipped rather than silently dropped.
        all_ideas = queryset.filter(id__in=ids)
        queueable_ideas = all_ideas.filter(status='new')

        from igny8_core.modules.writer.models import Tasks

        created_tasks = []
        errors = []
        skipped = []

        # Report ideas that are not in 'new' status as skipped.
        for idea in all_ideas:
            if idea.status != 'new':
                skipped.append({
                    'idea_id': idea.id,
                    'title': idea.idea_title,
                    'reason': f'Already {idea.status}'
                })

        for idea in queueable_ideas:
            try:
                # A cluster is mandatory for writer tasks.
                if not idea.keyword_cluster:
                    errors.append({
                        'idea_id': idea.id,
                        'title': idea.idea_title,
                        'error': 'Missing required cluster - assign idea to a cluster first'
                    })
                    continue

                # Prefer the idea's linked keyword objects; fall back to the
                # free-text target_keywords field.
                keywords_str = ''
                if idea.keyword_objects.exists():
                    keywords_str = ', '.join([kw.keyword for kw in idea.keyword_objects.all()])
                elif idea.target_keywords:
                    keywords_str = idea.target_keywords

                # Direct field copy -- idea and task share vocabulary.
                task = Tasks.objects.create(
                    title=idea.idea_title,
                    description=idea.description or '',
                    cluster=idea.keyword_cluster,
                    content_type=idea.content_type or 'post',
                    content_structure=idea.content_structure or 'article',
                    taxonomy_term=None,  # Can be set later if taxonomy is available
                    keywords=keywords_str,  # Comma-separated keywords string
                    status='queued',
                    account=idea.account,
                    site=idea.site,
                    sector=idea.sector,
                    idea=idea,  # Link back to the original idea
                )

                created_tasks.append(task.id)

                # Mark the idea as queued (full save preserves any auto_now
                # bookkeeping fields on the model).
                idea.status = 'queued'
                idea.save()
            except Exception as e:
                errors.append({
                    'idea_id': idea.id,
                    'title': idea.idea_title,
                    'error': str(e)
                })

        if len(created_tasks) == 0 and (errors or skipped):
            # Complete failure: nothing queued, at least one problem reported.
            # NOTE(review): error_response is called with an `errors=` kwarg
            # here -- confirm the shared helper accepts it.
            return error_response(
                error=f'Failed to queue any ideas: {len(errors)} errors, {len(skipped)} skipped',
                errors=errors if errors else skipped,
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        elif errors:
            # Partial success - some created, some failed.
            return success_response(
                data={
                    'created_count': len(created_tasks),
                    'task_ids': created_tasks,
                    'errors': errors,
                    'skipped': skipped,
                },
                message=f'Queued {len(created_tasks)} ideas ({len(errors)} failed, {len(skipped)} skipped)',
                request=request
            )
        else:
            # Complete success (possibly with already-scheduled skips).
            return success_response(
                data={
                    'created_count': len(created_tasks),
                    'task_ids': created_tasks,
                    'skipped': skipped,
                },
                message=f'Successfully queued {len(created_tasks)} ideas to writer' + (f' ({len(skipped)} already scheduled)' if skipped else ''),
                request=request
            )

    # REMOVED: generate_idea action - idea generation function removed
AccountModelViewSet +from igny8_core.api.response import success_response, error_response +from igny8_core.api.throttles import DebugScopedRateThrottle +from igny8_core.api.permissions import IsAuthenticatedAndActive, HasTenantAccess, IsSystemAccountOrDeveloper +from django.conf import settings + +logger = logging.getLogger(__name__) + + +@extend_schema_view( + list=extend_schema(tags=['System']), + retrieve=extend_schema(tags=['System']), + update=extend_schema(tags=['System']), + test_connection=extend_schema(tags=['System']), + task_progress=extend_schema(tags=['System']), + get_image_generation_settings=extend_schema(tags=['System']), +) +class IntegrationSettingsViewSet(viewsets.ViewSet): + """ + ViewSet for managing integration settings (OpenAI, Runware, GSC) + Following reference plugin pattern: WordPress uses update_option() for igny8_api_settings + We store in IntegrationSettings model with account isolation + """ + permission_classes = [IsAuthenticatedAndActive, HasTenantAccess, IsSystemAccountOrDeveloper] + + throttle_scope = 'system_admin' + throttle_classes = [DebugScopedRateThrottle] + + def list(self, request): + """List all integrations - for debugging URL patterns""" + logger.info("[IntegrationSettingsViewSet] list() called") + return success_response( + data={ + 'message': 'IntegrationSettingsViewSet is working', + 'available_endpoints': [ + 'GET /api/v1/system/settings/integrations//', + 'POST /api/v1/system/settings/integrations//save/', + 'POST /api/v1/system/settings/integrations//test/', + 'POST /api/v1/system/settings/integrations//generate/', + ] + }, + request=request + ) + + def retrieve(self, request, pk=None): + """Get integration settings - GET /api/v1/system/settings/integrations/{pk}/""" + return self.get_settings(request, pk) + + def update(self, request, pk=None): + """Save integration settings (PUT) - PUT /api/v1/system/settings/integrations/{pk}/""" + return self.save_settings(request, pk) + + def save_post(self, request, 
pk=None, **kwargs): + """Save integration settings (POST) - POST /api/v1/system/settings/integrations/{pk}/save/ + This matches the frontend endpoint call exactly. + Reference plugin: WordPress form submits to options.php which calls update_option() via register_setting callback. + We save to IntegrationSettings model instead. + """ + # Extract pk from kwargs if not passed as parameter (DRF passes via **kwargs) + if not pk: + pk = kwargs.get('pk') + return self.save_settings(request, pk) + + @action(detail=True, methods=['post'], url_path='test', url_name='test') + def test_connection(self, request, pk=None): + """ + Test API connection for OpenAI or Runware + Supports two modes: + - with_response=false: Simple connection test (GET /v1/models) + - with_response=true: Full response test with ping message + """ + integration_type = pk # 'openai', 'runware' + + logger.info(f"[test_connection] Called for integration_type={integration_type}, user={getattr(request, 'user', None)}, account={getattr(request, 'account', None)}") + + if not integration_type: + return error_response( + error='Integration type is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Get API key and config from request or saved settings + config = request.data.get('config', {}) if isinstance(request.data.get('config'), dict) else {} + api_key = request.data.get('apiKey') or config.get('apiKey') + + # Merge request.data with config if config is a dict + if not isinstance(config, dict): + config = {} + + if not api_key: + # Try to get from saved settings + account = getattr(request, 'account', None) + logger.info(f"[test_connection] Account from request: {account.id if account else None}") + # Fallback to user's account + if not account: + user = getattr(request, 'user', None) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated: + account = getattr(user, 'account', None) + # Fallback to default account + if not account: + from 
igny8_core.auth.models import Account + try: + account = Account.objects.first() + except Exception: + pass + + if account: + try: + from .models import IntegrationSettings + logger.info(f"[test_connection] Looking for saved settings for account {account.id}") + saved_settings = IntegrationSettings.objects.get( + integration_type=integration_type, + account=account + ) + api_key = saved_settings.config.get('apiKey') + logger.info(f"[test_connection] Found saved settings, has_apiKey={bool(api_key)}") + except IntegrationSettings.DoesNotExist: + logger.warning(f"[test_connection] No saved settings found for {integration_type} and account {account.id}") + pass + + if not api_key: + logger.error(f"[test_connection] No API key found in request or saved settings") + return error_response( + error='API key is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + logger.info(f"[test_connection] Testing {integration_type} connection with API key (length={len(api_key) if api_key else 0})") + try: + if integration_type == 'openai': + return self._test_openai(api_key, config, request) + elif integration_type == 'runware': + return self._test_runware(api_key, request) + else: + return error_response( + error=f'Validation not supported for {integration_type}', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + except Exception as e: + logger.error(f"Error testing {integration_type} connection: {str(e)}", exc_info=True) + import traceback + error_trace = traceback.format_exc() + logger.error(f"Full traceback: {error_trace}") + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + def _test_openai(self, api_key: str, config: dict = None, request=None): + """ + Test OpenAI API connection. + EXACT match to reference plugin's igny8_test_connection() function. 
+ Reference: ai/openai-api.php line 186-309 + """ + import requests + + # Get model from config or use default (reference plugin: get_option('igny8_model', 'gpt-4.1')) + model = (config or {}).get('model', 'gpt-4.1') if config else 'gpt-4.1' + + # Check if test with response is requested (reference plugin: $with_response parameter) + with_response = (config or {}).get('with_response', False) if config else False + + if with_response: + # Test with actual API call (reference plugin: test with chat completion) + request_body = { + 'model': model, + 'messages': [ + { + 'role': 'user', + 'content': 'test ping, reply with: OK! Ping Received. Also tell me: what is your maximum token limit that I can use in 1 request?' + } + ], + 'temperature': 0.7, + } + + try: + response = requests.post( + 'https://api.openai.com/v1/chat/completions', + headers={ + 'Authorization': f'Bearer {api_key}', + 'Content-Type': 'application/json', + }, + json=request_body, + timeout=15 + ) + + if response.status_code >= 200 and response.status_code < 300: + response_data = response.json() + + if 'choices' in response_data and len(response_data['choices']) > 0: + response_text = response_data['choices'][0]['message']['content'].strip() + + # Extract token usage information (reference plugin: line 269-271) + usage = response_data.get('usage', {}) + input_tokens = usage.get('prompt_tokens', 0) + output_tokens = usage.get('completion_tokens', 0) + total_tokens = usage.get('total_tokens', 0) + + # Calculate cost using model rates (reference plugin: line 274-275) + from igny8_core.utils.ai_processor import MODEL_RATES + rates = MODEL_RATES.get(model, {'input': 2.00, 'output': 8.00}) + cost = (input_tokens * rates['input'] + output_tokens * rates['output']) / 1000000 + + return success_response( + data={ + 'message': 'API connection and response test successful!', + 'model_used': model, + 'response': response_text, + 'tokens_used': f"{input_tokens} / {output_tokens}", + 'total_tokens': total_tokens, + 
'cost': f'${cost:.4f}', + 'full_response': response_data, + }, + request=request + ) + else: + return error_response( + error='API responded but no content received', + errors={'response': [response.text[:500]]}, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + else: + body = response.text + # Map OpenAI API errors to appropriate HTTP status codes + # OpenAI 401 (invalid API key) should be 400 (Bad Request) in our API + # OpenAI 4xx errors are client errors (invalid request) -> 400 + # OpenAI 5xx errors are server errors -> 500 + if response.status_code == 401: + # Invalid API key - this is a validation error, not an auth error + status_code = status.HTTP_400_BAD_REQUEST + elif 400 <= response.status_code < 500: + # Other client errors from OpenAI (invalid request, rate limit, etc.) + status_code = status.HTTP_400_BAD_REQUEST + elif response.status_code >= 500: + # Server errors from OpenAI + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + else: + status_code = response.status_code + + return error_response( + error=f'HTTP {response.status_code} – {body[:200]}', + status_code=status_code, + request=request + ) + except requests.exceptions.RequestException as e: + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + else: + # Simple connection test without API call (reference plugin: GET /v1/models) + try: + response = requests.get( + 'https://api.openai.com/v1/models', + headers={ + 'Authorization': f'Bearer {api_key}', + }, + timeout=10 + ) + + if response.status_code >= 200 and response.status_code < 300: + return success_response( + data={ + 'message': 'API connection successful!', + 'model_used': model, + 'response': 'Connection verified without API call' + }, + request=request + ) + else: + body = response.text + # Map OpenAI API errors to appropriate HTTP status codes + # OpenAI 401 (invalid API key) should be 400 (Bad Request) in our API + # OpenAI 4xx errors are 
client errors (invalid request) -> 400 + # OpenAI 5xx errors are server errors -> 500 + if response.status_code == 401: + # Invalid API key - this is a validation error, not an auth error + status_code = status.HTTP_400_BAD_REQUEST + elif 400 <= response.status_code < 500: + # Other client errors from OpenAI (invalid request, rate limit, etc.) + status_code = status.HTTP_400_BAD_REQUEST + elif response.status_code >= 500: + # Server errors from OpenAI + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + else: + status_code = response.status_code + + return error_response( + error=f'HTTP {response.status_code} – {body[:200]}', + status_code=status_code, + request=request + ) + except requests.exceptions.RequestException as e: + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + def _test_runware(self, api_key: str, request): + """ + Test Runware API connection using 64x64 image generation (ping validation) + Reference: Uses same format as image generation but with minimal 64x64 size for fast validation + """ + from igny8_core.utils.ai_processor import AIProcessor + + # Get account from request + account = getattr(request, 'account', None) + if not account: + user = getattr(request, 'user', None) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated: + account = getattr(user, 'account', None) + # Fallback to default account + if not account: + from igny8_core.auth.models import Account + try: + account = Account.objects.first() + except Exception: + pass + + try: + # EXACT match to reference plugin: core/admin/ajax.php line 4946-5003 + # Reference plugin uses: 128x128, steps=2, CFGScale=5, prompt='test image connection' + import requests + import uuid + import json + + test_prompt = 'test image connection' + + # Prepare payload EXACTLY as reference plugin + payload = [ + { + 'taskType': 'authentication', + 'apiKey': api_key + }, + { + 'taskType': 'imageInference', + 'taskUUID': 
str(uuid.uuid4()), + 'positivePrompt': test_prompt, + 'model': 'runware:97@1', + 'width': 128, # Reference plugin uses 128x128, not 64x64 + 'height': 128, + 'negativePrompt': 'text, watermark, logo, overlay, title, caption, writing on walls, writing on objects, UI, infographic elements, post title', + 'steps': 2, # Low steps for fast testing + 'CFGScale': 5, + 'numberResults': 1 + } + ] + + logger.info("[_test_runware] Testing Runware API with 128x128 image generation (matching reference plugin)") + logger.info(f"[_test_runware] Payload: {json.dumps(payload, indent=2)}") + + # Make API request + response = requests.post( + 'https://api.runware.ai/v1', + headers={'Content-Type': 'application/json'}, + json=payload, + timeout=30 + ) + + logger.info(f"[_test_runware] Response status: {response.status_code}") + + if response.status_code != 200: + error_text = response.text + logger.error(f"[_test_runware] HTTP error {response.status_code}: {error_text[:200]}") + return error_response( + error=f'HTTP {response.status_code}: {error_text[:200]}', + status_code=response.status_code, + request=request + ) + + # Parse response - Reference plugin checks: $body['data'][0]['imageURL'] + body = response.json() + logger.info(f"[_test_runware] Response body type: {type(body)}") + logger.info(f"[_test_runware] Response body: {json.dumps(body, indent=2)[:1000]}") + + # Reference plugin line 4996: if (isset($body['data'][0]['imageURL'])) + if isinstance(body, dict) and 'data' in body: + data = body['data'] + if isinstance(data, list) and len(data) > 0: + first_item = data[0] + image_url = first_item.get('imageURL') or first_item.get('image_url') + if image_url: + logger.info(f"[_test_runware] Success! 
Image URL: {image_url[:50]}...") + return success_response( + data={ + 'message': '✅ Runware API connected successfully!', + 'image_url': image_url, + 'cost': '$0.0090', + 'provider': 'runware', + 'model': 'runware:97@1', + 'size': '128x128' + }, + request=request + ) + + # Check for errors - Reference plugin line 4998: elseif (isset($body['errors'][0]['message'])) + if isinstance(body, dict) and 'errors' in body: + errors = body['errors'] + if isinstance(errors, list) and len(errors) > 0: + error_msg = errors[0].get('message', 'Unknown Runware API error') + logger.error(f"[_test_runware] Runware API error: {error_msg}") + return error_response( + error=f'❌ {error_msg}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + # Unknown response format + logger.error(f"[_test_runware] Unknown response format: {body}") + return error_response( + error='❌ Unknown response from Runware.', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + except Exception as e: + logger.error(f"[_test_runware] Exception in Runware API test: {str(e)}", exc_info=True) + return error_response( + error=f'Runware API test failed: {str(e)}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + def generate_image(self, request, pk=None, **kwargs): + """ + Generate image using the configured image generation service + POST /api/v1/system/settings/integrations/image_generation/generate/ + Note: This method is called via custom URL pattern, not @action decorator + """ + # Extract pk from kwargs if not passed as parameter (DRF passes via **kwargs) + if not pk: + pk = kwargs.get('pk') + + # Log detailed request info for debugging + logger.info("=" * 80) + logger.info("[generate_image] ENDPOINT CALLED - Image generation request received") + logger.info(f"[generate_image] pk parameter: {pk}") + logger.info(f"[generate_image] kwargs: {kwargs}") + logger.info(f"[generate_image] request.path: {request.path}") + 
logger.info(f"[generate_image] request.method: {request.method}") + logger.info(f"[generate_image] request.META.get('PATH_INFO'): {request.META.get('PATH_INFO')}") + logger.info(f"[generate_image] request.META.get('REQUEST_URI'): {request.META.get('REQUEST_URI', 'N/A')}") + logger.info(f"[generate_image] request.META.get('HTTP_HOST'): {request.META.get('HTTP_HOST', 'N/A')}") + logger.info(f"[generate_image] request.META.get('HTTP_REFERER'): {request.META.get('HTTP_REFERER', 'N/A')}") + logger.info(f"[generate_image] request.data: {request.data}") + + if pk != 'image_generation': + logger.error(f"[generate_image] Invalid pk: {pk}, expected 'image_generation'") + return error_response( + error=f'Image generation endpoint only available for image_generation integration, got: {pk}', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Get account + logger.info("[generate_image] Step 1: Getting account") + account = getattr(request, 'account', None) + if not account: + user = getattr(request, 'user', None) + logger.info(f"[generate_image] No account in request, checking user: {user}") + if user and hasattr(user, 'is_authenticated') and user.is_authenticated: + account = getattr(user, 'account', None) + logger.info(f"[generate_image] Got account from user: {account}") + if not account: + logger.info("[generate_image] No account found, trying to get first account from DB") + from igny8_core.auth.models import Account + try: + account = Account.objects.first() + logger.info(f"[generate_image] Got first account from DB: {account}") + except Exception as e: + logger.error(f"[generate_image] Error getting account from DB: {e}") + pass + + if not account: + logger.error("[generate_image] ERROR: No account found, returning error response") + return error_response( + error='Account not found', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + logger.info(f"[generate_image] Account resolved: {account.id if account else 'None'}") + + # Get 
request parameters + logger.info("[generate_image] Step 2: Extracting request parameters") + prompt = request.data.get('prompt', '') + negative_prompt = request.data.get('negative_prompt', '') + image_type = request.data.get('image_type', 'realistic') + image_size = request.data.get('image_size', '1024x1024') + image_format = request.data.get('image_format', 'webp') + provider = request.data.get('provider', 'openai') + model = request.data.get('model', 'dall-e-3') + + logger.info(f"[generate_image] Request parameters: provider={provider}, model={model}, image_type={image_type}, image_size={image_size}, prompt_length={len(prompt)}") + logger.info(f"[generate_image] IMPORTANT: Using ONLY {provider.upper()} provider for this request. NOT using both providers.") + + if not prompt: + logger.error("[generate_image] ERROR: Prompt is empty") + return error_response( + error='Prompt is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Get API key from saved settings for the specified provider only + logger.info(f"[generate_image] Step 3: Getting API key for provider: {provider}") + from .models import IntegrationSettings + + # Only fetch settings for the specified provider + api_key = None + integration_enabled = False + integration_type = provider # 'openai' or 'runware' + + try: + integration_settings = IntegrationSettings.objects.get( + integration_type=integration_type, + account=account + ) + api_key = integration_settings.config.get('apiKey') + integration_enabled = integration_settings.is_active + logger.info(f"[generate_image] {integration_type.upper()} settings found: enabled={integration_enabled}, has_key={bool(api_key)}") + except IntegrationSettings.DoesNotExist: + logger.warning(f"[generate_image] {integration_type.upper()} settings not found in database") + api_key = None + integration_enabled = False + except Exception as e: + logger.error(f"[generate_image] Error getting {integration_type.upper()} settings: {e}") + api_key = 
None + integration_enabled = False + + # Validate provider and API key + logger.info(f"[generate_image] Step 4: Validating {provider} provider and API key") + if provider not in ['openai', 'runware']: + logger.error(f"[generate_image] ERROR: Invalid provider: {provider}") + return error_response( + error=f'Invalid provider: {provider}. Must be "openai" or "runware"', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + if not api_key or not integration_enabled: + logger.error(f"[generate_image] ERROR: {provider.upper()} API key not configured or integration not enabled") + return error_response( + error=f'{provider.upper()} API key not configured or integration not enabled', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + logger.info(f"[generate_image] {provider.upper()} API key validated successfully") + + # Generate image using AIProcessor + logger.info("[generate_image] Step 5: Creating AIProcessor and generating image") + try: + from igny8_core.utils.ai_processor import AIProcessor + processor = AIProcessor(account=account) + logger.info("[generate_image] AIProcessor created successfully") + + # Parse size + width, height = map(int, image_size.split('x')) + size_str = f'{width}x{height}' + logger.info(f"[generate_image] Image size parsed: {size_str}") + + logger.info(f"[generate_image] Calling processor.generate_image with: provider={provider}, model={model}, size={size_str}") + result = processor.generate_image( + prompt=prompt, + provider=provider, + model=model, + size=size_str, + n=1, + api_key=api_key, + negative_prompt=negative_prompt if provider == 'runware' else None, # OpenAI doesn't support negative prompts + ) + + logger.info(f"[generate_image] AIProcessor.generate_image returned: has_url={bool(result.get('url'))}, has_error={bool(result.get('error'))}") + + if result.get('error'): + logger.error(f"[generate_image] ERROR from AIProcessor: {result.get('error')}") + return error_response( + error=result['error'], + 
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + logger.info("[generate_image] Image generation successful, returning response") + response_data = { + 'image_url': result.get('url'), + 'revised_prompt': result.get('revised_prompt'), + 'model': model, + 'provider': provider, + 'cost': f"${result.get('cost', 0):.4f}" if result.get('cost') else None, + } + logger.info(f"[generate_image] Returning success response: {response_data}") + return success_response( + data=response_data, + request=request + ) + except Exception as e: + logger.error(f"[generate_image] EXCEPTION in image generation: {str(e)}", exc_info=True) + return error_response( + error=f'Failed to generate image: {str(e)}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + def create(self, request): + """Create integration settings""" + integration_type = request.data.get('integration_type') + if not integration_type: + return error_response( + error='integration_type is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + return self.save_settings(request, integration_type) + + def save_settings(self, request, pk=None): + """Save integration settings""" + integration_type = pk # 'openai', 'runware', 'gsc' + + logger.info(f"[save_settings] Called for integration_type={integration_type}, user={getattr(request, 'user', None)}, account={getattr(request, 'account', None)}") + + if not integration_type: + return error_response( + error='Integration type is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Ensure config is a dict + config = dict(request.data) if hasattr(request.data, 'dict') else (request.data if isinstance(request.data, dict) else {}) + logger.info(f"[save_settings] Config keys: {list(config.keys()) if isinstance(config, dict) else 'Not a dict'}") + + try: + # Get account - try multiple methods + account = getattr(request, 'account', None) + logger.info(f"[save_settings] Account 
from request: {account.id if account else None}") + + # Fallback 1: Get from authenticated user's account + if not account: + user = getattr(request, 'user', None) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated: + try: + account = getattr(user, 'account', None) + except Exception as e: + logger.warning(f"Error getting account from user: {e}") + account = None + + # Fallback 2: If still no account, get default account (for development) + if not account: + from igny8_core.auth.models import Account + try: + # Get the first account as fallback (development only) + account = Account.objects.first() + except Exception as e: + logger.warning(f"Error getting default account: {e}") + account = None + + if not account: + logger.error(f"[save_settings] No account found after all fallbacks") + return error_response( + error='Account not found. Please ensure you are logged in.', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + logger.info(f"[save_settings] Using account: {account.id} ({account.name}, slug={account.slug}, status={account.status})") + + # Store integration settings in a simple model or settings table + # For now, we'll use a simple approach - store in IntegrationSettings model + # or use Django settings/database + + # Import IntegrationSettings model + from .models import IntegrationSettings + + # For image_generation, ensure provider is set correctly + if integration_type == 'image_generation': + # Map service to provider if service is provided + if 'service' in config and 'provider' not in config: + config['provider'] = config['service'] + # Ensure provider is set + if 'provider' not in config: + config['provider'] = config.get('service', 'openai') + # Set model based on provider + if config.get('provider') == 'openai' and 'model' not in config: + config['model'] = config.get('imageModel', 'dall-e-3') + elif config.get('provider') == 'runware' and 'model' not in config: + config['model'] = 
config.get('runwareModel', 'runware:97@1') + # Ensure all image settings have defaults + config.setdefault('image_type', 'realistic') + config.setdefault('max_in_article_images', 2) + config.setdefault('image_format', 'webp') + config.setdefault('desktop_enabled', True) + config.setdefault('mobile_enabled', True) + + # Set default image sizes based on provider/model + provider = config.get('provider', 'openai') + model = config.get('model', 'dall-e-3') + + if not config.get('featured_image_size'): + if provider == 'runware': + config['featured_image_size'] = '1280x832' + else: # openai + config['featured_image_size'] = '1024x1024' + + if not config.get('desktop_image_size'): + config['desktop_image_size'] = '1024x1024' + + # Get or create integration settings + logger.info(f"[save_settings] Attempting get_or_create for {integration_type} with account {account.id}") + integration_settings, created = IntegrationSettings.objects.get_or_create( + integration_type=integration_type, + account=account, + defaults={'config': config, 'is_active': config.get('enabled', False)} + ) + logger.info(f"[save_settings] get_or_create result: created={created}, id={integration_settings.id}") + + if not created: + logger.info(f"[save_settings] Updating existing settings (id={integration_settings.id})") + integration_settings.config = config + integration_settings.is_active = config.get('enabled', False) + integration_settings.save() + logger.info(f"[save_settings] Settings updated successfully") + + logger.info(f"[save_settings] Successfully saved settings for {integration_type}") + return success_response( + data={'config': config}, + message=f'{integration_type.upper()} settings saved successfully', + request=request + ) + + except Exception as e: + logger.error(f"Error saving integration settings for {integration_type}: {str(e)}", exc_info=True) + import traceback + error_trace = traceback.format_exc() + logger.error(f"Full traceback: {error_trace}") + return error_response( + 
error=f'Failed to save settings: {str(e)}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + def get_settings(self, request, pk=None): + """Get integration settings - defaults to AWS-admin settings if account doesn't have its own""" + integration_type = pk + + if not integration_type: + return error_response( + error='Integration type is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + try: + # Get account - try multiple methods (same as save_settings) + account = getattr(request, 'account', None) + + # Fallback 1: Get from authenticated user's account + if not account: + user = getattr(request, 'user', None) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated: + try: + account = getattr(user, 'account', None) + except Exception as e: + logger.warning(f"Error getting account from user: {e}") + account = None + + from .models import IntegrationSettings + + # Get account-specific settings + if account: + try: + integration_settings = IntegrationSettings.objects.get( + integration_type=integration_type, + account=account + ) + return success_response( + data=integration_settings.config, + request=request + ) + except IntegrationSettings.DoesNotExist: + pass + except Exception as e: + logger.error(f"Error getting account-specific settings: {e}", exc_info=True) + + # Return empty config if no settings found + return success_response( + data={}, + request=request + ) + except Exception as e: + logger.error(f"Unexpected error in get_settings for {integration_type}: {e}", exc_info=True) + return error_response( + error=f'Failed to get settings: {str(e)}', + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=False, methods=['get'], url_path='image_generation', url_name='image_generation_settings') + def get_image_generation_settings(self, request): + """Get image generation settings for current account""" + account = getattr(request, 'account', None) + 
+ if not account: + # Fallback to user's account + user = getattr(request, 'user', None) + if user and hasattr(user, 'is_authenticated') and user.is_authenticated: + account = getattr(user, 'account', None) + # Fallback to default account + if not account: + from igny8_core.auth.models import Account + try: + account = Account.objects.first() + except Exception: + pass + + if not account: + return error_response( + error='Account not found', + status_code=status.HTTP_401_UNAUTHORIZED, + request=request + ) + + try: + from .models import IntegrationSettings + integration = IntegrationSettings.objects.get( + account=account, + integration_type='image_generation', + is_active=True + ) + + config = integration.config or {} + + # Debug: Log what's actually in the config + logger.info(f"[get_image_generation_settings] Full config: {config}") + logger.info(f"[get_image_generation_settings] Config keys: {list(config.keys())}") + logger.info(f"[get_image_generation_settings] model field: {config.get('model')}") + logger.info(f"[get_image_generation_settings] imageModel field: {config.get('imageModel')}") + + # Get model - try 'model' first, then 'imageModel' as fallback + model = config.get('model') or config.get('imageModel') or 'dall-e-3' + + # Set defaults for image sizes if not present + provider = config.get('provider', 'openai') + default_featured_size = '1280x832' if provider == 'runware' else '1024x1024' + + return success_response( + data={ + 'config': { + 'provider': config.get('provider', 'openai'), + 'model': model, + 'image_type': config.get('image_type', 'realistic'), + 'max_in_article_images': config.get('max_in_article_images', 2), + 'image_format': config.get('image_format', 'webp'), + 'desktop_enabled': config.get('desktop_enabled', True), + 'mobile_enabled': config.get('mobile_enabled', True), + 'featured_image_size': config.get('featured_image_size', default_featured_size), + 'desktop_image_size': config.get('desktop_image_size', '1024x1024'), + } + }, + 
request=request + ) + except IntegrationSettings.DoesNotExist: + return success_response( + data={ + 'config': { + 'provider': 'openai', + 'model': 'dall-e-3', + 'image_type': 'realistic', + 'max_in_article_images': 2, + 'image_format': 'webp', + 'desktop_enabled': True, + 'mobile_enabled': True, + } + }, + request=request + ) + except Exception as e: + logger.error(f"[get_image_generation_settings] Error: {str(e)}", exc_info=True) + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=False, methods=['get'], url_path='task_progress/(?P[^/.]+)', url_name='task-progress') + def task_progress(self, request, task_id=None): + """ + Get Celery task progress status + GET /api/v1/system/settings/task_progress// + """ + if not task_id: + return error_response( + error='Task ID is required', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + import logging + logger = logging.getLogger(__name__) + + try: + # Try to import Celery AsyncResult + try: + from celery.result import AsyncResult + from kombu.exceptions import OperationalError as KombuOperationalError + # Try to import redis ConnectionError, but it might not be available + try: + from redis.exceptions import ConnectionError as RedisConnectionError + except ImportError: + # Redis might not be installed or ConnectionError might not exist + RedisConnectionError = ConnectionError + except ImportError: + logger.warning("Celery not available - task progress cannot be retrieved") + return success_response( + data={ + 'state': 'PENDING', + 'meta': { + 'percentage': 0, + 'message': 'Celery not available - cannot retrieve task status', + 'error': 'Celery not configured' + } + }, + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + request=request + ) + + try: + # Create AsyncResult - this should not raise an exception even if task doesn't exist + task = AsyncResult(task_id) + + # Safely get task state - accessing task.state can 
raise ValueError if exception info is malformed + # or ConnectionError if backend is unavailable + try: + task_state = task.state + except (ValueError, KeyError) as state_exc: + # Task has malformed exception info - try to get error from multiple sources + logger.warning(f"Error accessing task.state (malformed exception info): {str(state_exc)}") + error_msg = 'Task failed - exception details unavailable' + error_type = 'UnknownError' + request_steps = [] + response_steps = [] + + # First, try to get from backend's stored meta (most reliable for our update_state calls) + try: + backend = task.backend + if hasattr(backend, 'get_task_meta'): + stored_meta = backend.get_task_meta(task_id) + if stored_meta and isinstance(stored_meta, dict): + meta = stored_meta.get('meta', {}) + if isinstance(meta, dict): + if 'error' in meta: + error_msg = meta.get('error') + if 'error_type' in meta: + error_type = meta.get('error_type', error_type) + if 'request_steps' in meta: + request_steps = meta.get('request_steps', []) + if 'response_steps' in meta: + response_steps = meta.get('response_steps', []) + except Exception as e: + logger.debug(f"Error getting from backend meta: {str(e)}") + + # Try to get error from task.result + if error_msg == 'Task failed - exception details unavailable': + try: + if hasattr(task, 'result'): + result = task.result + if isinstance(result, dict): + error_msg = result.get('error', error_msg) + error_type = result.get('error_type', error_type) + request_steps = result.get('request_steps', request_steps) + response_steps = result.get('response_steps', response_steps) + elif isinstance(result, str): + error_msg = result + except Exception as e: + logger.debug(f"Error extracting error from task.result: {str(e)}") + + # Also try to get error from task.info + if error_msg == 'Task failed - exception details unavailable': + try: + if hasattr(task, 'info') and task.info: + if isinstance(task.info, dict): + if 'error' in task.info: + error_msg = 
task.info['error'] + if 'error_type' in task.info: + error_type = task.info['error_type'] + if 'request_steps' in task.info: + request_steps = task.info.get('request_steps', request_steps) + if 'response_steps' in task.info: + response_steps = task.info.get('response_steps', response_steps) + except Exception as e: + logger.debug(f"Error extracting error from task.info: {str(e)}") + + return success_response( + data={ + 'state': 'FAILURE', + 'meta': { + 'error': error_msg, + 'error_type': error_type, + 'percentage': 0, + 'message': f'Error: {error_msg}', + 'request_steps': request_steps, + 'response_steps': response_steps, + } + }, + request=request + ) + except (KombuOperationalError, RedisConnectionError, ConnectionError) as conn_exc: + # Backend connection error - task might not be registered yet or backend is down + logger.warning(f"Backend connection error accessing task.state for {task_id}: {type(conn_exc).__name__}: {str(conn_exc)}") + return success_response( + data={ + 'state': 'PENDING', + 'meta': { + 'percentage': 0, + 'message': 'Task is being queued...', + 'phase': 'initializing', + 'error': None # Don't show as error, just pending + } + }, + request=request + ) + except Exception as state_exc: + logger.error(f"Unexpected error accessing task.state: {type(state_exc).__name__}: {str(state_exc)}") + return success_response( + data={ + 'state': 'UNKNOWN', + 'meta': { + 'error': f'Error accessing task: {str(state_exc)}', + 'percentage': 0, + 'message': f'Error: {str(state_exc)}', + } + }, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + # Check if task exists and is accessible + if task_state is None: + # Task doesn't exist or hasn't been registered yet + return success_response( + data={ + 'state': 'PENDING', + 'meta': { + 'percentage': 0, + 'message': 'Task not found or not yet registered', + 'phase': 'initializing', + } + }, + request=request + ) + + # Safely get task info/result + # Try to get error from multiple sources + 
task_result = None + task_info = None + error_message = None + error_type = None + + # First, try to get from backend's stored meta (most reliable for our update_state calls) + try: + backend = task.backend + if hasattr(backend, 'get_task_meta'): + stored_meta = backend.get_task_meta(task_id) + if stored_meta and isinstance(stored_meta, dict): + meta = stored_meta.get('meta', {}) + if isinstance(meta, dict): + if 'error' in meta: + error_message = meta.get('error') + error_type = meta.get('error_type', 'UnknownError') + except Exception as backend_err: + logger.debug(f"Could not get from backend meta: {backend_err}") + + try: + # Try to get result first - this often has the actual error + if not error_message and hasattr(task, 'result'): + try: + task_result = task.result + # If result is a dict with error, extract it + if isinstance(task_result, dict): + if 'error' in task_result: + error_message = task_result.get('error') + error_type = task_result.get('error_type', 'UnknownError') + elif 'success' in task_result and not task_result.get('success'): + error_message = task_result.get('error', 'Task failed') + error_type = task_result.get('error_type', 'UnknownError') + except Exception: + pass # Will try task.info next + except Exception: + pass + + # Then try task.info + if not error_message and hasattr(task, 'info'): + try: + task_info = task.info + if isinstance(task_info, dict): + if 'error' in task_info: + error_message = task_info.get('error') + error_type = task_info.get('error_type', 'UnknownError') + except (ValueError, KeyError, AttributeError) as info_exc: + # Log the actual exception that occurred + logger.error(f"Error accessing task.info for {task_id}: {type(info_exc).__name__}: {str(info_exc)}", exc_info=True) + # Try to get error from traceback if available + try: + if hasattr(task, 'traceback'): + error_message = f"Task failed: {str(task.traceback)}" + except: + pass + except (KombuOperationalError, RedisConnectionError, ConnectionError) as 
conn_exc: + # Backend connection error - task might not be registered yet + logger.warning(f"Backend connection error accessing task.info for {task_id}: {str(conn_exc)}") + task_info = None + except Exception as e: + logger.error(f"Unexpected error accessing task.info: {type(e).__name__}: {str(e)}", exc_info=True) + else: + if not hasattr(task, 'info'): + task_info = None + + # If still no error message, try to get from task.result again + if not error_message and hasattr(task, 'result'): + try: + task_result = task.result + if isinstance(task_result, dict): + if 'error' in task_result: + error_message = task_result.get('error') + error_type = task_result.get('error_type', 'UnknownError') + elif 'success' in task_result and not task_result.get('success'): + error_message = task_result.get('error', 'Task failed') + error_type = task_result.get('error_type', 'UnknownError') + elif isinstance(task_result, str): + error_message = task_result + elif isinstance(task_result, Exception): + error_message = str(task_result) + error_type = type(task_result).__name__ + except (ValueError, KeyError) as result_exc: + logger.warning(f"Error accessing task.result: {str(result_exc)}") + task_result = None + except (KombuOperationalError, RedisConnectionError, ConnectionError) as conn_exc: + # Backend connection error + logger.warning(f"Backend connection error accessing task.result for {task_id}: {str(conn_exc)}") + task_result = None + except Exception as info_error: + logger.warning(f"Unexpected error accessing task result: {str(info_error)}") + task_result = None + + # Use extracted error or fallback - try traceback as last resort + if not error_message: + try: + if hasattr(task, 'traceback') and task.traceback: + error_message = f"Task failed: {str(task.traceback)}" + except Exception: + pass + + if not error_message: + error_message = f"Task failed - check Celery worker logs for task {task_id}" + + if task_state == 'PROGRESS': + meta = task_info or {} + response_meta = { + 
'current': meta.get('current', 0) if isinstance(meta, dict) else 0, + 'total': meta.get('total', 0) if isinstance(meta, dict) else 0, + 'percentage': meta.get('percentage', 0) if isinstance(meta, dict) else 0, + 'message': meta.get('message', 'Processing...') if isinstance(meta, dict) else 'Processing...', + 'phase': meta.get('phase', 'processing') if isinstance(meta, dict) else 'processing', + 'current_item': meta.get('current_item') if isinstance(meta, dict) else None, + 'completed': meta.get('completed', 0) if isinstance(meta, dict) else 0, + # Image generation progress fields + 'current_image': meta.get('current_image') if isinstance(meta, dict) else None, + 'current_image_id': meta.get('current_image_id') if isinstance(meta, dict) else None, + 'current_image_progress': meta.get('current_image_progress') if isinstance(meta, dict) else None, + 'total_images': meta.get('total_images') if isinstance(meta, dict) else None, + 'failed': meta.get('failed', 0) if isinstance(meta, dict) else 0, + 'results': meta.get('results', []) if isinstance(meta, dict) else [], + } + # Include step logs if available + if isinstance(meta, dict): + if 'request_steps' in meta: + response_meta['request_steps'] = meta['request_steps'] + if 'response_steps' in meta: + response_meta['response_steps'] = meta['response_steps'] + # Include image_queue if available (for image generation) + if 'image_queue' in meta: + response_meta['image_queue'] = meta['image_queue'] + return success_response( + data={ + 'state': task_state, + 'meta': response_meta + }, + request=request + ) + elif task_state == 'SUCCESS': + result = task_result or {} + meta = result if isinstance(result, dict) else {} + response_meta = { + 'percentage': 100, + 'message': meta.get('message', 'Task completed successfully') if isinstance(meta, dict) else 'Task completed successfully', + 'result': result, + 'details': meta if isinstance(meta, dict) else {}, + } + # Include step logs if available + if isinstance(meta, dict): + if 
'request_steps' in meta: + response_meta['request_steps'] = meta['request_steps'] + if 'response_steps' in meta: + response_meta['response_steps'] = meta['response_steps'] + return success_response( + data={ + 'state': task_state, + 'meta': response_meta + }, + request=request + ) + elif task_state == 'FAILURE': + # Try to get error from task.info meta first (this is where run_ai_task sets it) + if not error_message and isinstance(task_info, dict): + error_message = task_info.get('error') or task_info.get('message', '') + error_type = task_info.get('error_type', 'UnknownError') + # Also check if message contains error info + if not error_message and 'message' in task_info: + msg = task_info.get('message', '') + if msg and 'Error:' in msg: + error_message = msg.replace('Error: ', '') + + # Use extracted error_message if available, otherwise try to get from error_info + if not error_message: + error_info = task_info + if isinstance(error_info, Exception): + error_message = str(error_info) + elif isinstance(error_info, dict): + error_message = error_info.get('error') or error_info.get('message', '') or str(error_info) + elif error_info: + error_message = str(error_info) + + # Final fallback - ensure we always have an error message + if not error_message or error_message.strip() == '': + error_message = f'Task execution failed - check Celery worker logs for task {task_id}' + error_type = 'ExecutionError' + + # If still no error message, try to get from task backend directly + if not error_message: + try: + # Try to get from backend's stored result + backend = task.backend + if hasattr(backend, 'get'): + stored = backend.get(task_id) + if stored and isinstance(stored, dict): + if 'error' in stored: + error_message = stored['error'] + elif isinstance(stored.get('result'), dict) and 'error' in stored['result']: + error_message = stored['result']['error'] + except Exception as backend_err: + logger.warning(f"Error getting from backend: {backend_err}") + + # Final fallback 
+ if not error_message: + error_message = 'Task failed - check backend logs for details' + + response_meta = { + 'error': error_message, + 'percentage': 0, + 'message': f'Error: {error_message}', + } + + # Include error_type if available + if error_type: + response_meta['error_type'] = error_type + + # Include step logs if available (from task result or error_info) + result = task_result or {} + meta = result if isinstance(result, dict) else (task_info if isinstance(task_info, dict) else {}) + if isinstance(meta, dict): + if 'request_steps' in meta: + response_meta['request_steps'] = meta['request_steps'] + if 'response_steps' in meta: + response_meta['response_steps'] = meta['response_steps'] + # Also include error_type if available in meta + if 'error_type' in meta and not error_type: + response_meta['error_type'] = meta['error_type'] + # Also check for error in meta directly + if 'error' in meta and not error_message: + error_message = meta['error'] + response_meta['error'] = error_message + if 'error_type' in meta and not error_type: + error_type = meta['error_type'] + response_meta['error_type'] = error_type + + return success_response( + data={ + 'state': task_state, + 'meta': response_meta + }, + request=request + ) + else: + # PENDING, STARTED, or other states + return success_response( + data={ + 'state': task_state, + 'meta': { + 'percentage': 0, + 'message': 'Task is starting...', + 'phase': 'initializing', + } + }, + request=request + ) + except (KombuOperationalError, RedisConnectionError, ConnectionError) as conn_error: + # Backend connection error - task might not be registered yet or backend is down + logger.warning(f"Backend connection error for task {task_id}: {type(conn_error).__name__}: {str(conn_error)}") + return success_response( + data={ + 'state': 'PENDING', + 'meta': { + 'percentage': 0, + 'message': 'Task is being queued...', + 'phase': 'initializing', + 'error': None # Don't show as error, just pending + } + }, + request=request + ) + 
except Exception as task_error: + logger.error(f"Error accessing Celery task {task_id}: {type(task_error).__name__}: {str(task_error)}", exc_info=True) + return success_response( + data={ + 'state': 'UNKNOWN', + 'meta': { + 'percentage': 0, + 'message': f'Error accessing task: {str(task_error)}', + 'error': str(task_error) + } + }, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + except Exception as e: + # Check if it's a connection-related error - treat as PENDING instead of error + error_type = type(e).__name__ + error_str = str(e).lower() + is_connection_error = ( + 'connection' in error_str or + 'connect' in error_str or + 'timeout' in error_str or + 'unavailable' in error_str or + 'network' in error_str or + error_type in ('ConnectionError', 'TimeoutError', 'OperationalError') + ) + + if is_connection_error: + logger.warning(f"Connection error getting task progress for {task_id}: {error_type}: {str(e)}") + return success_response( + data={ + 'state': 'PENDING', + 'meta': { + 'percentage': 0, + 'message': 'Task is being queued...', + 'phase': 'initializing', + 'error': None + } + }, + request=request + ) + else: + logger.error(f"Error getting task progress for {task_id}: {error_type}: {str(e)}", exc_info=True) + return success_response( + data={ + 'state': 'ERROR', + 'meta': { + 'error': f'Error getting task status: {str(e)}', + 'percentage': 0, + 'message': f'Error: {str(e)}' + } + }, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + diff --git a/tenant/backend/igny8_core/modules/system/views.py b/tenant/backend/igny8_core/modules/system/views.py new file mode 100644 index 00000000..b23c0b26 --- /dev/null +++ b/tenant/backend/igny8_core/modules/system/views.py @@ -0,0 +1,782 @@ +""" +System module views - for global settings and prompts +""" +import psutil +import os +import logging +from rest_framework import viewsets, status as http_status, filters +from rest_framework.decorators import action, 
"""
System module views - for global settings and prompts
"""
import psutil
import os
import logging

from rest_framework import viewsets, status as http_status, filters
from rest_framework.decorators import action, api_view, permission_classes
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from django.db import transaction, connection
from django.core.cache import cache
from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import extend_schema, extend_schema_view

from igny8_core.api.base import AccountModelViewSet
from igny8_core.api.response import success_response, error_response
from igny8_core.api.permissions import IsEditorOrAbove, IsAuthenticatedAndActive, IsViewerOrAbove, HasTenantAccess
from igny8_core.api.throttles import DebugScopedRateThrottle
from igny8_core.api.pagination import CustomPageNumberPagination
from .models import AIPrompt, AuthorProfile, Strategy
from .serializers import AIPromptSerializer, AuthorProfileSerializer, StrategySerializer

logger = logging.getLogger(__name__)


@extend_schema_view(
    list=extend_schema(tags=['System']),
    create=extend_schema(tags=['System']),
    retrieve=extend_schema(tags=['System']),
    update=extend_schema(tags=['System']),
    partial_update=extend_schema(tags=['System']),
    destroy=extend_schema(tags=['System']),
)
class AIPromptViewSet(AccountModelViewSet):
    """
    ViewSet for managing AI prompts.

    Unified API Standard v1.0 compliant.  The ``save`` and ``reset``
    actions share one upsert path and additionally require an
    editor-level role on top of the class permissions.
    """
    queryset = AIPrompt.objects.all()
    serializer_class = AIPromptSerializer
    permission_classes = [IsAuthenticatedAndActive, HasTenantAccess]
    throttle_scope = 'system'
    throttle_classes = [DebugScopedRateThrottle]
    pagination_class = CustomPageNumberPagination  # Explicitly use custom pagination

    def get_queryset(self):
        """Get prompts for the current account, ordered by prompt type."""
        return super().get_queryset().order_by('prompt_type')

    # ------------------------------------------------------------------
    # Internal helpers (shared by save/reset)
    # ------------------------------------------------------------------

    def _resolve_account(self, request):
        """Resolve the acting account.

        Order: request.account, then the authenticated user's account,
        then the first Account row.  NOTE(review): the last fallback is a
        development convenience that bypasses tenant isolation — confirm
        it is acceptable before production use.
        """
        account = getattr(request, 'account', None)
        if not account:
            user = getattr(request, 'user', None)
            if user and hasattr(user, 'is_authenticated') and user.is_authenticated:
                account = getattr(user, 'account', None)
        if not account:
            from igny8_core.auth.models import Account
            try:
                account = Account.objects.first()
            except Exception:
                account = None
        return account

    def _editor_check(self, request):
        """Return a 403 error response when the caller is below editor, else None."""
        if not IsEditorOrAbove().has_permission(request, self):
            return error_response(
                error='Permission denied. Editor or above role required.',
                status_code=http_status.HTTP_403_FORBIDDEN,
                request=request,
            )
        return None

    def _upsert_prompt(self, request, prompt_type, value, success_message):
        """Create or update the (account, prompt_type) row with *value*.

        A *value* of None resets the prompt to its built-in default.
        """
        account = self._resolve_account(request)
        if not account:
            return error_response(
                error='Account not found. Please ensure you are logged in.',
                status_code=http_status.HTTP_400_BAD_REQUEST,
                request=request,
            )

        from .utils import get_default_prompt
        default_value = get_default_prompt(prompt_type)
        if value is None:
            value = default_value

        prompt, created = AIPrompt.objects.get_or_create(
            prompt_type=prompt_type,
            account=account,
            defaults={
                'prompt_value': value,
                'default_prompt': default_value,
                'is_active': True,
            },
        )
        if not created:
            prompt.prompt_value = value
            prompt.save()

        serializer = self.get_serializer(prompt)
        return success_response(
            data=serializer.data,
            message=f'{prompt.get_prompt_type_display()} {success_message}',
            request=request,
        )

    # ------------------------------------------------------------------
    # Actions
    # ------------------------------------------------------------------

    @action(detail=False, methods=['get'], url_path='by_type/(?P<prompt_type>[^/.]+)', url_name='by_type')
    def get_by_type(self, request, prompt_type=None):
        """Get a prompt by type, falling back to the built-in default."""
        try:
            prompt = self.get_queryset().get(prompt_type=prompt_type)
        except AIPrompt.DoesNotExist:
            # No stored prompt: answer with the default so the UI can render.
            from .utils import get_default_prompt
            default_value = get_default_prompt(prompt_type)
            return success_response(
                data={
                    'prompt_type': prompt_type,
                    'prompt_value': default_value,
                    'default_prompt': default_value,
                    'is_active': True,
                },
                request=request,
            )
        serializer = self.get_serializer(prompt)
        return success_response(data=serializer.data, request=request)

    @action(detail=False, methods=['post'], url_path='save', url_name='save')
    def save_prompt(self, request):
        """Save or update a prompt - requires editor or above."""
        denied = self._editor_check(request)
        if denied:
            return denied

        prompt_type = request.data.get('prompt_type')
        prompt_value = request.data.get('prompt_value')
        if not prompt_type:
            return error_response(
                error='prompt_type is required',
                status_code=http_status.HTTP_400_BAD_REQUEST,
                request=request,
            )
        if prompt_value is None:
            return error_response(
                error='prompt_value is required',
                status_code=http_status.HTTP_400_BAD_REQUEST,
                request=request,
            )
        return self._upsert_prompt(request, prompt_type, prompt_value, 'saved successfully')

    @action(detail=False, methods=['post'], url_path='reset', url_name='reset')
    def reset_prompt(self, request):
        """Reset a prompt to its default value - requires editor or above."""
        denied = self._editor_check(request)
        if denied:
            return denied

        prompt_type = request.data.get('prompt_type')
        if not prompt_type:
            return error_response(
                error='prompt_type is required',
                status_code=http_status.HTTP_400_BAD_REQUEST,
                request=request,
            )
        return self._upsert_prompt(request, prompt_type, None, 'reset to default')
@extend_schema_view(
    list=extend_schema(tags=['System']),
    create=extend_schema(tags=['System']),
    retrieve=extend_schema(tags=['System']),
    update=extend_schema(tags=['System']),
    partial_update=extend_schema(tags=['System']),
    destroy=extend_schema(tags=['System']),
)
class AuthorProfileViewSet(AccountModelViewSet):
    """
    ViewSet for managing Author Profiles.

    Unified API Standard v1.0 compliant: standard CRUD over the
    account-scoped queryset with search, ordering and filtering.
    """
    # Core configuration
    queryset = AuthorProfile.objects.all()
    serializer_class = AuthorProfileSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    throttle_scope = 'system'
    throttle_classes = [DebugScopedRateThrottle]

    # List filtering / search / ordering
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['name', 'description', 'tone']
    ordering_fields = ['name', 'created_at', 'updated_at']
    ordering = ['name']
    filterset_fields = ['is_active', 'language']


@extend_schema_view(
    list=extend_schema(tags=['System']),
    create=extend_schema(tags=['System']),
    retrieve=extend_schema(tags=['System']),
    update=extend_schema(tags=['System']),
    partial_update=extend_schema(tags=['System']),
    destroy=extend_schema(tags=['System']),
)
class StrategyViewSet(AccountModelViewSet):
    """
    ViewSet for managing Strategies.

    Unified API Standard v1.0 compliant: standard CRUD over the
    account-scoped queryset with search, ordering and filtering.
    """
    # Core configuration
    queryset = Strategy.objects.all()
    serializer_class = StrategySerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    throttle_scope = 'system'
    throttle_classes = [DebugScopedRateThrottle]

    # List filtering / search / ordering
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['name', 'description']
    ordering_fields = ['name', 'created_at', 'updated_at']
    ordering = ['name']
    filterset_fields = ['is_active', 'sector']


@api_view(['GET'])
@permission_classes([AllowAny])  # Public endpoint
@extend_schema(
    tags=['System'],
    summary='Health Check',
    description='Simple health check endpoint to verify API is responding'
)
def ping(request):
    """
    Liveness probe.

    Responds with the unified envelope: ``{success: true, data: {status: 'ok'}}``.
    """
    payload = {'status': 'ok'}
    return success_response(data=payload, request=request)
""" + Comprehensive system status endpoint for monitoring + Returns CPU, memory, disk, database, Redis, Celery, and process information + """ + status_data = { + 'timestamp': timezone.now().isoformat(), + 'system': {}, + 'database': {}, + 'redis': {}, + 'celery': {}, + 'processes': {}, + 'modules': {}, + } + + try: + # System Resources + cpu_percent = psutil.cpu_percent(interval=1) + cpu_count = psutil.cpu_count() + memory = psutil.virtual_memory() + disk = psutil.disk_usage('/') + + status_data['system'] = { + 'cpu': { + 'usage_percent': cpu_percent, + 'cores': cpu_count, + 'status': 'healthy' if cpu_percent < 80 else 'warning' if cpu_percent < 95 else 'critical' + }, + 'memory': { + 'total_gb': round(memory.total / (1024**3), 2), + 'used_gb': round(memory.used / (1024**3), 2), + 'available_gb': round(memory.available / (1024**3), 2), + 'usage_percent': memory.percent, + 'status': 'healthy' if memory.percent < 80 else 'warning' if memory.percent < 95 else 'critical' + }, + 'disk': { + 'total_gb': round(disk.total / (1024**3), 2), + 'used_gb': round(disk.used / (1024**3), 2), + 'free_gb': round(disk.free / (1024**3), 2), + 'usage_percent': disk.percent, + 'status': 'healthy' if disk.percent < 80 else 'warning' if disk.percent < 95 else 'critical' + } + } + except Exception as e: + logger.error(f"Error getting system resources: {str(e)}") + status_data['system'] = {'error': str(e)} + + try: + # Database Status + with connection.cursor() as cursor: + cursor.execute("SELECT 1") + db_conn = True + cursor.execute("SELECT version()") + db_version = cursor.fetchone()[0] if cursor.rowcount > 0 else 'Unknown' + + # Get database size (PostgreSQL) + try: + cursor.execute(""" + SELECT pg_size_pretty(pg_database_size(current_database())) + """) + db_size = cursor.fetchone()[0] if cursor.rowcount > 0 else 'Unknown' + except: + db_size = 'Unknown' + + # Count active connections + try: + cursor.execute("SELECT count(*) FROM pg_stat_activity WHERE state = 'active'") + 
active_connections = cursor.fetchone()[0] if cursor.rowcount > 0 else 0 + except: + active_connections = 0 + + status_data['database'] = { + 'connected': db_conn, + 'version': db_version, + 'size': db_size, + 'active_connections': active_connections, + 'status': 'healthy' if db_conn else 'critical' + } + except Exception as e: + logger.error(f"Error getting database status: {str(e)}") + status_data['database'] = {'connected': False, 'error': str(e), 'status': 'critical'} + + try: + # Redis Status + redis_conn = False + redis_info = {} + try: + cache.set('status_check', 'ok', 10) + test_value = cache.get('status_check') + redis_conn = test_value == 'ok' + + # Try to get Redis info if available + if hasattr(cache, 'client'): + try: + redis_client = cache.client.get_client() + redis_info = redis_client.info() + except: + pass + except Exception as e: + redis_conn = False + redis_info = {'error': str(e)} + + status_data['redis'] = { + 'connected': redis_conn, + 'status': 'healthy' if redis_conn else 'critical', + 'info': redis_info if redis_info else {} + } + except Exception as e: + logger.error(f"Error getting Redis status: {str(e)}") + status_data['redis'] = {'connected': False, 'error': str(e), 'status': 'critical'} + + try: + # Celery Status + celery_workers = [] + celery_tasks = { + 'active': 0, + 'scheduled': 0, + 'reserved': 0, + } + + try: + from celery import current_app + inspect = current_app.control.inspect() + + # Get active workers + active_workers = inspect.active() or {} + scheduled = inspect.scheduled() or {} + reserved = inspect.reserved() or {} + + celery_workers = list(active_workers.keys()) + celery_tasks['active'] = sum(len(tasks) for tasks in active_workers.values()) + celery_tasks['scheduled'] = sum(len(tasks) for tasks in scheduled.values()) + celery_tasks['reserved'] = sum(len(tasks) for tasks in reserved.values()) + + except Exception as e: + logger.warning(f"Error getting Celery status: {str(e)}") + celery_workers = [] + celery_tasks = 
{'error': str(e)} + + status_data['celery'] = { + 'workers': celery_workers, + 'worker_count': len(celery_workers), + 'tasks': celery_tasks, + 'status': 'healthy' if len(celery_workers) > 0 else 'warning' + } + except Exception as e: + logger.error(f"Error getting Celery status: {str(e)}") + status_data['celery'] = {'error': str(e), 'status': 'warning'} + + try: + # Process Monitoring by Stack/Component + processes = { + 'gunicorn': [], + 'celery': [], + 'postgres': [], + 'redis': [], + 'nginx': [], + 'other': [] + } + + process_stats = { + 'gunicorn': {'count': 0, 'cpu': 0, 'memory_mb': 0}, + 'celery': {'count': 0, 'cpu': 0, 'memory_mb': 0}, + 'postgres': {'count': 0, 'cpu': 0, 'memory_mb': 0}, + 'redis': {'count': 0, 'cpu': 0, 'memory_mb': 0}, + 'nginx': {'count': 0, 'cpu': 0, 'memory_mb': 0}, + } + + for proc in psutil.process_iter(['pid', 'name', 'cmdline', 'cpu_percent', 'memory_info']): + try: + proc_info = proc.info + name = proc_info['name'].lower() + cmdline = ' '.join(proc_info['cmdline']) if proc_info['cmdline'] else '' + cmdline_lower = cmdline.lower() + + cpu = proc_info.get('cpu_percent', 0) or 0 + memory = proc_info.get('memory_info', None) + memory_mb = (memory.rss / (1024**2)) if memory else 0 + + # Categorize processes + if 'gunicorn' in cmdline_lower or 'gunicorn' in name: + processes['gunicorn'].append({ + 'pid': proc_info['pid'], + 'name': name, + 'cpu_percent': round(cpu, 2), + 'memory_mb': round(memory_mb, 2) + }) + process_stats['gunicorn']['count'] += 1 + process_stats['gunicorn']['cpu'] += cpu + process_stats['gunicorn']['memory_mb'] += memory_mb + elif 'celery' in cmdline_lower or 'celery' in name: + processes['celery'].append({ + 'pid': proc_info['pid'], + 'name': name, + 'cpu_percent': round(cpu, 2), + 'memory_mb': round(memory_mb, 2) + }) + process_stats['celery']['count'] += 1 + process_stats['celery']['cpu'] += cpu + process_stats['celery']['memory_mb'] += memory_mb + elif 'postgres' in name or 'postgresql' in name: + 
processes['postgres'].append({ + 'pid': proc_info['pid'], + 'name': name, + 'cpu_percent': round(cpu, 2), + 'memory_mb': round(memory_mb, 2) + }) + process_stats['postgres']['count'] += 1 + process_stats['postgres']['cpu'] += cpu + process_stats['postgres']['memory_mb'] += memory_mb + elif 'redis' in name or 'redis-server' in name: + processes['redis'].append({ + 'pid': proc_info['pid'], + 'name': name, + 'cpu_percent': round(cpu, 2), + 'memory_mb': round(memory_mb, 2) + }) + process_stats['redis']['count'] += 1 + process_stats['redis']['cpu'] += cpu + process_stats['redis']['memory_mb'] += memory_mb + elif 'nginx' in name or 'caddy' in name: + processes['nginx'].append({ + 'pid': proc_info['pid'], + 'name': name, + 'cpu_percent': round(cpu, 2), + 'memory_mb': round(memory_mb, 2) + }) + process_stats['nginx']['count'] += 1 + process_stats['nginx']['cpu'] += cpu + process_stats['nginx']['memory_mb'] += memory_mb + except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess): + continue + + # Round stats + for key in process_stats: + process_stats[key]['cpu'] = round(process_stats[key]['cpu'], 2) + process_stats[key]['memory_mb'] = round(process_stats[key]['memory_mb'], 2) + + status_data['processes'] = { + 'by_stack': process_stats, + 'details': {k: v[:10] for k, v in processes.items()} # Limit details to 10 per type + } + except Exception as e: + logger.error(f"Error getting process information: {str(e)}") + status_data['processes'] = {'error': str(e)} + + try: + # Module-specific task counts + from igny8_core.modules.planner.models import Keywords, Clusters, ContentIdeas + from igny8_core.modules.writer.models import Tasks, Images + + status_data['modules'] = { + 'planner': { + 'keywords': Keywords.objects.count(), + 'clusters': Clusters.objects.count(), + 'content_ideas': ContentIdeas.objects.count(), + }, + 'writer': { + 'tasks': Tasks.objects.count(), + 'images': Images.objects.count(), + } + } + except Exception as e: + logger.error(f"Error getting 
@api_view(['GET'])
@permission_classes([AllowAny])  # Will check admin in view
def get_request_metrics(request, request_id):
    """
    Return the cached resource metrics recorded for a single request.

    Access is restricted in-view (rather than via permission classes) to
    authenticated admin/developer users. Metrics are looked up in the Django
    cache under the key written by the resource-tracking middleware and may
    have expired, in which case a 404 is returned.
    """
    user = request.user

    # Guard clauses: unauthenticated first, then the admin/developer check.
    if not user.is_authenticated:
        return error_response(
            error='Authentication required',
            status_code=http_status.HTTP_401_UNAUTHORIZED,
            request=request
        )

    is_privileged = hasattr(user, 'is_admin_or_developer') and user.is_admin_or_developer()
    if not is_privileged:
        return error_response(
            error='Admin access required',
            status_code=http_status.HTTP_403_FORBIDDEN,
            request=request
        )

    from django.core.cache import cache

    cached_metrics = cache.get(f"resource_tracking_{request_id}")
    if not cached_metrics:
        # Either the request id is unknown or the cache entry has expired.
        return error_response(
            error='Metrics not found or expired',
            status_code=http_status.HTTP_404_NOT_FOUND,
            request=request
        )

    return success_response(data=cached_metrics, request=request)
+ """ + import json + import subprocess + import os + + try: + # Parse webhook payload + payload = json.loads(request.body) + event_type = request.headers.get('X-Gitea-Event', 'push') + + logger.info(f"[Webhook] Received {event_type} event from Gitea") + + # Only process push events + if event_type != 'push': + return success_response( + data={'status': 'ignored'}, + message=f'Event type {event_type} is not processed', + request=request + ) + + # Extract repository information + repository = payload.get('repository', {}) + repo_name = repository.get('name', '') + repo_full_name = repository.get('full_name', '') + ref = payload.get('ref', '') + + # Only process pushes to main branch + if ref != 'refs/heads/main': + logger.info(f"[Webhook] Ignoring push to {ref}, only processing main branch") + return success_response( + data={'status': 'ignored'}, + message=f'Push to {ref} ignored, only main branch is processed', + request=request + ) + + # Get commit information + commits = payload.get('commits', []) + commit_count = len(commits) + pusher = payload.get('pusher', {}).get('username', 'unknown') + + logger.info(f"[Webhook] Processing push: {commit_count} commit(s) by {pusher} to {repo_full_name}") + + # Pull latest code - run git pull directly + try: + import subprocess + logger.info(f"[Webhook] Pulling latest code...") + # Set safe directory first + subprocess.run( + ['git', 'config', '--global', '--add', 'safe.directory', '/data/app/igny8'], + capture_output=True, + timeout=5 + ) + # Pull latest code + result = subprocess.run( + ['git', '-C', '/data/app/igny8', 'pull', 'origin', 'main'], + capture_output=True, + text=True, + timeout=30 + ) + if result.returncode == 0: + logger.info(f"[Webhook] Git pull successful") + else: + logger.error(f"[Webhook] Git pull failed: {result.stderr}") + except Exception as e: + logger.error(f"[Webhook] Git pull error: {e}") + + # Trigger deployment - restart containers + deployment_success = False + deployment_error = None + + try: + 
# Try to use docker Python library first, fallback to subprocess + try: + import docker as docker_lib + client = docker_lib.DockerClient(base_url='unix://var/run/docker.sock') + + # Restart frontend container (don't restart backend from within itself) + logger.info(f"[Webhook] Restarting frontend container...") + frontend_container = client.containers.get("igny8_frontend") + frontend_container.restart(timeout=30) + logger.info(f"[Webhook] Frontend container restarted successfully") + + # Schedule backend restart via subprocess in background (non-blocking) + # This avoids deadlock from restarting the container we're running in + logger.info(f"[Webhook] Scheduling backend container restart...") + import threading + def restart_backend(): + import time + time.sleep(2) # Give webhook time to respond + try: + backend_container = client.containers.get("igny8_backend") + backend_container.restart(timeout=30) + logger.info(f"[Webhook] Backend container restarted successfully (delayed)") + except Exception as e: + logger.error(f"[Webhook] Delayed backend restart failed: {e}") + + restart_thread = threading.Thread(target=restart_backend, daemon=True) + restart_thread.start() + + deployment_success = True + + except ImportError: + # Fallback to subprocess with docker command + logger.info(f"[Webhook] Docker library not available, using subprocess...") + + # Try /usr/bin/docker or docker in PATH + docker_cmd = "/usr/bin/docker" + import shutil + if not os.path.exists(docker_cmd): + docker_cmd = shutil.which("docker") or "docker" + + # Restart backend container + logger.info(f"[Webhook] Restarting backend container...") + backend_result = subprocess.run( + [docker_cmd, "restart", "igny8_backend"], + capture_output=True, + text=True, + timeout=30 + ) + + if backend_result.returncode != 0: + raise Exception(f"Backend restart failed: {backend_result.stderr}") + logger.info(f"[Webhook] Backend container restarted successfully") + + # Restart frontend container + 
logger.info(f"[Webhook] Restarting frontend container...") + frontend_result = subprocess.run( + [docker_cmd, "restart", "igny8_frontend"], + capture_output=True, + text=True, + timeout=30 + ) + + if frontend_result.returncode != 0: + raise Exception(f"Frontend restart failed: {frontend_result.stderr}") + logger.info(f"[Webhook] Frontend container restarted successfully") + + deployment_success = True + + logger.info(f"[Webhook] Deployment completed: containers restarted") + + except subprocess.TimeoutExpired as e: + deployment_error = f"Deployment timeout: {str(e)}" + logger.error(f"[Webhook] {deployment_error}") + except Exception as deploy_error: + deployment_error = str(deploy_error) + logger.error(f"[Webhook] Deployment error: {deploy_error}", exc_info=True) + + return success_response( + data={ + 'status': 'success' if deployment_success else 'partial', + 'repository': repo_full_name, + 'branch': ref, + 'commits': commit_count, + 'pusher': pusher, + 'event': event_type, + 'deployment': { + 'success': deployment_success, + 'error': deployment_error + } + }, + message='Webhook received and processed', + request=request + ) + + except json.JSONDecodeError as e: + logger.error(f"[Webhook] Invalid JSON payload: {e}") + return error_response( + error='Invalid JSON payload', + status_code=http_status.HTTP_400_BAD_REQUEST, + request=request + ) + + except Exception as e: + logger.error(f"[Webhook] Error processing webhook: {e}", exc_info=True) + return error_response( + error=str(e), + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) diff --git a/tenant/backend/igny8_core/modules/writer/views.py b/tenant/backend/igny8_core/modules/writer/views.py new file mode 100644 index 00000000..76cba558 --- /dev/null +++ b/tenant/backend/igny8_core/modules/writer/views.py @@ -0,0 +1,1710 @@ +from rest_framework import viewsets, filters, status +from rest_framework.decorators import action +from rest_framework.response import Response +from 
@extend_schema_view(
    list=extend_schema(tags=['Writer']),
    create=extend_schema(tags=['Writer']),
    retrieve=extend_schema(tags=['Writer']),
    update=extend_schema(tags=['Writer']),
    partial_update=extend_schema(tags=['Writer']),
    destroy=extend_schema(tags=['Writer']),
)
class TasksViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing tasks with CRUD operations
    Unified API Standard v1.0 compliant
    Stage 1 Refactored - removed deprecated filters
    """
    queryset = Tasks.objects.select_related('cluster', 'site', 'sector')
    serializer_class = TasksSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    pagination_class = CustomPageNumberPagination  # Explicitly use custom pagination
    throttle_scope = 'writer'
    throttle_classes = [DebugScopedRateThrottle]

    # DRF filtering configuration
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]

    # Search configuration
    search_fields = ['title', 'keywords']

    # Ordering configuration
    ordering_fields = ['title', 'created_at', 'status']
    ordering = ['-created_at']  # Default ordering (newest first)

    # Filter configuration - Stage 1: removed entity_type, cluster_role
    filterset_fields = ['status', 'cluster_id', 'content_type', 'content_structure']

    def perform_create(self, serializer):
        """
        Require explicit site_id and sector_id - no defaults.

        Resolves site/sector from validated_data or query params, verifies the
        sector belongs to the site, then saves with the resolved account.
        Raises ValidationError for any missing or mismatched id.
        """
        user = getattr(self.request, 'user', None)

        # query_params only exists on DRF requests; fall back to Django's GET.
        try:
            query_params = getattr(self.request, 'query_params', None)
            if query_params is None:
                query_params = getattr(self.request, 'GET', {})
        except AttributeError:
            query_params = {}

        # NOTE: values sourced from query params are strings, not ints.
        site_id = serializer.validated_data.get('site_id') or query_params.get('site_id')
        sector_id = serializer.validated_data.get('sector_id') or query_params.get('sector_id')

        from igny8_core.auth.models import Site, Sector
        from rest_framework.exceptions import ValidationError

        # Site ID is REQUIRED
        if not site_id:
            raise ValidationError("site_id is required. Please select a site.")

        try:
            site = Site.objects.get(id=site_id)
        except Site.DoesNotExist:
            raise ValidationError(f"Site with id {site_id} does not exist")

        # Sector ID is REQUIRED
        if not sector_id:
            raise ValidationError("sector_id is required. Please select a sector.")

        try:
            sector = Sector.objects.get(id=sector_id)
            # BUGFIX: compare against site.id (an int) rather than the raw
            # site_id input, which is a *string* when it came from query
            # params — `sector.site_id != '3'` was always True, rejecting
            # valid sectors.
            if sector.site_id != site.id:
                raise ValidationError("Sector does not belong to the selected site")
        except Sector.DoesNotExist:
            raise ValidationError(f"Sector with id {sector_id} does not exist")

        # Strip the write-only id fields; the model is saved with the
        # resolved objects instead.
        serializer.validated_data.pop('site_id', None)
        serializer.validated_data.pop('sector_id', None)

        # Account resolution order: middleware -> authenticated user -> site owner.
        account = getattr(self.request, 'account', None)
        if not account and user and user.is_authenticated and user.account:
            account = user.account
        if not account:
            account = site.account

        serializer.save(account=account, site=site, sector=sector)

    @action(detail=False, methods=['POST'], url_path='bulk_delete', url_name='bulk_delete')
    def bulk_delete(self, request):
        """Bulk delete tasks"""
        ids = request.data.get('ids', [])
        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()
        deleted_count, _ = queryset.filter(id__in=ids).delete()

        return success_response(data={'deleted_count': deleted_count}, request=request)

    @action(detail=False, methods=['post'], url_path='bulk_update', url_name='bulk_update')
    def bulk_update(self, request):
        """Bulk update task status"""
        ids = request.data.get('ids', [])
        status_value = request.data.get('status')

        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )
        if not status_value:
            return error_response(
                error='No status provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # NOTE(review): status_value is not validated against the model's
        # status choices here — .update() bypasses model validation. Confirm
        # whether invalid statuses should be rejected.
        queryset = self.get_queryset()
        updated_count = queryset.filter(id__in=ids).update(status=status_value)

        return success_response(data={'updated_count': updated_count}, request=request)

    @action(detail=False, methods=['post'], url_path='auto_generate_content', url_name='auto_generate_content')
    def auto_generate_content(self, request):
        """Auto-generate content for tasks using ContentGenerationService"""
        import logging

        logger = logging.getLogger(__name__)

        try:
            ids = request.data.get('ids', [])
            if not ids:
                return error_response(
                    error='No IDs provided',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )

            # Cap batch size to bound generation cost per request.
            if len(ids) > 10:
                return error_response(
                    error='Maximum 10 tasks allowed for content generation',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )

            # Get account
            account = getattr(request, 'account', None)
            if not account:
                return error_response(
                    error='Account is required',
                    status_code=status.HTTP_400_BAD_REQUEST,
                    request=request
                )

            # Validate task IDs exist (scoped to this account)
            queryset = self.get_queryset()
            existing_tasks = queryset.filter(id__in=ids, account=account)
            existing_count = existing_tasks.count()

            if existing_count == 0:
                return error_response(
                    error=f'No tasks found for the provided IDs: {ids}',
                    status_code=status.HTTP_404_NOT_FOUND,
                    request=request
                )

            # Use service to generate content; it may queue a Celery task
            # (returns task_id) or run synchronously.
            service = ContentGenerationService()
            try:
                result = service.generate_content(ids, account)

                if result.get('success'):
                    if 'task_id' in result:
                        # Async task queued
                        return success_response(
                            data={'task_id': result['task_id']},
                            message=result.get('message', 'Content generation started'),
                            request=request
                        )
                    else:
                        # Synchronous execution
                        return success_response(
                            data=result,
                            message='Content generated successfully',
                            request=request
                        )
                else:
                    return error_response(
                        error=result.get('error', 'Content generation failed'),
                        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                        request=request
                    )
            except InsufficientCreditsError as e:
                return error_response(
                    error=str(e),
                    status_code=status.HTTP_402_PAYMENT_REQUIRED,
                    request=request
                )
            except Exception as e:
                logger.error(f"Error in auto_generate_content: {str(e)}", exc_info=True)
                return error_response(
                    error=f'Content generation failed: {str(e)}',
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    request=request
                )

        except Exception as e:
            logger.error(f"Unexpected error in auto_generate_content: {str(e)}", exc_info=True)
            return error_response(
                error=f'Unexpected error: {str(e)}',
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request
            )
@extend_schema_view(
    list=extend_schema(tags=['Writer']),
    create=extend_schema(tags=['Writer']),
    retrieve=extend_schema(tags=['Writer']),
    update=extend_schema(tags=['Writer']),
    partial_update=extend_schema(tags=['Writer']),
    destroy=extend_schema(tags=['Writer']),
)
class ImagesViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing content images
    Unified API Standard v1.0 compliant
    """
    queryset = Images.objects.all()
    serializer_class = ImagesSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    pagination_class = CustomPageNumberPagination
    throttle_scope = 'writer'
    throttle_classes = [DebugScopedRateThrottle]

    filter_backends = [DjangoFilterBackend, filters.OrderingFilter]
    ordering_fields = ['created_at', 'position', 'id']
    ordering = ['-id']  # Sort by ID descending (newest first)
    filterset_fields = ['task_id', 'content_id', 'image_type', 'status']

    def perform_create(self, serializer):
        """
        Override to automatically set account, site, and sector.

        Site/sector resolution: middleware-set request attributes, then the
        user's active site, then the site's default sector. Raises
        ValidationError (dict form for structured errors) if either is
        still missing.
        """
        from rest_framework.exceptions import ValidationError

        # Get site and sector from request (set by middleware) or user's active context
        site = getattr(self.request, 'site', None)
        sector = getattr(self.request, 'sector', None)

        if not site:
            # Fallback to user's active site if not set by middleware
            user = getattr(self.request, 'user', None)
            if user and user.is_authenticated and hasattr(user, 'active_site'):
                site = user.active_site

        if not sector and site:
            # Fallback to default sector for the site if not set by middleware
            sector = site.sectors.filter(is_default=True).first()

        # Site and sector are required - raise ValidationError if not available
        if not site:
            raise ValidationError({"site": ["Site is required for image creation. Please select a site."]})
        if not sector:
            raise ValidationError({"sector": ["Sector is required for image creation. Please select a sector."]})

        # Add site and sector to validated_data so base class can validate access
        serializer.validated_data['site'] = site
        serializer.validated_data['sector'] = sector

        # Call parent to set account and validate access
        super().perform_create(serializer)

    @action(detail=True, methods=['get'], url_path='file', url_name='image_file')
    def serve_image_file(self, request, pk=None):
        """
        Serve image file from local path via URL
        GET /api/v1/writer/images/{id}/file/

        NOTE(review): intentionally skips account filtering so image files
        are publicly reachable — confirm that is the desired exposure.
        """
        import os
        import logging
        from django.http import FileResponse

        # BUGFIX: logger was previously undefined at the file-not-found log
        # call below, so that path raised NameError and returned a 500
        # instead of the intended 404.
        logger = logging.getLogger(__name__)

        try:
            # Get image directly without account filtering for file serving
            try:
                image = Images.objects.get(pk=pk)
            except Images.DoesNotExist:
                return error_response(
                    error='Image not found',
                    status_code=status.HTTP_404_NOT_FOUND,
                    request=request
                )

            # Check if image has a local path
            if not image.image_path:
                return error_response(
                    error='No local file path available for this image',
                    status_code=status.HTTP_404_NOT_FOUND,
                    request=request
                )

            file_path = image.image_path

            # Verify file exists at the saved path
            if not os.path.exists(file_path):
                logger.error(f"[serve_image_file] Image {pk} - File not found at saved path: {file_path}")
                return error_response(
                    error=f'Image file not found at: {file_path}',
                    status_code=status.HTTP_404_NOT_FOUND,
                    request=request
                )

            # Check if file is readable
            if not os.access(file_path, os.R_OK):
                return error_response(
                    error='Image file is not readable',
                    status_code=status.HTTP_403_FORBIDDEN,
                    request=request
                )

            # Determine content type from file extension
            import mimetypes
            content_type, _ = mimetypes.guess_type(file_path)
            if not content_type:
                content_type = 'image/png'  # Default to PNG

            # Serve the file (FileResponse closes the handle when consumed)
            try:
                return FileResponse(
                    open(file_path, 'rb'),
                    content_type=content_type,
                    filename=os.path.basename(file_path)
                )
            except Exception as e:
                return error_response(
                    error=f'Failed to serve file: {str(e)}',
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    request=request
                )

        # Removed an unreachable duplicate `except Images.DoesNotExist`
        # handler: the lookup above already catches it and returns 404.
        except Exception as e:
            logger.error(f"Error serving image file: {str(e)}", exc_info=True)
            return error_response(
                error=f'Failed to serve image: {str(e)}',
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request
            )

    @action(detail=False, methods=['post'], url_path='auto_generate', url_name='auto_generate_images')
    def auto_generate_images(self, request):
        """Auto-generate images for tasks using AI"""
        task_ids = request.data.get('task_ids', [])
        if not task_ids:
            return error_response(
                error='No task IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        if len(task_ids) > 10:
            return error_response(
                error='Maximum 10 tasks allowed for image generation',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        # Get account
        account = getattr(request, 'account', None)
        account_id = account.id if account else None

        # BUGFIX: import the broker error class up-front with a fallback.
        # Previously this import sat inside the try block, so when kombu was
        # missing the raised ImportError hit `except KombuOperationalError`
        # first and blew up with NameError instead of returning the intended
        # 503 from the `except ImportError` branch.
        try:
            from kombu.exceptions import OperationalError as KombuOperationalError
        except ImportError:
            class KombuOperationalError(Exception):
                """Placeholder when kombu is not installed; never raised."""

        # Try to queue Celery task, fall back to synchronous if Celery not available
        try:
            from igny8_core.ai.tasks import run_ai_task

            if hasattr(run_ai_task, 'delay'):
                # Celery is available - queue async task
                task = run_ai_task.delay(
                    function_name='generate_images',
                    payload={'ids': task_ids},
                    account_id=account_id
                )
                return success_response(
                    data={'task_id': str(task.id)},
                    message='Image generation started',
                    request=request
                )
            else:
                # Celery not available - execute synchronously
                result = run_ai_task(
                    function_name='generate_images',
                    payload={'ids': task_ids},
                    account_id=account_id
                )
                if result.get('success'):
                    return success_response(
                        data={'images_created': result.get('count', 0)},
                        message=result.get('message', 'Image generation completed'),
                        request=request
                    )
                else:
                    return error_response(
                        error=result.get('error', 'Image generation failed'),
                        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                        request=request
                    )
        except KombuOperationalError:
            return error_response(
                error='Task queue unavailable. Please try again.',
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                request=request
            )
        except ImportError:
            # Tasks module not available
            return error_response(
                error='Image generation task not available',
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                request=request
            )
        except Exception as e:
            import logging
            logger = logging.getLogger(__name__)
            logger.error(f"Error queuing image generation task: {str(e)}", exc_info=True)
            return error_response(
                error=f'Failed to start image generation: {str(e)}',
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request
            )

    @action(detail=False, methods=['post'], url_path='bulk_update', url_name='bulk_update')
    def bulk_update(self, request):
        """Bulk update image status by content_id or image IDs
        Updates all images for a content record (featured + 1-6 in-article images)
        """
        from .models import Content

        content_id = request.data.get('content_id')
        image_ids = request.data.get('ids', [])
        status_value = request.data.get('status')

        if not status_value:
            return error_response(
                error='No status provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()

        # Update by content_id if provided, otherwise by image IDs
        if content_id:
            try:
                # Images link directly to content (task field removed in refactor)
                content = Content.objects.get(id=content_id)
                updated_count = queryset.filter(content=content).update(status=status_value)
            except Content.DoesNotExist:
                return error_response(
                    error='Content not found',
                    status_code=status.HTTP_404_NOT_FOUND,
                    request=request
                )
        elif image_ids:
            updated_count = queryset.filter(id__in=image_ids).update(status=status_value)
        else:
            return error_response(
                error='Either content_id or ids must be provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        return success_response(data={'updated_count': updated_count}, request=request)

    @action(detail=False, methods=['get'], url_path='content_images', url_name='content_images')
    def content_images(self, request):
        """Get images grouped by content - one row per content with featured and in-article images"""
        from .serializers import ContentImageSerializer

        account = getattr(request, 'account', None)

        # Get site_id and sector_id from query parameters
        site_id = request.query_params.get('site_id')
        sector_id = request.query_params.get('sector_id')

        # Content that has at least one linked image (images link directly
        # to content since Stage 1 - the task linkage was removed).
        queryset = Content.objects.filter(images__isnull=False)
        if account:
            queryset = queryset.filter(account=account)

        # Apply site/sector filtering if provided (silently ignore bad ints)
        if site_id:
            try:
                queryset = queryset.filter(site_id=int(site_id))
            except (ValueError, TypeError):
                pass

        if sector_id:
            try:
                queryset = queryset.filter(sector_id=int(sector_id))
            except (ValueError, TypeError):
                pass

        # PERF: iterate the distinct queryset directly instead of collecting
        # ids and issuing one Content.objects.get() per id (N+1).
        grouped_data = []
        for content in queryset.distinct():
            # Get images linked directly to content
            content_images = Images.objects.filter(content=content).order_by('position')

            # Get featured image
            featured_image = content_images.filter(image_type='featured').first()

            # Get in-article images (sorted by position)
            in_article_images = list(content_images.filter(image_type='in_article').order_by('position'))

            # Determine overall status from the statuses of all images
            all_images = list(content_images)
            if not all_images:
                overall_status = 'pending'
            elif all(img.status == 'generated' for img in all_images):
                overall_status = 'complete'
            elif any(img.status == 'failed' for img in all_images):
                overall_status = 'failed'
            elif any(img.status == 'generated' for img in all_images):
                overall_status = 'partial'
            else:
                overall_status = 'pending'

            # Create serializer instances with request context for proper URL generation
            featured_serializer = ContentImageSerializer(featured_image, context={'request': request}) if featured_image else None
            in_article_serializers = [ContentImageSerializer(img, context={'request': request}) for img in in_article_images]

            grouped_data.append({
                'content_id': content.id,
                'content_title': content.title or content.meta_title or f"Content #{content.id}",
                'content_status': content.status,
                'featured_image': featured_serializer.data if featured_serializer else None,
                'in_article_images': [s.data for s in in_article_serializers],
                'overall_status': overall_status,
            })

        # Sort by content title
        grouped_data.sort(key=lambda x: x['content_title'])

        return success_response(
            data={
                'count': len(grouped_data),
                'results': grouped_data
            },
            request=request
        )

    @action(detail=False, methods=['post'], url_path='generate_images', url_name='generate_images')
    def generate_images(self, request):
        """Generate images from prompts - queues Celery task for sequential processing"""
        import logging
        from igny8_core.ai.tasks import process_image_generation_queue

        # BUGFIX: logger was previously undefined in this method, so the
        # error path raised NameError instead of logging and returning 500.
        logger = logging.getLogger(__name__)

        account = getattr(request, 'account', None)
        image_ids = request.data.get('ids', [])
        content_id = request.data.get('content_id')

        if not image_ids:
            return error_response(
                error='No image IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        account_id = account.id if account else None

        # Queue Celery task
        try:
            if hasattr(process_image_generation_queue, 'delay'):
                task = process_image_generation_queue.delay(
                    image_ids=image_ids,
                    account_id=account_id,
                    content_id=content_id
                )
                return success_response(
                    data={'task_id': str(task.id)},
                    message='Image generation started',
                    request=request
                )
            else:
                # Fallback to synchronous execution (for testing)
                result = process_image_generation_queue(
                    image_ids=image_ids,
                    account_id=account_id,
                    content_id=content_id
                )
                return success_response(data=result, request=request)
        except Exception as e:
            logger.error(f"[generate_images] Error: {str(e)}", exc_info=True)
            return error_response(
                error=str(e),
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                request=request
            )

    @action(detail=True, methods=['get'], url_path='validation', url_name='validation')
    def validation(self, request, pk=None):
        """
        Stage 3: Get validation checklist for content.

        GET /api/v1/writer/content/{id}/validation/
        Returns aggregated validation checklist for Writer UI.

        NOTE(review): this endpoint is routed on the *images* viewset even
        though the docstring references the content URL, so self.get_object()
        resolves an Images row here — confirm whether these validation
        actions belong on ContentViewSet instead. Left in place to avoid a
        route change.
        """
        content = self.get_object()
        validation_service = ContentValidationService()

        errors = validation_service.validate_content(content)
        publish_errors = validation_service.validate_for_publish(content)

        return success_response(
            data={
                'content_id': content.id,
                'is_valid': len(errors) == 0,
                'ready_to_publish': len(publish_errors) == 0,
                'validation_errors': errors,
                'publish_errors': publish_errors,
                'metadata': {
                    'has_entity_type': bool(content.content_type),
                    'entity_type': content.content_type,
                    'has_cluster_mapping': self._has_cluster_mapping(content),
                    'has_taxonomy_mapping': self._has_taxonomy_mapping(content),
                }
            },
            request=request
        )

    @action(detail=True, methods=['post'], url_path='validate', url_name='validate')
    def validate(self, request, pk=None):
        """
        Stage 3: Re-run validators and return actionable errors.

        POST /api/v1/writer/content/{id}/validate/
        Re-validates content and returns structured errors.

        NOTE(review): same routing concern as `validation` above.
        """
        content = self.get_object()
        validation_service = ContentValidationService()

        # Metadata is now persisted directly on content - no task linkage
        # needed (MetadataMappingService path deprecated).

        errors = validation_service.validate_for_publish(content)

        return success_response(
            data={
                'content_id': content.id,
                'is_valid': len(errors) == 0,
                'errors': errors,
            },
            request=request
        )

    def _has_cluster_mapping(self, content):
        """Helper to check if content has cluster mapping"""
        from igny8_core.business.content.models import ContentClusterMap
        return ContentClusterMap.objects.filter(content=content).exists()

    def _has_taxonomy_mapping(self, content):
        """Helper to check if content has taxonomy mapping"""
        from igny8_core.business.content.models import ContentTaxonomyMap
        return ContentTaxonomyMap.objects.filter(content=content).exists()
@extend_schema_view(
    list=extend_schema(tags=['Writer']),
    create=extend_schema(tags=['Writer']),
    retrieve=extend_schema(tags=['Writer']),
    update=extend_schema(tags=['Writer']),
    partial_update=extend_schema(tags=['Writer']),
    destroy=extend_schema(tags=['Writer']),
)
class ContentViewSet(SiteSectorModelViewSet):
    """
    ViewSet for managing content with new unified structure
    Unified API Standard v1.0 compliant
    Stage 1 Refactored - removed deprecated fields
    """
    # select_related/prefetch_related avoid per-row queries for the related
    # cluster/site/sector and taxonomy terms when serializing lists.
    queryset = Content.objects.select_related('cluster', 'site', 'sector').prefetch_related('taxonomy_terms')
    serializer_class = ContentSerializer
    permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove]
    pagination_class = CustomPageNumberPagination
    throttle_scope = 'writer'
    throttle_classes = [DebugScopedRateThrottle]

    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['title', 'content_html', 'external_url']
    ordering_fields = ['created_at', 'updated_at', 'status']
    ordering = ['-created_at']
    # Stage 1: removed task_id, entity_type, content_format, cluster_role, sync_status, external_type
    filterset_fields = [
        'cluster_id',
        'status',
        'content_type',
        'content_structure',
        'source',
    ]

    def perform_create(self, serializer):
        """Override to automatically set account (from middleware-set request
        attribute); saves without an account if none is present."""
        account = getattr(self.request, 'account', None)
        if account:
            serializer.save(account=account)
        else:
            serializer.save()

    @action(detail=False, methods=['POST'], url_path='bulk_delete', url_name='bulk_delete')
    def bulk_delete(self, request):
        """Bulk delete content.

        Expects {"ids": [...]} in the request body; deletes matching rows
        from the (account/site/sector-scoped) queryset and returns the count.
        """
        ids = request.data.get('ids', [])
        if not ids:
            return error_response(
                error='No IDs provided',
                status_code=status.HTTP_400_BAD_REQUEST,
                request=request
            )

        queryset = self.get_queryset()
        deleted_count, _ = queryset.filter(id__in=ids).delete()

        return success_response(data={'deleted_count': deleted_count}, request=request)
@action(detail=True, methods=['post'], url_path='publish', url_name='publish', permission_classes=[IsAuthenticatedAndActive, IsEditorOrAbove]) + def publish(self, request, pk=None): + """ + STAGE 3: Publish content to WordPress site via Celery task. + Mirrors the automated publishing flow for manual publishing from Review page. + + POST /api/v1/writer/content/{id}/publish/ + { + "site_integration_id": 1 // Optional - defaults to finding WordPress integration for content's site + } + """ + from igny8_core.business.integration.models import SiteIntegration + from igny8_core.tasks.wordpress_publishing import publish_content_to_wordpress + import logging + + logger = logging.getLogger(__name__) + + content = self.get_object() + + # STAGE 3: Prevent duplicate publishing + if content.external_id: + return error_response( + error='Content already published. Use WordPress to update or unpublish first.', + status_code=status.HTTP_400_BAD_REQUEST, + request=request, + errors={'external_id': [f'Already published with ID: {content.external_id}']} + ) + + # Get site integration (use content's site if not specified) + site_integration_id = request.data.get('site_integration_id') + + if not site_integration_id: + # Find WordPress integration for this site + site_integrations = SiteIntegration.objects.filter( + site=content.site, + platform='wordpress', + is_active=True + ) + + if not site_integrations.exists(): + return error_response( + error='No active WordPress integration found for this site', + status_code=status.HTTP_400_BAD_REQUEST, + request=request, + errors={'site_integration': ['WordPress integration is required to publish']} + ) + + site_integration = site_integrations.first() + else: + try: + site_integration = SiteIntegration.objects.get( + id=site_integration_id, + site=content.site, + platform='wordpress' + ) + except SiteIntegration.DoesNotExist: + return error_response( + error=f'WordPress integration with id {site_integration_id} not found for this site', + 
status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + # OPTIMISTIC UPDATE: Set status to published immediately for better UX + # The Celery task will update external_id and external_url when WordPress responds + content.status = 'published' + content.save(update_fields=['status', 'updated_at']) + + # Queue publishing task (same as automated flow) + try: + result = publish_content_to_wordpress.delay( + content_id=content.id, + site_integration_id=site_integration.id + ) + + logger.info(f"[ContentViewSet.publish] Queued Celery task {result.id} for content {content.id}, status set to 'published'") + + return success_response( + data={ + 'content_id': content.id, + 'task_id': result.id, + 'status': 'published', + 'message': 'Publishing queued - content will be published to WordPress shortly' + }, + message='Content status updated to published and queued for WordPress', + request=request, + status_code=status.HTTP_202_ACCEPTED + ) + + except Exception as e: + logger.error(f"[ContentViewSet.publish] Error queuing publish task: {str(e)}", exc_info=True) + # Revert status on error + content.status = 'review' + content.save(update_fields=['status', 'updated_at']) + return error_response( + error=f"Failed to queue publishing task: {str(e)}", + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=True, methods=['get'], url_path='wordpress_status', url_name='wordpress_status') + def wordpress_status(self, request, pk=None): + """ + Get WordPress post status for published content. + Calls WordPress REST API to get current status. + + GET /api/v1/writer/content/{id}/wordpress_status/ + Returns: { + 'wordpress_status': 'publish'|'draft'|'pending'|null, + 'external_id': 123, + 'external_url': 'https://...', + 'last_checked': '2025-11-30T...' 
+ } + """ + import requests + from django.utils import timezone + from igny8_core.business.integration.models import SiteIntegration + import logging + + logger = logging.getLogger(__name__) + content = self.get_object() + + if not content.external_id: + return success_response( + data={ + 'wordpress_status': None, + 'external_id': None, + 'external_url': None, + 'message': 'Content not published to WordPress yet' + }, + request=request + ) + + # Get WordPress integration for this content's site + try: + site_integration = SiteIntegration.objects.filter( + site=content.site, + platform='wordpress', + is_active=True + ).first() + + if not site_integration: + return error_response( + error='No active WordPress integration found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + # Call WordPress REST API to get post status + wordpress_url = f"{site_integration.site_url}/wp-json/igny8/v1/post-status/{content.external_id}/" + headers = { + 'X-IGNY8-API-KEY': site_integration.api_key, + } + + response = requests.get(wordpress_url, headers=headers, timeout=10) + + if response.status_code == 200: + wp_data = response.json().get('data', {}) + return success_response( + data={ + 'wordpress_status': wp_data.get('post_status'), + 'external_id': content.external_id, + 'external_url': content.external_url, + 'post_title': wp_data.get('post_title'), + 'post_modified': wp_data.get('post_modified'), + 'last_checked': timezone.now().isoformat() + }, + request=request + ) + else: + logger.error(f"WordPress API error: {response.status_code} - {response.text}") + return error_response( + error=f'Failed to get WordPress status: {response.status_code}', + status_code=status.HTTP_502_BAD_GATEWAY, + request=request + ) + + except requests.RequestException as e: + logger.error(f"Request to WordPress failed: {str(e)}") + return error_response( + error=f'Connection to WordPress failed: {str(e)}', + status_code=status.HTTP_502_BAD_GATEWAY, + request=request + ) + except 
Exception as e: + logger.error(f"Error getting WordPress status: {str(e)}", exc_info=True) + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=True, methods=['post'], url_path='unpublish', url_name='unpublish', permission_classes=[IsAuthenticatedAndActive, IsEditorOrAbove]) + def unpublish(self, request, pk=None): + """ + STAGE 3: Unpublish content - clear external references and revert to draft. + Note: This does NOT delete the WordPress post, only clears the link. + + POST /api/v1/writer/content/{id}/unpublish/ + """ + content = self.get_object() + + if not content.external_id: + return error_response( + error='Content is not published', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + # Store the old values for response + old_external_id = content.external_id + old_external_url = content.external_url + + # Clear external references and revert status + content.external_id = None + content.external_url = None + content.status = 'draft' + content.save(update_fields=['external_id', 'external_url', 'status', 'updated_at']) + + return success_response( + data={ + 'content_id': content.id, + 'status': content.status, + 'was_external_id': old_external_id, + 'was_external_url': old_external_url, + }, + message='Content unpublished successfully. 
WordPress post was not deleted.', + request=request + ) + + @action(detail=False, methods=['post'], url_path='generate_image_prompts', url_name='generate_image_prompts') + def generate_image_prompts(self, request): + """Generate image prompts for content records - same pattern as other AI functions""" + from igny8_core.ai.tasks import run_ai_task + + account = getattr(request, 'account', None) + ids = request.data.get('ids', []) + + if not ids: + return error_response( + error='No IDs provided', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + account_id = account.id if account else None + + # Queue Celery task + try: + if hasattr(run_ai_task, 'delay'): + task = run_ai_task.delay( + function_name='generate_image_prompts', + payload={'ids': ids}, + account_id=account_id + ) + return success_response( + data={'task_id': str(task.id)}, + message='Image prompt generation started', + request=request + ) + else: + # Fallback to synchronous execution + result = run_ai_task( + function_name='generate_image_prompts', + payload={'ids': ids}, + account_id=account_id + ) + if result.get('success'): + return success_response( + data={'prompts_created': result.get('count', 0)}, + message='Image prompts generated successfully', + request=request + ) + else: + return error_response( + error=result.get('error', 'Image prompt generation failed'), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + except Exception as e: + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=True, methods=['get'], url_path='validation', url_name='validation') + def validation(self, request, pk=None): + """ + Stage 3: Get validation checklist for content. + + GET /api/v1/writer/content/{id}/validation/ + Returns aggregated validation checklist for Writer UI. 
+ """ + content = self.get_object() + validation_service = ContentValidationService() + + errors = validation_service.validate_content(content) + publish_errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'ready_to_publish': len(publish_errors) == 0, + 'validation_errors': errors, + 'publish_errors': publish_errors, + 'metadata': { + 'has_entity_type': bool(content.content_type), + 'entity_type': content.content_type, + 'has_cluster_mapping': self._has_cluster_mapping(content), + 'has_taxonomy_mapping': self._has_taxonomy_mapping(content), + } + }, + request=request + ) + + @action(detail=True, methods=['post'], url_path='validate', url_name='validate') + def validate(self, request, pk=None): + """ + Stage 3: Re-run validators and return actionable errors. + + POST /api/v1/writer/content/{id}/validate/ + Re-validates content and returns structured errors. + """ + content = self.get_object() + validation_service = ContentValidationService() + + # Persist metadata mappings if task exists + # Metadata is now persisted directly on content - no task linkage needed + # mapping_service = MetadataMappingService() # DEPRECATED + # mapping_service.persist_task_metadata_to_content(content) # DEPRECATED + + errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'errors': errors, + }, + request=request + ) + + def _has_cluster_mapping(self, content): + """Helper to check if content has cluster mapping""" + from igny8_core.business.content.models import ContentClusterMap + return ContentClusterMap.objects.filter(content=content).exists() + + def _has_taxonomy_mapping(self, content): + """Helper to check if content has taxonomy mapping""" + from igny8_core.business.content.models import ContentTaxonomyMap + return ContentTaxonomyMap.objects.filter(content=content).exists() 
+ + @action(detail=False, methods=['post'], url_path='generate_product', url_name='generate_product') + def generate_product(self, request): + """ + Generate product content (Phase 8). + + POST /api/v1/writer/content/generate_product/ + { + "name": "Product Name", + "description": "Product description", + "features": ["Feature 1", "Feature 2"], + "target_audience": "Target audience", + "primary_keyword": "Primary keyword", + "site_id": 1, // optional + "sector_id": 1 // optional + } + """ + from igny8_core.business.content.services.content_generation_service import ContentGenerationService + from igny8_core.auth.models import Site, Sector + + account = getattr(request, 'account', None) + if not account: + return error_response( + error='Account not found', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + product_data = request.data + site_id = product_data.get('site_id') + sector_id = product_data.get('sector_id') + + site = None + sector = None + + if site_id: + try: + site = Site.objects.get(id=site_id, account=account) + except Site.DoesNotExist: + return error_response( + error='Site not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + if sector_id: + try: + sector = Sector.objects.get(id=sector_id, account=account) + except Sector.DoesNotExist: + return error_response( + error='Sector not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + service = ContentGenerationService() + + try: + result = service.generate_product_content( + product_data=product_data, + account=account, + site=site, + sector=sector + ) + + if result.get('success'): + return success_response( + data={'task_id': result.get('task_id')}, + message=result.get('message', 'Product content generation started'), + request=request + ) + else: + return error_response( + error=result.get('error', 'Product content generation failed'), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + except Exception as e: 
+ return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=True, methods=['get'], url_path='validation', url_name='validation') + def validation(self, request, pk=None): + """ + Stage 3: Get validation checklist for content. + + GET /api/v1/writer/content/{id}/validation/ + Returns aggregated validation checklist for Writer UI. + """ + content = self.get_object() + validation_service = ContentValidationService() + + errors = validation_service.validate_content(content) + publish_errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'ready_to_publish': len(publish_errors) == 0, + 'validation_errors': errors, + 'publish_errors': publish_errors, + 'metadata': { + 'has_entity_type': bool(content.content_type), + 'entity_type': content.content_type, + 'has_cluster_mapping': self._has_cluster_mapping(content), + 'has_taxonomy_mapping': self._has_taxonomy_mapping(content), + } + }, + request=request + ) + + @action(detail=True, methods=['post'], url_path='validate', url_name='validate') + def validate(self, request, pk=None): + """ + Stage 3: Re-run validators and return actionable errors. + + POST /api/v1/writer/content/{id}/validate/ + Re-validates content and returns structured errors. 
+ """ + content = self.get_object() + validation_service = ContentValidationService() + + # Persist metadata mappings if task exists + # Metadata is now persisted directly on content - no task linkage needed + # mapping_service = MetadataMappingService() # DEPRECATED + # mapping_service.persist_task_metadata_to_content(content) # DEPRECATED + + errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'errors': errors, + }, + request=request + ) + + def _has_cluster_mapping(self, content): + """Helper to check if content has cluster mapping""" + from igny8_core.business.content.models import ContentClusterMap + return ContentClusterMap.objects.filter(content=content).exists() + + def _has_taxonomy_mapping(self, content): + """Helper to check if content has taxonomy mapping""" + from igny8_core.business.content.models import ContentTaxonomyMap + return ContentTaxonomyMap.objects.filter(content=content).exists() + + @action(detail=False, methods=['post'], url_path='generate_service', url_name='generate_service') + def generate_service(self, request): + """ + Generate service page content (Phase 8). 
+ + POST /api/v1/writer/content/generate_service/ + { + "name": "Service Name", + "description": "Service description", + "benefits": ["Benefit 1", "Benefit 2"], + "target_audience": "Target audience", + "primary_keyword": "Primary keyword", + "site_id": 1, // optional + "sector_id": 1 // optional + } + """ + from igny8_core.business.content.services.content_generation_service import ContentGenerationService + from igny8_core.auth.models import Site, Sector + + account = getattr(request, 'account', None) + if not account: + return error_response( + error='Account not found', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + service_data = request.data + site_id = service_data.get('site_id') + sector_id = service_data.get('sector_id') + + site = None + sector = None + + if site_id: + try: + site = Site.objects.get(id=site_id, account=account) + except Site.DoesNotExist: + return error_response( + error='Site not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + if sector_id: + try: + sector = Sector.objects.get(id=sector_id, account=account) + except Sector.DoesNotExist: + return error_response( + error='Sector not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + service = ContentGenerationService() + + try: + result = service.generate_service_page( + service_data=service_data, + account=account, + site=site, + sector=sector + ) + + if result.get('success'): + return success_response( + data={'task_id': result.get('task_id')}, + message=result.get('message', 'Service page generation started'), + request=request + ) + else: + return error_response( + error=result.get('error', 'Service page generation failed'), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + except Exception as e: + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=True, methods=['get'], url_path='validation', 
url_name='validation') + def validation(self, request, pk=None): + """ + Stage 3: Get validation checklist for content. + + GET /api/v1/writer/content/{id}/validation/ + Returns aggregated validation checklist for Writer UI. + """ + content = self.get_object() + validation_service = ContentValidationService() + + errors = validation_service.validate_content(content) + publish_errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'ready_to_publish': len(publish_errors) == 0, + 'validation_errors': errors, + 'publish_errors': publish_errors, + 'metadata': { + 'has_entity_type': bool(content.content_type), + 'entity_type': content.content_type, + 'has_cluster_mapping': self._has_cluster_mapping(content), + 'has_taxonomy_mapping': self._has_taxonomy_mapping(content), + } + }, + request=request + ) + + @action(detail=True, methods=['post'], url_path='validate', url_name='validate') + def validate(self, request, pk=None): + """ + Stage 3: Re-run validators and return actionable errors. + + POST /api/v1/writer/content/{id}/validate/ + Re-validates content and returns structured errors. 
+ """ + content = self.get_object() + validation_service = ContentValidationService() + + # Persist metadata mappings if task exists + # Metadata is now persisted directly on content - no task linkage needed + # mapping_service = MetadataMappingService() # DEPRECATED + # mapping_service.persist_task_metadata_to_content(content) # DEPRECATED + + errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'errors': errors, + }, + request=request + ) + + def _has_cluster_mapping(self, content): + """Helper to check if content has cluster mapping""" + from igny8_core.business.content.models import ContentClusterMap + return ContentClusterMap.objects.filter(content=content).exists() + + def _has_taxonomy_mapping(self, content): + """Helper to check if content has taxonomy mapping""" + from igny8_core.business.content.models import ContentTaxonomyMap + return ContentTaxonomyMap.objects.filter(content=content).exists() + + @action(detail=False, methods=['post'], url_path='generate_taxonomy', url_name='generate_taxonomy') + def generate_taxonomy(self, request): + """ + Generate taxonomy page content (Phase 8). 
+ + POST /api/v1/writer/content/generate_taxonomy/ + { + "name": "Taxonomy Name", + "description": "Taxonomy description", + "items": ["Item 1", "Item 2"], + "primary_keyword": "Primary keyword", + "site_id": 1, // optional + "sector_id": 1 // optional + } + """ + from igny8_core.business.content.services.content_generation_service import ContentGenerationService + from igny8_core.auth.models import Site, Sector + + account = getattr(request, 'account', None) + if not account: + return error_response( + error='Account not found', + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + taxonomy_data = request.data + site_id = taxonomy_data.get('site_id') + sector_id = taxonomy_data.get('sector_id') + + site = None + sector = None + + if site_id: + try: + site = Site.objects.get(id=site_id, account=account) + except Site.DoesNotExist: + return error_response( + error='Site not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + if sector_id: + try: + sector = Sector.objects.get(id=sector_id, account=account) + except Sector.DoesNotExist: + return error_response( + error='Sector not found', + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + service = ContentGenerationService() + + try: + result = service.generate_taxonomy( + taxonomy_data=taxonomy_data, + account=account, + site=site, + sector=sector + ) + + if result.get('success'): + return success_response( + data={'task_id': result.get('task_id')}, + message=result.get('message', 'Taxonomy generation started'), + request=request + ) + else: + return error_response( + error=result.get('error', 'Taxonomy generation failed'), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + except Exception as e: + return error_response( + error=str(e), + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + request=request + ) + + @action(detail=True, methods=['get'], url_path='validation', url_name='validation') + def validation(self, request, pk=None): 
+ """ + Stage 3: Get validation checklist for content. + + GET /api/v1/writer/content/{id}/validation/ + Returns aggregated validation checklist for Writer UI. + """ + content = self.get_object() + validation_service = ContentValidationService() + + errors = validation_service.validate_content(content) + publish_errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'ready_to_publish': len(publish_errors) == 0, + 'validation_errors': errors, + 'publish_errors': publish_errors, + 'metadata': { + 'has_entity_type': bool(content.content_type), + 'entity_type': content.content_type, + 'has_cluster_mapping': self._has_cluster_mapping(content), + 'has_taxonomy_mapping': self._has_taxonomy_mapping(content), + } + }, + request=request + ) + + @action(detail=True, methods=['post'], url_path='validate', url_name='validate') + def validate(self, request, pk=None): + """ + Stage 3: Re-run validators and return actionable errors. + + POST /api/v1/writer/content/{id}/validate/ + Re-validates content and returns structured errors. 
+ """ + content = self.get_object() + validation_service = ContentValidationService() + + # Persist metadata mappings if task exists + # Metadata is now persisted directly on content - no task linkage needed + # mapping_service = MetadataMappingService() # DEPRECATED + # mapping_service.persist_task_metadata_to_content(content) # DEPRECATED + + errors = validation_service.validate_for_publish(content) + + return success_response( + data={ + 'content_id': content.id, + 'is_valid': len(errors) == 0, + 'errors': errors, + }, + request=request + ) + + def _has_cluster_mapping(self, content): + """Helper to check if content has cluster mapping""" + from igny8_core.business.content.models import ContentClusterMap + return ContentClusterMap.objects.filter(content=content).exists() + + def _has_taxonomy_mapping(self, content): + """Helper to check if content has taxonomy mapping""" + # Check new M2M relationship + return content.taxonomy_terms.exists() + + +@extend_schema_view( + list=extend_schema(tags=['Writer']), + create=extend_schema(tags=['Writer']), + retrieve=extend_schema(tags=['Writer']), + update=extend_schema(tags=['Writer']), + partial_update=extend_schema(tags=['Writer']), + destroy=extend_schema(tags=['Writer']), +) +class ContentTaxonomyViewSet(SiteSectorModelViewSet): + """ + ViewSet for managing content taxonomies (categories, tags, product attributes) + Unified API Standard v1.0 compliant + """ + queryset = ContentTaxonomy.objects.select_related('parent', 'site', 'sector').prefetch_related('clusters', 'contents') + serializer_class = ContentTaxonomySerializer + permission_classes = [IsAuthenticatedAndActive, IsViewerOrAbove] + pagination_class = CustomPageNumberPagination + throttle_scope = 'writer' + throttle_classes = [DebugScopedRateThrottle] + + # DRF filtering configuration + filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter] + + # Search configuration + search_fields = ['name', 'slug', 'description', 
'external_taxonomy'] + + # Ordering configuration + ordering_fields = ['name', 'taxonomy_type', 'count', 'created_at'] + ordering = ['taxonomy_type', 'name'] + + # Filter configuration + # Removed "parent" to avoid non-model field in filterset (breaks drf-spectacular) + filterset_fields = ['taxonomy_type', 'sync_status', 'external_id', 'external_taxonomy'] + + def perform_create(self, serializer): + """Create taxonomy with site/sector context""" + user = getattr(self.request, 'user', None) + + try: + query_params = getattr(self.request, 'query_params', None) + if query_params is None: + query_params = getattr(self.request, 'GET', {}) + except AttributeError: + query_params = {} + + site_id = serializer.validated_data.get('site_id') or query_params.get('site_id') + sector_id = serializer.validated_data.get('sector_id') or query_params.get('sector_id') + + from igny8_core.auth.models import Site, Sector + from rest_framework.exceptions import ValidationError + + if not site_id: + raise ValidationError("site_id is required") + + try: + site = Site.objects.get(id=site_id) + except Site.DoesNotExist: + raise ValidationError(f"Site with id {site_id} does not exist") + + if not sector_id: + raise ValidationError("sector_id is required") + + try: + sector = Sector.objects.get(id=sector_id) + if sector.site_id != site_id: + raise ValidationError(f"Sector does not belong to the selected site") + except Sector.DoesNotExist: + raise ValidationError(f"Sector with id {sector_id} does not exist") + + serializer.validated_data.pop('site_id', None) + serializer.validated_data.pop('sector_id', None) + + account = getattr(self.request, 'account', None) + if not account and user and user.is_authenticated and user.account: + account = user.account + if not account: + account = site.account + + serializer.save(account=account, site=site, sector=sector) + + @action(detail=True, methods=['post'], permission_classes=[IsAuthenticatedAndActive, IsEditorOrAbove]) + def map_to_cluster(self, 
request, pk=None): + """Map taxonomy to semantic cluster""" + taxonomy = self.get_object() + cluster_id = request.data.get('cluster_id') + + if not cluster_id: + return error_response( + error="cluster_id is required", + status_code=status.HTTP_400_BAD_REQUEST, + request=request + ) + + from igny8_core.business.planning.models import Clusters + try: + cluster = Clusters.objects.get(id=cluster_id, site=taxonomy.site) + taxonomy.clusters.add(cluster) + + return success_response( + data={'message': f'Taxonomy "{taxonomy.name}" mapped to cluster "{cluster.name}"'}, + message="Taxonomy mapped to cluster successfully", + request=request + ) + except Clusters.DoesNotExist: + return error_response( + error=f"Cluster with id {cluster_id} not found", + status_code=status.HTTP_404_NOT_FOUND, + request=request + ) + + @action(detail=True, methods=['get']) + def contents(self, request, pk=None): + """Get all content associated with this taxonomy""" + taxonomy = self.get_object() + contents = taxonomy.contents.all() + + serializer = ContentSerializer(contents, many=True, context={'request': request}) + + return success_response( + data=serializer.data, + message=f"Found {contents.count()} content items for taxonomy '{taxonomy.name}'", + request=request + ) + + +# ContentAttributeViewSet temporarily disabled - ContentAttributeSerializer was removed in Stage 1 +# TODO: Re-implement or remove completely based on Stage 1 architecture decisions + + diff --git a/tenant/backend/igny8_core/settings.py b/tenant/backend/igny8_core/settings.py new file mode 100644 index 00000000..22ecaf77 --- /dev/null +++ b/tenant/backend/igny8_core/settings.py @@ -0,0 +1,616 @@ +""" +Django settings for igny8_core project. 
+""" + +from pathlib import Path +from datetime import timedelta +from urllib.parse import urlparse +import os + +BASE_DIR = Path(__file__).resolve().parent.parent + +# SECURITY: SECRET_KEY must be set via environment variable in production +# Generate a new key with: python -c "from django.core.management.utils import get_random_secret_key; print(get_random_secret_key())" +SECRET_KEY = os.getenv('SECRET_KEY', 'django-insecure-)#i8!6+_&j97eb_4actu86=qtg)p+p#)vr48!ahjs8u=o5#5aw') + +# SECURITY: DEBUG should be False in production +# Set DEBUG=False via environment variable for production deployments +DEBUG = os.getenv('DEBUG', 'False').lower() == 'true' + +# Unified API Standard v1.0 Feature Flags +# Set IGNY8_USE_UNIFIED_EXCEPTION_HANDLER=True to enable unified exception handler +# Set IGNY8_DEBUG_THROTTLE=True to bypass rate limiting in development +IGNY8_DEBUG_THROTTLE = os.getenv('IGNY8_DEBUG_THROTTLE', str(DEBUG)).lower() == 'true' +USE_SITE_BUILDER_REFACTOR = os.getenv('USE_SITE_BUILDER_REFACTOR', 'false').lower() == 'true' + +ALLOWED_HOSTS = [ + '*', # Allow all hosts for flexibility + 'api.igny8.com', + 'app.igny8.com', + 'igny8.com', + 'www.igny8.com', + 'localhost', + '127.0.0.1', + # Note: Do NOT add static IP addresses here - they change on container restart + # Use container names or domain names instead +] + +INSTALLED_APPS = [ + 'igny8_core.admin.apps.Igny8AdminConfig', # Custom admin config + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'rest_framework', + 'django_filters', + 'corsheaders', + 'drf_spectacular', # OpenAPI 3.0 schema generation + 'igny8_core.auth.apps.Igny8CoreAuthConfig', # Use app config with custom label + 'igny8_core.ai.apps.AIConfig', # AI Framework + 'igny8_core.modules.planner.apps.PlannerConfig', + 'igny8_core.modules.writer.apps.WriterConfig', + 'igny8_core.modules.system.apps.SystemConfig', + 
'igny8_core.modules.billing.apps.BillingConfig', + 'igny8_core.business.automation', # AI Automation Pipeline + 'igny8_core.business.optimization.apps.OptimizationConfig', + 'igny8_core.business.publishing.apps.PublishingConfig', + 'igny8_core.business.integration.apps.IntegrationConfig', + 'igny8_core.modules.linker.apps.LinkerConfig', + 'igny8_core.modules.optimizer.apps.OptimizerConfig', + 'igny8_core.modules.publisher.apps.PublisherConfig', + 'igny8_core.modules.integration.apps.IntegrationConfig', +] + +# System module needs explicit registration for admin + +AUTH_USER_MODEL = 'igny8_core_auth.User' + +CSRF_TRUSTED_ORIGINS = [ + 'https://api.igny8.com', + 'https://app.igny8.com', + 'http://localhost:8011', + 'http://127.0.0.1:8011', +] + +# Only use secure cookies in production (HTTPS) +# Default to False - set USE_SECURE_COOKIES=True in docker-compose for production +# This allows local development to work without HTTPS +USE_SECURE_COOKIES = os.getenv('USE_SECURE_COOKIES', 'False').lower() == 'true' +SESSION_COOKIE_SECURE = USE_SECURE_COOKIES +CSRF_COOKIE_SECURE = USE_SECURE_COOKIES + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'whitenoise.middleware.WhiteNoiseMiddleware', + 'corsheaders.middleware.CorsMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'igny8_core.middleware.request_id.RequestIDMiddleware', # Request ID tracking (must be early) + 'igny8_core.auth.middleware.AccountContextMiddleware', # Multi-account support + # AccountContextMiddleware sets request.account from JWT + 'igny8_core.middleware.resource_tracker.ResourceTrackingMiddleware', # Resource tracking for admin debug + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'igny8_core.urls' + +TEMPLATES = [ + { 
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [BASE_DIR / 'igny8_core' / 'templates'], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'igny8_core.wsgi.application' + +DATABASES = {} + +database_url = os.getenv("DATABASE_URL") +db_engine = os.getenv("DB_ENGINE", "").lower() +force_postgres = os.getenv("DJANGO_FORCE_POSTGRES", "false").lower() == "true" + +if database_url: + parsed = urlparse(database_url) + scheme = (parsed.scheme or "").lower() + + if scheme in {"sqlite", "sqlite3"}: + # Support both absolute and project-relative SQLite paths + netloc_path = f"{parsed.netloc}{parsed.path}" if parsed.netloc else parsed.path + db_path = netloc_path.lstrip("/") or "db.sqlite3" + if os.path.isabs(netloc_path): + sqlite_name = netloc_path + else: + sqlite_name = Path(db_path) if os.path.isabs(db_path) else BASE_DIR / db_path + DATABASES["default"] = { + "ENGINE": "django.db.backends.sqlite3", + "NAME": str(sqlite_name), + } + else: + DATABASES["default"] = { + "ENGINE": "django.db.backends.postgresql", + "NAME": parsed.path.lstrip("/") or os.getenv("DB_NAME", "igny8_db"), + "USER": parsed.username or os.getenv("DB_USER", "igny8"), + "PASSWORD": parsed.password or os.getenv("DB_PASSWORD", "igny8pass"), + "HOST": parsed.hostname or os.getenv("DB_HOST", "postgres"), + "PORT": str(parsed.port or os.getenv("DB_PORT", "5432")), + } +elif db_engine in {"sqlite", "sqlite3"} or os.getenv("USE_SQLITE", "false").lower() == "true": + sqlite_name = os.getenv("SQLITE_NAME") + if not sqlite_name: + sqlite_name = BASE_DIR / "db.sqlite3" + DATABASES["default"] = { + "ENGINE": "django.db.backends.sqlite3", + "NAME": str(sqlite_name), + } +elif DEBUG and not force_postgres and not os.getenv("DB_HOST") and not 
os.getenv("DB_NAME") and not os.getenv("DB_USER"): + DATABASES["default"] = { + "ENGINE": "django.db.backends.sqlite3", + "NAME": str(BASE_DIR / "db.sqlite3"), + } +else: + DATABASES["default"] = { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.getenv("DB_NAME", "igny8_db"), + "USER": os.getenv("DB_USER", "igny8"), + "PASSWORD": os.getenv("DB_PASSWORD", "igny8pass"), + "HOST": os.getenv("DB_HOST", "postgres"), + "PORT": os.getenv("DB_PORT", "5432"), + } + +AUTH_PASSWORD_VALIDATORS = [ + {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, + {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'}, + {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'}, + {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}, +] + +LANGUAGE_CODE = 'en-us' +TIME_ZONE = 'UTC' +USE_I18N = True +USE_TZ = True + +STATIC_URL = '/static/' +STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') + +DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + +# Only use SECURE_PROXY_SSL_HEADER in production behind reverse proxy +# Default to False - set USE_SECURE_PROXY_HEADER=True in docker-compose for production +# Caddy sets X-Forwarded-Proto header, so enable this when behind Caddy +USE_SECURE_PROXY = os.getenv('USE_SECURE_PROXY_HEADER', 'False').lower() == 'true' +if USE_SECURE_PROXY: + SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') +else: + SECURE_PROXY_SSL_HEADER = None + +# Admin login URL - use relative URL to avoid hardcoded domain +LOGIN_URL = '/admin/login/' +LOGIN_REDIRECT_URL = '/admin/' + +# Force Django to use request.get_host() instead of Sites framework +# This ensures redirects use the current request's host +USE_X_FORWARDED_HOST = False + +# REST Framework Configuration +REST_FRAMEWORK = { + 'DEFAULT_PAGINATION_CLASS': 'igny8_core.api.pagination.CustomPageNumberPagination', + 'PAGE_SIZE': 10, + 'DEFAULT_FILTER_BACKENDS': [ + 
'django_filters.rest_framework.DjangoFilterBackend', + 'rest_framework.filters.SearchFilter', + 'rest_framework.filters.OrderingFilter', + ], + 'DEFAULT_PERMISSION_CLASSES': [ + 'igny8_core.api.permissions.IsAuthenticatedAndActive', + 'igny8_core.api.permissions.HasTenantAccess', + ], + 'DEFAULT_AUTHENTICATION_CLASSES': [ + 'igny8_core.api.authentication.APIKeyAuthentication', # WordPress API key authentication (check first) + 'igny8_core.api.authentication.JWTAuthentication', # JWT token authentication + 'igny8_core.api.authentication.CSRFExemptSessionAuthentication', # Session auth without CSRF for API + 'rest_framework.authentication.BasicAuthentication', # Enable basic auth as fallback + ], + # Unified API Standard v1.0 Configuration + # Exception handler - wraps all errors in unified format + # Unified API Standard v1.0: Exception handler enabled by default + # Set IGNY8_USE_UNIFIED_EXCEPTION_HANDLER=False to disable + 'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler' if os.getenv('IGNY8_USE_UNIFIED_EXCEPTION_HANDLER', 'True').lower() == 'false' else 'igny8_core.api.exception_handlers.custom_exception_handler', + # Rate limiting - configured but bypassed in DEBUG mode + 'DEFAULT_THROTTLE_CLASSES': [ + 'igny8_core.api.throttles.DebugScopedRateThrottle', + ], + 'DEFAULT_THROTTLE_RATES': { + # AI Functions - Expensive operations (kept modest but higher to reduce false 429s) + 'ai_function': '60/min', + 'image_gen': '90/min', + # Content Operations + 'content_write': '180/min', + 'content_read': '600/min', + # Authentication + 'auth': '300/min', # Login, register, password reset + 'auth_strict': '120/min', # Sensitive auth operations + 'auth_read': '600/min', # Read-only auth-adjacent endpoints (e.g., subscriptions, industries) + # Planner Operations + 'planner': '300/min', + 'planner_ai': '60/min', + # Writer Operations + 'writer': '300/min', + 'writer_ai': '60/min', + # System Operations + 'system': '600/min', + 'system_admin': '120/min', + # Billing 
Operations + 'billing': '180/min', + 'billing_admin': '60/min', + 'linker': '180/min', + 'optimizer': '60/min', + 'integration': '600/min', + # Default fallback + 'default': '600/min', + }, + # OpenAPI Schema Generation (drf-spectacular) + 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema', +} + +# drf-spectacular Settings for OpenAPI 3.0 Schema Generation +SPECTACULAR_SETTINGS = { + 'TITLE': 'IGNY8 API v1.0', + 'DESCRIPTION': ''' + IGNY8 Unified API Standard v1.0 + + A comprehensive REST API for content planning, creation, and management. + + ## Features + - **Unified Response Format**: All endpoints return consistent JSON structure + - **Layered Authorization**: Authentication → Tenant Access → Role → Site/Sector + - **Centralized Error Handling**: All errors wrapped in unified format + - **Scoped Rate Limiting**: Different limits for different operation types + - **Tenant Isolation**: All resources scoped by account/site/sector + - **Request Tracking**: Every request has a unique ID for debugging + + ## Authentication + All endpoints require JWT Bearer token authentication except: + - `GET /api/v1/system/ping/` - Health check endpoint + - `POST /api/v1/auth/login/` - User login + - `POST /api/v1/auth/register/` - User registration + - `GET /api/v1/auth/plans/` - List subscription plans + - `GET /api/v1/auth/industries/` - List industries + - `GET /api/v1/system/status/` - System status + + Include token in Authorization header: + ``` + Authorization: Bearer + ``` + + ## Response Format + All successful responses follow this format: + ```json + { + "success": true, + "data": {...}, + "message": "Optional success message", + "request_id": "uuid" + } + ``` + + All error responses follow this format: + ```json + { + "success": false, + "error": "Error message", + "errors": { + "field_name": ["Field-specific errors"] + }, + "request_id": "uuid" + } + ``` + + ## Rate Limiting + Rate limits are scoped by operation type. 
Check response headers: + - `X-Throttle-Limit`: Maximum requests allowed + - `X-Throttle-Remaining`: Remaining requests in current window + - `X-Throttle-Reset`: Time when limit resets (Unix timestamp) + + ## Pagination + List endpoints support pagination with query parameters: + - `page`: Page number (default: 1) + - `page_size`: Items per page (default: 10, max: 100) + + Paginated responses include: + ```json + { + "success": true, + "count": 100, + "next": "http://api.igny8.com/api/v1/endpoint/?page=2", + "previous": null, + "results": [...] + } + ``` + ''', + 'VERSION': '1.0.0', + 'SERVE_INCLUDE_SCHEMA': False, + 'SCHEMA_PATH_PREFIX': '/api/v1', + 'COMPONENT_SPLIT_REQUEST': True, + 'COMPONENT_NO_READ_ONLY_REQUIRED': True, + # Custom schema generator to include unified response format + 'SCHEMA_GENERATOR_CLASS': 'drf_spectacular.generators.SchemaGenerator', + # Include request/response examples + 'SERVE_PERMISSIONS': ['rest_framework.permissions.AllowAny'], + 'SERVE_AUTHENTICATION': None, # Allow unauthenticated access to docs + + # Tag configuration - prevent auto-generation and use explicit tags + 'TAGS': [ + {'name': 'Authentication', 'description': 'User authentication and registration'}, + {'name': 'Account', 'description': 'Account settings, team, and usage analytics'}, + {'name': 'Integration', 'description': 'Site integrations and sync'}, + {'name': 'System', 'description': 'Settings, prompts, and integrations'}, + {'name': 'Admin Billing', 'description': 'Admin-only billing management'}, + {'name': 'Billing', 'description': 'Credits, usage, and transactions'}, + {'name': 'Planner', 'description': 'Keywords, clusters, and content ideas'}, + {'name': 'Writer', 'description': 'Tasks, content, and images'}, + {'name': 'Automation', 'description': 'Automation configuration and runs'}, + {'name': 'Linker', 'description': 'Internal linking operations'}, + {'name': 'Optimizer', 'description': 'Content optimization operations'}, + {'name': 'Publisher', 
'description': 'Publishing records and deployments'}, + ], + 'TAGS_ORDER': [ + 'Authentication', + 'Account', + 'Integration', + 'System', + 'Admin Billing', + 'Billing', + 'Planner', + 'Writer', + 'Automation', + 'Linker', + 'Optimizer', + 'Publisher', + ], + # Postprocessing hook to filter out auto-generated tags + 'POSTPROCESSING_HOOKS': ['igny8_core.api.schema_extensions.postprocess_schema_filter_tags'], + + # Swagger UI configuration + 'SWAGGER_UI_SETTINGS': { + 'deepLinking': True, + 'displayOperationId': False, + 'defaultModelsExpandDepth': 1, # Collapse models by default + 'defaultModelExpandDepth': 1, # Collapse model properties by default + 'defaultModelRendering': 'model', # Show models in a cleaner format + 'displayRequestDuration': True, + 'docExpansion': 'none', # Collapse all operations by default + 'filter': True, # Enable filter box + 'showExtensions': True, + 'showCommonExtensions': True, + 'tryItOutEnabled': True, # Enable "Try it out" by default + }, + + # ReDoc configuration + 'REDOC_UI_SETTINGS': { + 'hideDownloadButton': False, + 'hideHostname': False, + 'hideLoading': False, + 'hideSingleRequestSampleTab': False, + 'expandResponses': '200,201', # Expand successful responses + 'jsonSampleExpandLevel': 2, # Expand JSON samples 2 levels + 'hideFab': False, + 'theme': { + 'colors': { + 'primary': { + 'main': '#32329f' + } + } + } + }, + + # Schema presentation improvements + 'SCHEMA_COERCE_PATH_PK': True, + 'SCHEMA_COERCE_METHOD_NAMES': { + 'retrieve': 'get', + 'list': 'list', + 'create': 'post', + 'update': 'put', + 'partial_update': 'patch', + 'destroy': 'delete', + }, + + # Custom response format documentation + 'EXTENSIONS_INFO': { + 'x-code-samples': [ + { + 'lang': 'Python', + 'source': ''' +import requests + +headers = { + 'Authorization': 'Bearer ', + 'Content-Type': 'application/json' +} + +response = requests.get('https://api.igny8.com/api/v1/planner/keywords/', headers=headers) +data = response.json() + +if data['success']: + keywords 
= data['results'] # or data['data'] for single objects +else: + print(f"Error: {data['error']}") + ''' + }, + { + 'lang': 'JavaScript', + 'source': ''' +const response = await fetch('https://api.igny8.com/api/v1/planner/keywords/', { + headers: { + 'Authorization': 'Bearer ', + 'Content-Type': 'application/json' + } +}); + +const data = await response.json(); + +if (data.success) { + const keywords = data.results || data.data; +} else { + console.error('Error:', data.error); +} + ''' + } + ] + } +} + +# CORS Configuration +CORS_ALLOWED_ORIGINS = [ + "https://app.igny8.com", + "https://igny8.com", + "https://www.igny8.com", + "https://sites.igny8.com", + "http://localhost:5173", + "http://localhost:5174", + "http://localhost:5176", + "http://localhost:8024", + "http://localhost:3000", + "http://127.0.0.1:5173", + "http://127.0.0.1:5174", + "http://127.0.0.1:5176", + "http://127.0.0.1:8024", + "http://31.97.144.105:8024", +] + +CORS_ALLOW_CREDENTIALS = True + +# Allow custom headers for resource tracking +# Include default headers plus our custom debug header +CORS_ALLOW_HEADERS = [ + 'accept', + 'accept-encoding', + 'authorization', + 'content-type', + 'dnt', + 'origin', + 'user-agent', + 'x-csrftoken', + 'x-requested-with', + 'x-debug-resource-tracking', # Allow debug tracking header +] + +# Note: django-cors-headers has default headers that include the above. +# If you want to extend defaults, you can import default_headers from corsheaders.defaults +# For now, we're explicitly listing all needed headers including our custom one. 
+ +# Expose custom headers to frontend +CORS_EXPOSE_HEADERS = [ + 'x-resource-tracking-id', # Expose request tracking ID +] + +# JWT Configuration +JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY', SECRET_KEY) +JWT_ALGORITHM = 'HS256' +JWT_ACCESS_TOKEN_EXPIRY = timedelta(minutes=15) +JWT_REFRESH_TOKEN_EXPIRY = timedelta(days=30) # Extended to 30 days for persistent login + +# Celery Configuration +# FIXED: Use redis:// URL with explicit string parameters to avoid Celery backend key serialization issues +CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL', f"redis://{os.getenv('REDIS_HOST', 'redis')}:{os.getenv('REDIS_PORT', '6379')}/0") +CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND', f"redis://{os.getenv('REDIS_HOST', 'redis')}:{os.getenv('REDIS_PORT', '6379')}/0") +CELERY_ACCEPT_CONTENT = ['json'] +CELERY_TASK_SERIALIZER = 'json' +CELERY_RESULT_SERIALIZER = 'json' +CELERY_TIMEZONE = TIME_ZONE +CELERY_ENABLE_UTC = True +CELERY_TASK_TRACK_STARTED = True +CELERY_TASK_TIME_LIMIT = 30 * 60 # 30 minutes +CELERY_TASK_SOFT_TIME_LIMIT = 25 * 60 # 25 minutes +CELERY_WORKER_PREFETCH_MULTIPLIER = 1 +CELERY_WORKER_MAX_TASKS_PER_CHILD = 1000 +# FIXED: Add explicit backend options to prevent key serialization issues +CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = { + 'master_name': 'mymaster' +} if os.getenv('REDIS_SENTINEL_ENABLED', 'false').lower() == 'true' else {} +CELERY_REDIS_BACKEND_USE_SSL = os.getenv('REDIS_SSL_ENABLED', 'false').lower() == 'true' + +# Publish/Sync Logging Configuration +PUBLISH_SYNC_LOG_DIR = os.path.join(BASE_DIR, 'logs', 'publish-sync-logs') +os.makedirs(PUBLISH_SYNC_LOG_DIR, exist_ok=True) + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'verbose': { + 'format': '[{asctime}] [{levelname}] [{name}] {message}', + 'style': '{', + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + 'publish_sync': { + 'format': '[{asctime}] [{levelname}] {message}', + 'style': '{', + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + }, + 'handlers': { + 
'console': { + 'class': 'logging.StreamHandler', + 'formatter': 'verbose', + }, + 'publish_sync_file': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': os.path.join(PUBLISH_SYNC_LOG_DIR, 'publish-sync.log'), + 'maxBytes': 10 * 1024 * 1024, # 10 MB + 'backupCount': 10, + 'formatter': 'publish_sync', + }, + 'wordpress_api_file': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': os.path.join(PUBLISH_SYNC_LOG_DIR, 'wordpress-api.log'), + 'maxBytes': 10 * 1024 * 1024, # 10 MB + 'backupCount': 10, + 'formatter': 'publish_sync', + }, + 'webhook_file': { + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': os.path.join(PUBLISH_SYNC_LOG_DIR, 'webhooks.log'), + 'maxBytes': 10 * 1024 * 1024, # 10 MB + 'backupCount': 10, + 'formatter': 'publish_sync', + }, + }, + 'loggers': { + 'publish_sync': { + 'handlers': ['console', 'publish_sync_file'], + 'level': 'INFO', + 'propagate': False, + }, + 'wordpress_api': { + 'handlers': ['console', 'wordpress_api_file'], + 'level': 'INFO', + 'propagate': False, + }, + 'webhooks': { + 'handlers': ['console', 'webhook_file'], + 'level': 'INFO', + 'propagate': False, + }, + }, +} + +# Billing / Payments configuration +STRIPE_PUBLIC_KEY = os.getenv('STRIPE_PUBLIC_KEY', '') +STRIPE_SECRET_KEY = os.getenv('STRIPE_SECRET_KEY', '') +STRIPE_WEBHOOK_SECRET = os.getenv('STRIPE_WEBHOOK_SECRET', '') +PAYPAL_CLIENT_ID = os.getenv('PAYPAL_CLIENT_ID', '') +PAYPAL_CLIENT_SECRET = os.getenv('PAYPAL_CLIENT_SECRET', '') +PAYPAL_API_BASE = os.getenv('PAYPAL_API_BASE', 'https://api-m.sandbox.paypal.com') diff --git a/tenant/frontend/src/App.tsx b/tenant/frontend/src/App.tsx new file mode 100644 index 00000000..222d4e7c --- /dev/null +++ b/tenant/frontend/src/App.tsx @@ -0,0 +1,843 @@ +import { Suspense, lazy, useEffect } from "react"; +import { Routes, Route, Navigate } from "react-router-dom"; +import { HelmetProvider } from "react-helmet-async"; +import AppLayout from "./layout/AppLayout"; +import { ScrollToTop } 
from "./components/common/ScrollToTop"; +import ProtectedRoute from "./components/auth/ProtectedRoute"; +import ModuleGuard from "./components/common/ModuleGuard"; +import AdminGuard from "./components/auth/AdminGuard"; +import GlobalErrorDisplay from "./components/common/GlobalErrorDisplay"; +import LoadingStateMonitor from "./components/common/LoadingStateMonitor"; +import { useAuthStore } from "./store/authStore"; + +// Auth pages - loaded immediately (needed for login) +import SignIn from "./pages/AuthPages/SignIn"; +import SignUp from "./pages/AuthPages/SignUp"; +import NotFound from "./pages/OtherPage/NotFound"; + +// Lazy load all other pages - only loads when navigated to +const Home = lazy(() => import("./pages/Dashboard/Home")); + +// Planner Module - Lazy loaded +const PlannerDashboard = lazy(() => import("./pages/Planner/Dashboard")); +const Keywords = lazy(() => import("./pages/Planner/Keywords")); +const Clusters = lazy(() => import("./pages/Planner/Clusters")); +const ClusterDetail = lazy(() => import("./pages/Planner/ClusterDetail")); +const Ideas = lazy(() => import("./pages/Planner/Ideas")); +const KeywordOpportunities = lazy(() => import("./pages/Planner/KeywordOpportunities")); + +// Writer Module - Lazy loaded +const WriterDashboard = lazy(() => import("./pages/Writer/Dashboard")); +const Tasks = lazy(() => import("./pages/Writer/Tasks")); +const Content = lazy(() => import("./pages/Writer/Content")); +const ContentView = lazy(() => import("./pages/Writer/ContentView")); +const Drafts = lazy(() => import("./pages/Writer/Drafts")); +const Images = lazy(() => import("./pages/Writer/Images")); +const Review = lazy(() => import("./pages/Writer/Review")); +const Published = lazy(() => import("./pages/Writer/Published")); + +// Automation Module - Lazy loaded +const AutomationPage = lazy(() => import("./pages/Automation/AutomationPage")); + +// Linker Module - Lazy loaded +const LinkerDashboard = lazy(() => import("./pages/Linker/Dashboard")); +const 
LinkerContentList = lazy(() => import("./pages/Linker/ContentList")); + +// Optimizer Module - Lazy loaded +const OptimizerDashboard = lazy(() => import("./pages/Optimizer/Dashboard")); +const OptimizerContentSelector = lazy(() => import("./pages/Optimizer/ContentSelector")); +const AnalysisPreview = lazy(() => import("./pages/Optimizer/AnalysisPreview")); + +// Thinker Module - Lazy loaded +const ThinkerDashboard = lazy(() => import("./pages/Thinker/Dashboard")); +const Prompts = lazy(() => import("./pages/Thinker/Prompts")); +const AuthorProfiles = lazy(() => import("./pages/Thinker/AuthorProfiles")); +const ThinkerProfile = lazy(() => import("./pages/Thinker/Profile")); +const Strategies = lazy(() => import("./pages/Thinker/Strategies")); +const ImageTesting = lazy(() => import("./pages/Thinker/ImageTesting")); + +// Billing Module - Lazy loaded +const Credits = lazy(() => import("./pages/Billing/Credits")); +const Transactions = lazy(() => import("./pages/Billing/Transactions")); +const Usage = lazy(() => import("./pages/Billing/Usage")); +const CreditsAndBilling = lazy(() => import("./pages/Settings/CreditsAndBilling")); +const PurchaseCreditsPage = lazy(() => import("./pages/account/PurchaseCreditsPage")); +const AccountBillingPage = lazy(() => import("./pages/account/AccountBillingPage")); +const PlansAndBillingPage = lazy(() => import("./pages/account/PlansAndBillingPage")); +const AccountSettingsPage = lazy(() => import("./pages/account/AccountSettingsPage")); +const TeamManagementPage = lazy(() => import("./pages/account/TeamManagementPage")); +const UsageAnalyticsPage = lazy(() => import("./pages/account/UsageAnalyticsPage")); + +// Admin Module - Lazy loaded (mixed folder casing in repo, match actual file paths) +const AdminBilling = lazy(() => import("./pages/Admin/AdminBilling")); +const PaymentApprovalPage = lazy(() => import("./pages/admin/PaymentApprovalPage")); +const AdminSystemDashboard = lazy(() => import("./pages/admin/AdminSystemDashboard")); 
+const AdminAllAccountsPage = lazy(() => import("./pages/admin/AdminAllAccountsPage")); +const AdminSubscriptionsPage = lazy(() => import("./pages/admin/AdminSubscriptionsPage")); +const AdminAccountLimitsPage = lazy(() => import("./pages/admin/AdminAccountLimitsPage")); +const AdminAllInvoicesPage = lazy(() => import("./pages/admin/AdminAllInvoicesPage")); +const AdminAllPaymentsPage = lazy(() => import("./pages/admin/AdminAllPaymentsPage")); +const AdminCreditPackagesPage = lazy(() => import("./pages/admin/AdminCreditPackagesPage")); +const AdminCreditCostsPage = lazy(() => import("./pages/Admin/AdminCreditCostsPage")); +const AdminAllUsersPage = lazy(() => import("./pages/admin/AdminAllUsersPage")); +const AdminRolesPermissionsPage = lazy(() => import("./pages/admin/AdminRolesPermissionsPage")); +const AdminActivityLogsPage = lazy(() => import("./pages/admin/AdminActivityLogsPage")); +const AdminSystemSettingsPage = lazy(() => import("./pages/admin/AdminSystemSettingsPage")); +const AdminSystemHealthPage = lazy(() => import("./pages/admin/AdminSystemHealthPage")); +const AdminAPIMonitorPage = lazy(() => import("./pages/admin/AdminAPIMonitorPage")); + +// Reference Data - Lazy loaded +const SeedKeywords = lazy(() => import("./pages/Reference/SeedKeywords")); +const ReferenceIndustries = lazy(() => import("./pages/Reference/Industries")); + +// Setup Pages - Lazy loaded +const IndustriesSectorsKeywords = lazy(() => import("./pages/Setup/IndustriesSectorsKeywords")); + +// Settings - Lazy loaded +const GeneralSettings = lazy(() => import("./pages/Settings/General")); +const ProfileSettingsPage = lazy(() => import("./pages/settings/ProfileSettingsPage")); +const Users = lazy(() => import("./pages/Settings/Users")); +const Subscriptions = lazy(() => import("./pages/Settings/Subscriptions")); +const SystemSettings = lazy(() => import("./pages/Settings/System")); +const AccountSettings = lazy(() => import("./pages/Settings/Account")); +const ModuleSettings = lazy(() => 
import("./pages/Settings/Modules")); +const AISettings = lazy(() => import("./pages/Settings/AI")); +const Plans = lazy(() => import("./pages/Settings/Plans")); +const Industries = lazy(() => import("./pages/Settings/Industries")); +const MasterStatus = lazy(() => import("./pages/Settings/MasterStatus")); +const ApiMonitor = lazy(() => import("./pages/Settings/ApiMonitor")); +const DebugStatus = lazy(() => import("./pages/Settings/DebugStatus")); +const Integration = lazy(() => import("./pages/Settings/Integration")); +const Publishing = lazy(() => import("./pages/Settings/Publishing")); +const Sites = lazy(() => import("./pages/Settings/Sites")); +const ImportExport = lazy(() => import("./pages/Settings/ImportExport")); + +// Sites - Lazy loaded +const SiteList = lazy(() => import("./pages/Sites/List")); +const SiteManage = lazy(() => import("./pages/Sites/Manage")); +const SiteDashboard = lazy(() => import("./pages/Sites/Dashboard")); +const SiteContent = lazy(() => import("./pages/Sites/Content")); +const PageManager = lazy(() => import("./pages/Sites/PageManager")); +const PostEditor = lazy(() => import("./pages/Sites/PostEditor")); +const SiteSettings = lazy(() => import("./pages/Sites/Settings")); +const SyncDashboard = lazy(() => import("./pages/Sites/SyncDashboard")); +const DeploymentPanel = lazy(() => import("./pages/Sites/DeploymentPanel")); + +// Help - Lazy loaded +const Help = lazy(() => import("./pages/Help/Help")); +const Docs = lazy(() => import("./pages/Help/Docs")); +const SystemTesting = lazy(() => import("./pages/Help/SystemTesting")); +const FunctionTesting = lazy(() => import("./pages/Help/FunctionTesting")); + +// Components - Lazy loaded +const Components = lazy(() => import("./pages/Components")); + +// UI Elements - Lazy loaded (rarely used) +const Alerts = lazy(() => import("./pages/Settings/UiElements/Alerts")); +const Avatars = lazy(() => import("./pages/Settings/UiElements/Avatars")); +const Badges = lazy(() => 
import("./pages/Settings/UiElements/Badges")); +const Breadcrumb = lazy(() => import("./pages/Settings/UiElements/Breadcrumb")); +const Buttons = lazy(() => import("./pages/Settings/UiElements/Buttons")); +const ButtonsGroup = lazy(() => import("./pages/Settings/UiElements/ButtonsGroup")); +const Cards = lazy(() => import("./pages/Settings/UiElements/Cards")); +const Carousel = lazy(() => import("./pages/Settings/UiElements/Carousel")); +const Dropdowns = lazy(() => import("./pages/Settings/UiElements/Dropdowns")); +const ImagesUI = lazy(() => import("./pages/Settings/UiElements/Images")); +const Links = lazy(() => import("./pages/Settings/UiElements/Links")); +const List = lazy(() => import("./pages/Settings/UiElements/List")); +const Modals = lazy(() => import("./pages/Settings/UiElements/Modals")); +const Notifications = lazy(() => import("./pages/Settings/UiElements/Notifications")); +const Pagination = lazy(() => import("./pages/Settings/UiElements/Pagination")); +const Popovers = lazy(() => import("./pages/Settings/UiElements/Popovers")); +const PricingTable = lazy(() => import("./pages/Settings/UiElements/PricingTable")); +const Progressbar = lazy(() => import("./pages/Settings/UiElements/Progressbar")); +const Ribbons = lazy(() => import("./pages/Settings/UiElements/Ribbons")); +const Spinners = lazy(() => import("./pages/Settings/UiElements/Spinners")); +const Tabs = lazy(() => import("./pages/Settings/UiElements/Tabs")); +const Tooltips = lazy(() => import("./pages/Settings/UiElements/Tooltips")); +const Videos = lazy(() => import("./pages/Settings/UiElements/Videos")); + +export default function App() { + const isAuthenticated = useAuthStore((state) => state.isAuthenticated); + const refreshUser = useAuthStore((state) => state.refreshUser); + const logout = useAuthStore((state) => state.logout); + + useEffect(() => { + const { token } = useAuthStore.getState(); + if (!isAuthenticated || !token) return; + + refreshUser().catch((error) => { + // Avoid log 
spam on auth pages when token is missing/expired + if (error?.message?.includes('Authentication credentials were not provided')) { + return; + } + console.warn('Session validation failed:', error); + logout(); + }); + }, [isAuthenticated, refreshUser, logout]); + + return ( + <> + + + + + + {/* Auth Routes - Public */} + } /> + } /> + + {/* Protected Routes - Require Authentication */} + + + + } + > + {/* Dashboard */} + + + + } /> + + {/* Planner Module - Redirect dashboard to keywords */} + } /> + + + + + + } /> + + + + + + } /> + + + + + + } /> + + + + + + } /> + + {/* Writer Module - Redirect dashboard to tasks */} + } /> + + + + + + } /> + {/* Writer Content Routes - Order matters: list route must come before detail route */} + + + + + + } /> + {/* Content detail view - matches /writer/content/:id (e.g., /writer/content/10) */} + + + + + + } /> + } /> + + + + + + } /> + + + + + + } /> + + + + + + } /> + + {/* Automation Module */} + + + + } /> + + {/* Linker Module - Redirect dashboard to content */} + } /> + + + + + + } /> + + {/* Optimizer Module - Redirect dashboard to content */} + } /> + + + + + + } /> + + + + + + } /> + + {/* Thinker Module */} + {/* Thinker Module - Redirect dashboard to prompts */} + } /> + + + + + + } /> + + + + + + } /> + + + + + + } /> + + + + + + } /> + + + + + + } /> + + {/* Billing Module */} + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + {/* Account Section - Billing & Management Pages */} + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + {/* Admin Routes */} + {/* Admin Dashboard */} + + + + } /> + + {/* Admin Account Management */} + + + + } /> + + + + } /> + + + + } /> + + {/* Admin Billing Administration */} + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + {/* Admin User Administration */} + + + + } /> + + + + } /> + + + + } /> + + {/* Admin System Configuration */} + + + + } /> + + {/* Admin Monitoring */} + + + + } /> + + + + } /> + + {/* 
Reference Data */} + + + + } /> + + + + } /> + + + + } /> + + {/* Setup Pages */} + + + + } /> + {/* Legacy redirect */} + } /> + + {/* Settings */} + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + {/* Sites Management */} + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + {/* Help */} + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + {/* UI Elements */} + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + + + } /> + + {/* Components (Showcase Page) */} + + + + } /> + + {/* Redirect old notification route */} + + + + } /> + + + {/* Fallback Route */} + } /> + + + + ); +} diff --git a/tenant/frontend/src/components/auth/AdminGuard.tsx b/tenant/frontend/src/components/auth/AdminGuard.tsx new file mode 100644 index 00000000..88b407cf --- /dev/null +++ b/tenant/frontend/src/components/auth/AdminGuard.tsx @@ -0,0 +1,25 @@ +import { ReactNode } from "react"; +import { Navigate } from "react-router-dom"; +import { useAuthStore } from "../../store/authStore"; + +interface AdminGuardProps { + children: ReactNode; +} + +/** + * AdminGuard - restricts access to system account (aws-admin/default) or developer + */ +export default function AdminGuard({ children }: AdminGuardProps) { + const { user } = useAuthStore(); + const role = user?.role; + const accountSlug = user?.account?.slug; + const isSystemAccount = accountSlug === 'aws-admin' || accountSlug === 'default-account' || accountSlug === 'default'; + const allowed = role === 'developer' || 
isSystemAccount; + + if (!allowed) { + return ; + } + + return <>{children}; +} + diff --git a/tenant/frontend/src/components/auth/ProtectedRoute.tsx b/tenant/frontend/src/components/auth/ProtectedRoute.tsx new file mode 100644 index 00000000..f55f851d --- /dev/null +++ b/tenant/frontend/src/components/auth/ProtectedRoute.tsx @@ -0,0 +1,183 @@ +import { useEffect, ReactNode, useState } from "react"; +import { Navigate, useLocation } from "react-router-dom"; +import { useAuthStore } from "../../store/authStore"; +import { useErrorHandler } from "../../hooks/useErrorHandler"; +import { trackLoading } from "../common/LoadingStateMonitor"; +import { fetchAPI } from "../../services/api"; + +interface ProtectedRouteProps { + children: ReactNode; +} + +/** + * ProtectedRoute component - guards routes requiring authentication + * Redirects to /signin if user is not authenticated + */ +export default function ProtectedRoute({ children }: ProtectedRouteProps) { + const { isAuthenticated, loading, user, logout } = useAuthStore(); + const location = useLocation(); + const { addError } = useErrorHandler('ProtectedRoute'); + const [showError, setShowError] = useState(false); + const [errorMessage, setErrorMessage] = useState(''); + const [paymentCheck, setPaymentCheck] = useState<{ + loading: boolean; + hasDefault: boolean; + hasAny: boolean; + }>({ loading: true, hasDefault: false, hasAny: false }); + + const PLAN_ALLOWED_PATHS = [ + '/account/plans', + '/account/billing', + '/account/purchase-credits', + '/account/settings', + '/account/team', + '/account/usage', + '/billing', + ]; + + const isPlanAllowedPath = PLAN_ALLOWED_PATHS.some((prefix) => + location.pathname.startsWith(prefix) + ); + + // Track loading state + useEffect(() => { + trackLoading('auth-loading', loading); + }, [loading]); + + // Fetch payment methods to confirm default method availability + useEffect(() => { + if (!isAuthenticated) { + setPaymentCheck({ loading: false, hasDefault: false, hasAny: false }); 
+ return; + } + + let cancelled = false; + const loadPaymentMethods = async () => { + setPaymentCheck((prev) => ({ ...prev, loading: true })); + try { + const data = await fetchAPI('/v1/billing/payment-methods/'); + const methods = data?.results || []; + const hasAny = methods.length > 0; + // Treat id 14 as the intended default, or any method marked default + const hasDefault = methods.some((m: any) => m.is_default) || methods.some((m: any) => String(m.id) === '14'); + if (!cancelled) { + setPaymentCheck({ loading: false, hasDefault, hasAny }); + } + } catch (err) { + if (!cancelled) { + setPaymentCheck({ loading: false, hasDefault: false, hasAny: false }); + console.warn('ProtectedRoute: failed to fetch payment methods', err); + } + } + }; + + loadPaymentMethods(); + + return () => { + cancelled = true; + }; + }, [isAuthenticated]); + + // Validate account + plan whenever auth/user changes + useEffect(() => { + if (!isAuthenticated) { + return; + } + + if (!user?.account) { + setErrorMessage('This user is not linked to an account. Please contact support.'); + logout(); + return; + } + }, [isAuthenticated, user, logout]); + + // Immediate check on mount: if loading is true, reset it immediately + useEffect(() => { + if (loading) { + console.warn('ProtectedRoute: Loading state is true on mount, resetting immediately'); + useAuthStore.setState({ loading: false }); + } + }, []); + + // Safety timeout: if loading becomes true and stays stuck, show error + useEffect(() => { + if (loading) { + const timeout1 = setTimeout(() => { + setErrorMessage('Authentication check is taking longer than expected. 
This may indicate a network or server issue.'); + setShowError(true); + addError(new Error('Auth loading stuck for 3 seconds'), 'ProtectedRoute'); + }, 3000); + + const timeout2 = setTimeout(() => { + console.error('ProtectedRoute: Loading state stuck for 5 seconds, forcing reset'); + useAuthStore.setState({ loading: false }); + setShowError(false); + }, 5000); + + return () => { + clearTimeout(timeout1); + clearTimeout(timeout2); + }; + } else { + setShowError(false); + } + }, [loading, addError]); + + // Show loading state while checking authentication + if (loading) { + return ( +
+
+
+

Loading...

+ + {showError && ( +
+

+ {errorMessage} +

+ +
+ )} +
+
+ ); + } + + // Redirect to signin if not authenticated + if (!isAuthenticated) { + return ; + } + + // If authenticated but missing an active plan, keep user inside billing/onboarding + const accountStatus = user?.account?.status; + const accountInactive = accountStatus && ['suspended', 'cancelled'].includes(accountStatus); + const missingPlan = user?.account && !user.account.plan; + const missingPayment = !paymentCheck.loading && (!paymentCheck.hasDefault || !paymentCheck.hasAny); + + if ((missingPlan || accountInactive || missingPayment) && !isPlanAllowedPath) { + if (paymentCheck.loading) { + return ( +
+
+
+

Checking billing status...

+
+
+ ); + } + return ; + } + + return <>{children}; +} + diff --git a/tenant/frontend/src/components/common/ModuleGuard.tsx b/tenant/frontend/src/components/common/ModuleGuard.tsx new file mode 100644 index 00000000..a33cea96 --- /dev/null +++ b/tenant/frontend/src/components/common/ModuleGuard.tsx @@ -0,0 +1,41 @@ +import { ReactNode, useEffect } from 'react'; +import { Navigate } from 'react-router-dom'; +import { useSettingsStore } from '../../store/settingsStore'; +import { isModuleEnabled } from '../../config/modules.config'; +import { isUpgradeError } from '../../utils/upgrade'; + +interface ModuleGuardProps { + module: string; + children: ReactNode; + redirectTo?: string; +} + +/** + * ModuleGuard - Protects routes based on module enable status + * Redirects to settings page if module is disabled + */ +export default function ModuleGuard({ module, children, redirectTo = '/settings/modules' }: ModuleGuardProps) { + const { moduleEnableSettings, loadModuleEnableSettings, loading } = useSettingsStore(); + + useEffect(() => { + // Load module enable settings if not already loaded + if (!moduleEnableSettings && !loading) { + loadModuleEnableSettings(); + } + }, [moduleEnableSettings, loading, loadModuleEnableSettings]); + + // While loading, show children (optimistic rendering) + if (loading || !moduleEnableSettings) { + return <>{children}; + } + + // Check if module is enabled + const enabled = isModuleEnabled(module, moduleEnableSettings as any); + + if (!enabled) { + return ; + } + + return <>{children}; +} + diff --git a/tenant/frontend/src/layout/AppSidebar.tsx b/tenant/frontend/src/layout/AppSidebar.tsx new file mode 100644 index 00000000..ea2e4a08 --- /dev/null +++ b/tenant/frontend/src/layout/AppSidebar.tsx @@ -0,0 +1,636 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { Link, useLocation } from "react-router-dom"; + +// Assume these icons are imported from an icon library +import { + ChevronDownIcon, + GridIcon, + 
HorizontaLDots, + ListIcon, + PieChartIcon, + PlugInIcon, + TaskIcon, + BoltIcon, + DocsIcon, + PageIcon, + DollarLineIcon, + FileIcon, + UserIcon, + UserCircleIcon, +} from "../icons"; +import { useSidebar } from "../context/SidebarContext"; +import SidebarWidget from "./SidebarWidget"; +import { APP_VERSION } from "../config/version"; +import { useAuthStore } from "../store/authStore"; +import { useSettingsStore } from "../store/settingsStore"; +import ApiStatusIndicator from "../components/sidebar/ApiStatusIndicator"; + +type NavItem = { + name: string; + icon: React.ReactNode; + path?: string; + subItems?: { name: string; path: string; pro?: boolean; new?: boolean }[]; +}; + +type MenuSection = { + label: string; + items: NavItem[]; +}; + +const AppSidebar: React.FC = () => { + const { isExpanded, isMobileOpen, isHovered, setIsHovered } = useSidebar(); + const location = useLocation(); + const { user, isAuthenticated } = useAuthStore(); + const { moduleEnableSettings, isModuleEnabled: checkModuleEnabled, loadModuleEnableSettings, loading: settingsLoading } = useSettingsStore(); + + // Show admin menu only for system account (aws-admin/default) or developer + const isAwsAdminAccount = Boolean( + user?.account?.slug === 'aws-admin' || + user?.account?.slug === 'default-account' || + user?.account?.slug === 'default' || + user?.role === 'developer' + ); + + // Helper to check if module is enabled - memoized to prevent infinite loops + const moduleEnabled = useCallback((moduleName: string): boolean => { + if (!moduleEnableSettings) return true; // Default to enabled if not loaded + return checkModuleEnabled(moduleName); + }, [moduleEnableSettings, checkModuleEnabled]); + + const [openSubmenu, setOpenSubmenu] = useState<{ + sectionIndex: number; + itemIndex: number; + } | null>(null); + const [subMenuHeight, setSubMenuHeight] = useState>( + {} + ); + const subMenuRefs = useRef>({}); + + const isActive = useCallback( + (path: string) => location.pathname === path, + 
[location.pathname] + ); + + // Load module enable settings on mount (only once) - but only if user is authenticated + useEffect(() => { + // Only load if user is authenticated and settings aren't already loaded + // Skip for non-module pages to reduce unnecessary calls (e.g., account/billing/signup) + const path = location.pathname || ''; + const isModulePage = [ + '/planner', + '/writer', + '/automation', + '/thinker', + '/linker', + '/optimizer', + '/publisher', + '/dashboard', + '/home', + ].some((p) => path.startsWith(p)); + + if (user && isAuthenticated && isModulePage && !moduleEnableSettings && !settingsLoading) { + loadModuleEnableSettings().catch((error) => { + console.warn('Failed to load module enable settings:', error); + }); + } + }, [user, isAuthenticated, location.pathname]); // Only run when user/auth or route changes + + // Define menu sections with useMemo to prevent recreation on every render + // Filter out disabled modules based on module enable settings + // New structure: Dashboard (standalone) → SETUP → WORKFLOW → SETTINGS + const menuSections: MenuSection[] = useMemo(() => { + // SETUP section items (single items, no dropdowns - submenus shown as in-page navigation) + const setupItems: NavItem[] = [ + { + icon: , + name: "Add Keywords", + path: "/setup/add-keywords", + }, + { + icon: , + name: "Sites", + path: "/sites", // Submenus shown as in-page navigation + }, + ]; + + // Add Thinker if enabled (single item, no dropdown) + if (moduleEnabled('thinker')) { + setupItems.push({ + icon: , + name: "Thinker", + path: "/thinker/prompts", // Default to prompts, submenus shown as in-page navigation + }); + } + + // WORKFLOW section items (single items, no dropdowns - submenus shown as in-page navigation) + const workflowItems: NavItem[] = []; + + // Add Planner if enabled (single item, no dropdown) + if (moduleEnabled('planner')) { + workflowItems.push({ + icon: , + name: "Planner", + path: "/planner/keywords", // Default to keywords, submenus 
shown as in-page navigation + }); + } + + // Add Writer if enabled (single item, no dropdown) + if (moduleEnabled('writer')) { + workflowItems.push({ + icon: , + name: "Writer", + path: "/writer/tasks", // Default to tasks, submenus shown as in-page navigation + }); + } + + // Add Automation (always available if Writer is enabled) + if (moduleEnabled('writer')) { + workflowItems.push({ + icon: , + name: "Automation", + path: "/automation", + }); + } + + // Add Linker if enabled (single item, no dropdown) + if (moduleEnabled('linker')) { + workflowItems.push({ + icon: , + name: "Linker", + path: "/linker/content", + }); + } + + // Add Optimizer if enabled (single item, no dropdown) + if (moduleEnabled('optimizer')) { + workflowItems.push({ + icon: , + name: "Optimizer", + path: "/optimizer/content", + }); + } + + return [ + // Dashboard is standalone (no section header) + { + label: "", // Empty label for standalone Dashboard + items: [ + { + icon: , + name: "Dashboard", + path: "/", + }, + ], + }, + { + label: "SETUP", + items: setupItems, + }, + { + label: "WORKFLOW", + items: workflowItems, + }, + { + label: "ACCOUNT", + items: [ + { + icon: , + name: "Account Settings", + path: "/account/settings", + }, + { + icon: , + name: "Plans & Billing", + path: "/account/billing", + }, + { + icon: , + name: "Plans", + path: "/account/plans", + }, + { + icon: , + name: "Team Management", + path: "/account/team", + }, + { + icon: , + name: "Usage & Analytics", + path: "/account/usage", + }, + ], + }, + { + label: "SETTINGS", + items: [ + { + icon: , + name: "Profile Settings", + path: "/settings/profile", + }, + // Integration is admin-only; hide for non-privileged users (handled in render) + { + icon: , + name: "Integration", + path: "/settings/integration", + adminOnly: true, + }, + { + icon: , + name: "Publishing", + path: "/settings/publishing", + }, + { + icon: , + name: "Import / Export", + path: "/settings/import-export", + }, + ], + }, + { + label: "HELP & DOCS", + 
items: [ + { + icon: , + name: "Help & Documentation", + path: "/help", + }, + ], + }, + ]; + }, [moduleEnabled]); + + // Admin section - only shown for users in aws-admin account + const adminSection: MenuSection = useMemo(() => ({ + label: "ADMIN", + items: [ + { + icon: , + name: "System Dashboard", + path: "/admin/dashboard", + }, + { + icon: , + name: "Account Management", + subItems: [ + { name: "All Accounts", path: "/admin/accounts" }, + { name: "Subscriptions", path: "/admin/subscriptions" }, + { name: "Account Limits", path: "/admin/account-limits" }, + ], + }, + { + icon: , + name: "Billing Administration", + subItems: [ + { name: "Billing Overview", path: "/admin/billing" }, + { name: "Invoices", path: "/admin/invoices" }, + { name: "Payments", path: "/admin/payments" }, + { name: "Credit Costs Config", path: "/admin/credit-costs" }, + { name: "Credit Packages", path: "/admin/credit-packages" }, + ], + }, + { + icon: , + name: "User Administration", + subItems: [ + { name: "All Users", path: "/admin/users" }, + { name: "Roles & Permissions", path: "/admin/roles" }, + { name: "Activity Logs", path: "/admin/activity-logs" }, + ], + }, + { + icon: , + name: "System Configuration", + subItems: [ + { name: "System Settings", path: "/admin/system-settings" }, + { name: "AI Settings", path: "/admin/ai-settings" }, + { name: "Module Settings", path: "/admin/module-settings" }, + { name: "Integration Settings", path: "/admin/integration-settings" }, + ], + }, + { + icon: , + name: "Monitoring", + subItems: [ + { name: "System Health", path: "/settings/status" }, + { name: "API Monitor", path: "/settings/api-monitor" }, + { name: "Debug Status", path: "/settings/debug-status" }, + ], + }, + { + icon: , + name: "Developer Tools", + subItems: [ + { name: "Function Testing", path: "/admin/function-testing" }, + { name: "System Testing", path: "/admin/system-testing" }, + { name: "UI Elements", path: "/admin/ui-elements" }, + ], + }, + ], + }), []); + + // Combine 
all sections, including admin if user is in aws-admin account + const allSections = useMemo(() => { + const baseSections = menuSections.map(section => { + // Filter adminOnly items for non-system users + const filteredItems = section.items.filter((item: any) => { + if ((item as any).adminOnly && !isAwsAdminAccount) return false; + return true; + }); + return { ...section, items: filteredItems }; + }); + return isAwsAdminAccount + ? [...baseSections, adminSection] + : baseSections; + }, [isAwsAdminAccount, menuSections, adminSection]); + + useEffect(() => { + const currentPath = location.pathname; + let foundMatch = false; + + // Find the matching submenu for the current path + allSections.forEach((section, sectionIndex) => { + section.items.forEach((nav, itemIndex) => { + if (nav.subItems && !foundMatch) { + const shouldOpen = nav.subItems.some((subItem) => { + if (currentPath === subItem.path) return true; + if (subItem.path !== '/' && currentPath.startsWith(subItem.path + '/')) return true; + return false; + }); + + if (shouldOpen) { + setOpenSubmenu((prev) => { + // Only update if different to prevent infinite loops + if (prev?.sectionIndex === sectionIndex && prev?.itemIndex === itemIndex) { + return prev; + } + return { + sectionIndex, + itemIndex, + }; + }); + foundMatch = true; + } + } + }); + }); + + // If no match found and we're not on a submenu path, don't change the state + // This allows manual toggles to persist + }, [location.pathname, allSections]); + + useEffect(() => { + if (openSubmenu !== null) { + const key = `${openSubmenu.sectionIndex}-${openSubmenu.itemIndex}`; + // Use requestAnimationFrame and setTimeout to ensure DOM is ready + const frameId = requestAnimationFrame(() => { + setTimeout(() => { + const element = subMenuRefs.current[key]; + if (element) { + // scrollHeight should work even when height is 0px due to overflow-hidden + const scrollHeight = element.scrollHeight; + if (scrollHeight > 0) { + setSubMenuHeight((prevHeights) => { + 
// Only update if height changed to prevent infinite loops + if (prevHeights[key] === scrollHeight) { + return prevHeights; + } + return { + ...prevHeights, + [key]: scrollHeight, + }; + }); + } + } + }, 50); + }); + return () => cancelAnimationFrame(frameId); + } + }, [openSubmenu]); + + const handleSubmenuToggle = (sectionIndex: number, itemIndex: number) => { + setOpenSubmenu((prevOpenSubmenu) => { + if ( + prevOpenSubmenu && + prevOpenSubmenu.sectionIndex === sectionIndex && + prevOpenSubmenu.itemIndex === itemIndex + ) { + return null; + } + return { sectionIndex, itemIndex }; + }); + }; + + const renderMenuItems = (items: NavItem[], sectionIndex: number) => ( +
    + {items.map((nav, itemIndex) => ( +
  • + {nav.subItems ? ( + + ) : ( + nav.path && ( + + + {nav.icon} + + {(isExpanded || isHovered || isMobileOpen) && ( + {nav.name} + )} + + ) + )} + {nav.subItems && (isExpanded || isHovered || isMobileOpen) && ( +
    { + subMenuRefs.current[`${sectionIndex}-${itemIndex}`] = el; + }} + className="overflow-hidden transition-all duration-300" + style={{ + height: + openSubmenu?.sectionIndex === sectionIndex && openSubmenu?.itemIndex === itemIndex + ? `${subMenuHeight[`${sectionIndex}-${itemIndex}`]}px` + : "0px", + }} + > +
      + {nav.subItems.map((subItem) => ( +
    • + + {subItem.name} + + {subItem.new && ( + + new + + )} + {subItem.pro && ( + + pro + + )} + + +
    • + ))} +
    +
    + )} +
  • + ))} +
+ ); + + return ( + + ); +}; + +export default AppSidebar; diff --git a/tenant/master-docs/00-system/07-MULTITENANCY-ACCESS-REFERENCE.md b/tenant/master-docs/00-system/07-MULTITENANCY-ACCESS-REFERENCE.md new file mode 100644 index 00000000..78de8df3 --- /dev/null +++ b/tenant/master-docs/00-system/07-MULTITENANCY-ACCESS-REFERENCE.md @@ -0,0 +1,104 @@ +# Multi-Tenancy & Access Reference (Current State) + +## Purpose +Authoritative map of tenant isolation, role access, and payment/API-key handling across the stack. Built from code as of Dec 2025. + +## Core Enforcement Points (backend) +- Middleware: + - `backend/igny8_core/auth/middleware.py` (`AccountContextMiddleware`, ~L1-L220): resolves `request.account` from JWT/API key; blocks inactive/suspended accounts. + - `backend/igny8_core/middleware/request_id.py` (~L1-L70): request ID (not tenancy). + - `backend/igny8_core/middleware/resource_tracker.py` (~L1-L170): metrics (not tenancy). +- Base viewsets: + - `backend/igny8_core/api/base.py` (`AccountModelViewSet`, ~L1-L240): filters by `request.account`; admin/developer/system overrides; sets account on create. + - `backend/igny8_core/api/base.py` (`SiteSectorModelViewSet`, ~L238-L430): additionally filters by site/sector and user’s accessible sites (SiteUserAccess) unless admin/developer/system. +- Permissions: + - `backend/igny8_core/api/permissions.py`: + - `IsAuthenticatedAndActive`, `HasTenantAccess` (default in settings). + - `IsViewerOrAbove`, `IsEditorOrAbove`, `IsAdminOrOwner`. + - `IsSystemAccountOrDeveloper` (system/admin for integrations). + - Module-specific permissions also appear in `backend/igny8_core/auth/permissions.py` (legacy IsOwnerOrAdmin, IsEditorOrAbove, IsViewerOrAbove, AccountPermission). +- Settings defaults: + - `backend/igny8_core/settings.py` REST_FRAMEWORK `DEFAULT_PERMISSION_CLASSES` = `IsAuthenticatedAndActive` + `HasTenantAccess`. + - Auth order: APIKeyAuthentication → JWTAuthentication → CSRFExemptSessionAuthentication → BasicAuth. 
+ - Throttling: `DebugScopedRateThrottle` bypasses throttles for authenticated users/system/debug. +- Models with enforced account/site/sector: + - Base models `AccountBaseModel`, `SiteSectorBaseModel` in `backend/igny8_core/auth/models.py` (top of file). + +## Flow (text flowchart) +``` +Request + -> Middleware: AccountContextMiddleware sets request.account (JWT/API key), validates account status/plan + -> DRF Auth: APIKey/JWT/Session + -> Permissions: IsAuthenticatedAndActive + HasTenantAccess (+ role-specific) + -> ViewSet: + AccountModelViewSet filters by account + SiteSectorModelViewSet filters by account + site/sector + SiteUserAccess + -> Action-specific role checks (IsEditorOrAbove, IsAdminOrOwner, IsSystemAccountOrDeveloper) + -> Business logic (services) + credit checks (billing) + -> Response +``` + +## Module Access (backend ViewSets & guards) +- Accounts/Users/Plans/Subscriptions: + - `auth/views.py`: `UsersViewSet`, `AccountsViewSet`, `SubscriptionsViewSet`, `SiteUserAccessViewSet` (account-scoped via AccountModelViewSet + role guards). + - Roles: owner/admin (or developer/system) can manage; others limited to self (UsersViewSet get_queryset). +- Sites/Sectors: + - `auth/views.py` (`SiteViewSet`, `Sector` actions): SiteSectorModelViewSet enforces account + site/sector + SiteUserAccess; public slug read is AllowAny for active site slug only. +- Planner: + - `modules/planner/views.py` (KeywordViewSet, ClusterViewSet, ContentIdeasViewSet) inherit SiteSectorModelViewSet; require site_id/sector_id; role: typically editor+ for writes. +- Writer: + - `modules/writer/views.py` (TasksViewSet, ContentViewSet, ImagesViewSet, ContentTaxonomyViewSet) inherit SiteSectorModelViewSet; site/sector scoping; editor+ for writes. +- Automation: + - `business/automation/views.py` (AutomationViewSet) inherits AccountModelViewSet/SiteSectorModelViewSet patterns; requires site_id for run/config; role: editor+ for mutate. 
+- System settings (non-integrations): + - `modules/system/views.py` / `settings_views.py`: AccountModelViewSet; role usually admin/owner; authenticated + tenant required. +- Integrations (OpenAI/Runware API keys): + - `modules/system/integration_views.py`: guarded by `IsSystemAccountOrDeveloper` (system account or developer only); tenant-scoped but effectively system-only for keys. +- Billing: + - `modules/billing/views.py`: AccountModelViewSet; `IsAdminOrOwner` for credit transactions/payment methods; balance/usage requires auth + tenant. +- Payments/Payment Methods: + - Payment methods: `AccountPaymentMethodViewSet` account-scoped; IsAuthenticated; default selection per account; admin/owner should manage. + - Payments: `PaymentViewSet` account-scoped; IsAuthenticated; list/available_methods/manual payment for current account only. + +## Frontend Guards +- Route protection: `ProtectedRoute` (auth required, checks account/plan/payment methods), `ModuleGuard` (module enabled), `AdminGuard` (integration/admin pages only for system account or developer). +- Sidebar hides Integration for non-system/developer; admin section shown only for system/developer. + +## AI Key Resolution +- `ai/ai_core.py` `_load_account_settings`: tries tenant IntegrationSettings → system account IntegrationSettings (`aws-admin`/`default-account`/`default`) → Django settings (`OPENAI_API_KEY`, `RUNWARE_API_KEY`). All users run AI with shared keys if tenant keys absent. + +## Throttling +- `api/throttles.py` `DebugScopedRateThrottle`: bypass for authenticated users/system/debug; per-scope rates in `settings.py`. Prevents 429s for normal users. + +## Payment / Billing Workflow (happy path) +1) User authenticates (JWT) → request.account set. +2) Payment methods (account-scoped) fetched via `/v1/billing/payment-methods/available/`; admin/owner can CRUD `/v1/billing/payment-methods/`. +3) Invoices/Payments via billing endpoints (account-scoped; admin/owner). 
+4) Credits used via CreditService on AI/automation calls (backend). + +## Access Summary by Role (runtime enforcement) +- Viewer: read-only where viewsets allow `IsViewerOrAbove`; no writes. +- Editor: can write planner/writer/automation; cannot manage billing/integration. +- Admin/Owner: manage account/team/billing/payment methods; full module writes. +- Developer/System account: cross-tenant overrides in some base filters; integration settings and admin menus. + +## Key Files (with line bands) +- Middleware: `auth/middleware.py` (~L1-220) +- Base viewsets: `api/base.py` (~L1-430) +- Permissions: `api/permissions.py` (~L1-200), `auth/permissions.py` (~L1-120) +- Settings (REST/Throttle): `settings.py` (REST_FRAMEWORK block, ~L200-360) +- AI core key loading: `ai/ai_core.py` (~L1-120) +- Integration settings views: `modules/system/integration_views.py` (~L1-300 main guards; actions throughout) +- Planner views: `modules/planner/views.py` (all ViewSets inherit SiteSectorModelViewSet) +- Writer views: `modules/writer/views.py` +- Automation: `business/automation/views.py`, `services/automation_service.py` +- Billing: `modules/billing/views.py`, `business/billing/services/credit_service.py` +- Payment methods: `modules/billing/views.py` AccountPaymentMethodViewSet +- Frontend guards: `src/components/auth/ProtectedRoute.tsx`, `src/components/auth/AdminGuard.tsx`, `src/components/common/ModuleGuard.tsx` +- Sidebar gating: `src/layout/AppSidebar.tsx` + +## Open Items / Risks +- Ensure public endpoints explicitly override default permissions (e.g., auth register/login, site slug read). +- Validate all viewsets still inherit AccountModelViewSet/SiteSectorModelViewSet after future changes. +- Add automated tests for cross-tenant denial, role gates, plan limits, and integration access.