AI functions and other automation fixes
This commit is contained in:
@@ -432,7 +432,8 @@ class AIEngine:
|
||||
final_save_msg = save_msg
|
||||
|
||||
# Phase 5.5: DEDUCT CREDITS - Deduct credits after successful save
|
||||
if self.account and raw_response:
|
||||
logger.info(f"[AIEngine] Credit deduction check: account={self.account is not None}, raw_response={raw_response is not None}")
|
||||
if self.account and raw_response is not None:
|
||||
try:
|
||||
from igny8_core.business.billing.services.credit_service import CreditService
|
||||
from igny8_core.business.billing.exceptions import InsufficientCreditsError
|
||||
@@ -444,6 +445,12 @@ class AIEngine:
|
||||
tokens_input = raw_response.get('input_tokens', 0)
|
||||
tokens_output = raw_response.get('output_tokens', 0)
|
||||
|
||||
logger.info(
|
||||
f"[AIEngine] Deducting credits: operation={operation_type}, "
|
||||
f"tokens_in={tokens_input}, tokens_out={tokens_output}, "
|
||||
f"model={raw_response.get('model', 'unknown')}"
|
||||
)
|
||||
|
||||
# Extract site_id from save_result (could be from content, cluster, or task)
|
||||
site_id = save_result.get('site_id') or save_result.get('site')
|
||||
|
||||
@@ -468,15 +475,17 @@ class AIEngine:
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"[AIEngine] Credits deducted: {operation_type}, "
|
||||
f"tokens: {tokens_input + tokens_output} ({tokens_input} in, {tokens_output} out)"
|
||||
f"[AIEngine] Credits deducted successfully: {operation_type}, "
|
||||
f"total tokens: {tokens_input + tokens_output} ({tokens_input} in, {tokens_output} out)"
|
||||
)
|
||||
except InsufficientCreditsError as e:
|
||||
# This shouldn't happen since we checked before, but log it
|
||||
logger.error(f"[AIEngine] Insufficient credits during deduction: {e}")
|
||||
except Exception as e:
|
||||
logger.warning(f"[AIEngine] Failed to deduct credits: {e}", exc_info=True)
|
||||
logger.error(f"[AIEngine] Failed to deduct credits: {e}", exc_info=True)
|
||||
# Don't fail the operation if credit deduction fails (for backward compatibility)
|
||||
else:
|
||||
logger.warning(f"[AIEngine] Skipping credit deduction: account={self.account is not None}, raw_response={raw_response is not None}")
|
||||
|
||||
# Phase 6: DONE - Finalization (98-100%)
|
||||
done_msg = self._get_done_message(function_name, save_result)
|
||||
|
||||
@@ -68,7 +68,22 @@ class AutoClusterFunction(BaseAIFunction):
|
||||
f"[AutoCluster] Validation passed: {min_validation['count']} keywords available (min: {min_validation['required']})"
|
||||
)
|
||||
|
||||
# Removed plan limits check
|
||||
# Validate single sector - keywords must all belong to the same sector
|
||||
keywords = Keywords.objects.filter(id__in=ids)
|
||||
if account:
|
||||
keywords = keywords.filter(account=account)
|
||||
|
||||
sector_ids = set(keywords.values_list('sector_id', flat=True))
|
||||
# Remove None values
|
||||
sector_ids.discard(None)
|
||||
|
||||
if len(sector_ids) > 1:
|
||||
logger.warning(f"[AutoCluster] Validation failed: keywords span {len(sector_ids)} sectors")
|
||||
return {
|
||||
'valid': False,
|
||||
'error': f'Keywords must be from a single sector. Selected keywords span {len(sector_ids)} different sectors. Please filter by sector first.',
|
||||
'sector_count': len(sector_ids)
|
||||
}
|
||||
|
||||
return {'valid': True}
|
||||
|
||||
@@ -216,23 +231,23 @@ class AutoClusterFunction(BaseAIFunction):
|
||||
if not keywords:
|
||||
raise ValueError("No keywords available for saving")
|
||||
|
||||
# Get context from first keyword (account/site/sector already validated at page level)
|
||||
# Get context from first keyword (account/site already validated at page level)
|
||||
first_keyword = keywords[0]
|
||||
account = account or first_keyword.account
|
||||
site = first_keyword.site
|
||||
|
||||
# Get sector if needed
|
||||
from igny8_core.auth.models import Sector
|
||||
sector = first_keyword.sector
|
||||
if not sector and sector_id:
|
||||
try:
|
||||
sector = Sector.objects.get(id=sector_id)
|
||||
except Sector.DoesNotExist:
|
||||
sector = None
|
||||
|
||||
if not account:
|
||||
raise ValueError("Account is required for cluster creation")
|
||||
|
||||
# Build a lookup of keyword text -> keyword object for matching
|
||||
# Keywords may span multiple sectors, so don't filter by sector here
|
||||
keyword_by_text = {
|
||||
kw_obj.keyword.strip().lower(): kw_obj
|
||||
for kw_obj in keywords
|
||||
}
|
||||
|
||||
logger.info(f"[save_output] Processing {len(parsed)} clusters for {len(keywords)} keywords")
|
||||
|
||||
clusters_created = 0
|
||||
keywords_updated = 0
|
||||
|
||||
@@ -253,74 +268,88 @@ class AutoClusterFunction(BaseAIFunction):
|
||||
cluster_keywords = cluster_data.get('keywords', [])
|
||||
|
||||
if not cluster_name or not cluster_keywords:
|
||||
logger.warning(f"[save_output] Skipping cluster with empty name or keywords: {cluster_data}")
|
||||
continue
|
||||
|
||||
# Get or create cluster
|
||||
if sector:
|
||||
cluster, created = Clusters.objects.get_or_create(
|
||||
name=cluster_name,
|
||||
# Match keywords from AI response to actual keyword objects
|
||||
matched_keyword_objects = []
|
||||
for kw_text in cluster_keywords:
|
||||
kw_normalized = kw_text.strip().lower()
|
||||
if kw_normalized in keyword_by_text:
|
||||
matched_keyword_objects.append(keyword_by_text[kw_normalized])
|
||||
|
||||
if not matched_keyword_objects:
|
||||
logger.warning(f"[save_output] No keywords matched for cluster '{cluster_name}': {cluster_keywords}")
|
||||
continue
|
||||
|
||||
# Determine sector for cluster from the matched keywords
|
||||
# Use the sector from the first matched keyword (all should ideally be same sector)
|
||||
cluster_sector = matched_keyword_objects[0].sector
|
||||
|
||||
# Try to find existing cluster by name (case-insensitive) in same site/sector
|
||||
# This allows reusing clusters even if AI generates slightly different casing
|
||||
existing_cluster = None
|
||||
if cluster_sector:
|
||||
existing_cluster = Clusters.objects.filter(
|
||||
account=account,
|
||||
site=site,
|
||||
sector=sector,
|
||||
defaults={
|
||||
'description': cluster_data.get('description', ''),
|
||||
'status': 'new', # FIXED: Changed from 'active' to 'new'
|
||||
}
|
||||
)
|
||||
sector=cluster_sector,
|
||||
name__iexact=cluster_name,
|
||||
deleted_at__isnull=True # Exclude soft-deleted clusters
|
||||
).first()
|
||||
else:
|
||||
cluster, created = Clusters.objects.get_or_create(
|
||||
name=cluster_name,
|
||||
existing_cluster = Clusters.objects.filter(
|
||||
account=account,
|
||||
site=site,
|
||||
sector__isnull=True,
|
||||
defaults={
|
||||
'description': cluster_data.get('description', ''),
|
||||
'status': 'new', # FIXED: Changed from 'active' to 'new'
|
||||
'sector': None,
|
||||
}
|
||||
name__iexact=cluster_name,
|
||||
deleted_at__isnull=True
|
||||
).first()
|
||||
|
||||
if existing_cluster:
|
||||
cluster = existing_cluster
|
||||
created = False
|
||||
logger.info(f"[save_output] Found existing cluster '{cluster.name}' (id={cluster.id})")
|
||||
else:
|
||||
# Create new cluster
|
||||
cluster = Clusters.objects.create(
|
||||
name=cluster_name,
|
||||
account=account,
|
||||
site=site,
|
||||
sector=cluster_sector,
|
||||
description=cluster_data.get('description', ''),
|
||||
status='new',
|
||||
)
|
||||
created = True
|
||||
|
||||
if created:
|
||||
clusters_created += 1
|
||||
logger.info(f"[save_output] Created cluster '{cluster_name}' (id={cluster.id}) in sector {cluster_sector.id if cluster_sector else 'None'}")
|
||||
|
||||
# Match and assign keywords (case-insensitive)
|
||||
cluster_keywords_normalized = {kw.strip().lower(): kw.strip() for kw in cluster_keywords}
|
||||
available_keywords_normalized = {
|
||||
kw_obj.keyword.strip().lower(): kw_obj
|
||||
for kw_obj in keywords
|
||||
}
|
||||
|
||||
matched_keyword_objects = []
|
||||
for cluster_kw_normalized, cluster_kw_original in cluster_keywords_normalized.items():
|
||||
if cluster_kw_normalized in available_keywords_normalized:
|
||||
matched_keyword_objects.append(available_keywords_normalized[cluster_kw_normalized])
|
||||
|
||||
# Update matched keywords
|
||||
if matched_keyword_objects:
|
||||
matched_ids = [kw.id for kw in matched_keyword_objects]
|
||||
keyword_filter = Keywords.objects.filter(
|
||||
id__in=matched_ids,
|
||||
account=account
|
||||
)
|
||||
if sector:
|
||||
keyword_filter = keyword_filter.filter(sector=sector)
|
||||
else:
|
||||
keyword_filter = keyword_filter.filter(sector__isnull=True)
|
||||
|
||||
# FIXED: Ensure keywords status updates from 'new' to 'mapped'
|
||||
updated_count = keyword_filter.update(
|
||||
cluster=cluster,
|
||||
status='mapped' # Status changes from 'new' to 'mapped'
|
||||
)
|
||||
keywords_updated += updated_count
|
||||
# Update matched keywords - directly by their IDs, no sector filtering needed
|
||||
# since we already matched them from the input keywords list
|
||||
matched_ids = [kw.id for kw in matched_keyword_objects]
|
||||
updated_count = Keywords.objects.filter(
|
||||
id__in=matched_ids,
|
||||
account=account
|
||||
).update(
|
||||
cluster=cluster,
|
||||
status='mapped'
|
||||
)
|
||||
keywords_updated += updated_count
|
||||
logger.info(f"[save_output] Cluster '{cluster_name}': matched {len(matched_keyword_objects)} keywords, updated {updated_count}")
|
||||
|
||||
# Recalculate cluster metrics
|
||||
# Recalculate cluster metrics for all clusters in this site
|
||||
from django.db.models import Sum, Case, When, F, IntegerField
|
||||
cluster_filter = Clusters.objects.filter(account=account)
|
||||
if sector:
|
||||
cluster_filter = cluster_filter.filter(sector=sector)
|
||||
else:
|
||||
cluster_filter = cluster_filter.filter(sector__isnull=True)
|
||||
|
||||
# Get all cluster IDs that were created/updated in this batch
|
||||
updated_cluster_ids = set()
|
||||
for kw in keywords:
|
||||
if kw.cluster_id:
|
||||
updated_cluster_ids.add(kw.cluster_id)
|
||||
|
||||
# Also include newly created clusters
|
||||
cluster_filter = Clusters.objects.filter(account=account, site=site)
|
||||
|
||||
for cluster in cluster_filter:
|
||||
cluster.keywords_count = Keywords.objects.filter(cluster=cluster).count()
|
||||
|
||||
Reference in New Issue
Block a user