# -*- coding: utf-8 -*-
"""
Adtlas Activities Celery Tasks

This module contains Celery tasks for the activities app,
providing asynchronous processing for activity logging,
data aggregation, cleanup, and analytics.

Features:
    - Asynchronous activity logging
    - Batch activity processing
    - Activity summary generation
    - Automated cleanup tasks
    - Analytics calculations
    - Report generation
    - Security monitoring

Author: Adtlas Development Team
Version: 1.0.0
Last Updated: 2025-01-27
"""

import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional, Tuple

from celery import shared_task
from django.conf import settings
from django.utils import timezone
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.contrib.auth import get_user_model
from django.db import transaction
from django.db.models import Avg, Count
from django.db.models.functions import ExtractHour
from django.core.cache import cache

from .models import Activity, ActivityCategory, ActivitySummary
from .utils import (
    get_activity_statistics, cleanup_old_activities,
    get_security_alerts, export_activities_to_csv
)
from .signals import activity_cleanup_completed

# Get the user model
User = get_user_model()

# Set up logging
logger = logging.getLogger(__name__)


@shared_task(bind=True, max_retries=3)
def log_activity_async(self, user_id: Optional[int], action: str, description: str,
                      category_id: Optional[int] = None, content_type_id: Optional[int] = None,
                      object_id: Optional[str] = None, ip_address: Optional[str] = None,
                      user_agent: Optional[str] = None, is_successful: bool = True,
                      duration_ms: Optional[float] = None, error_message: Optional[str] = None,
                      metadata: Optional[Dict[str, Any]] = None) -> str:
    """
    Log an activity asynchronously.
    
    This task creates an activity record in the background,
    useful for high-traffic scenarios where synchronous logging
    might impact performance.
    
    Args:
        user_id: User ID (optional)
        action: Activity action
        description: Activity description
        category_id: Category ID (optional)
        content_type_id: Content type ID (optional)
        object_id: Object ID (optional)
        ip_address: IP address (optional)
        user_agent: User agent (optional)
        is_successful: Success status
        duration_ms: Duration in milliseconds (optional)
        error_message: Error message (optional)
        metadata: Additional metadata (optional)
    
    Returns:
        str: Activity ID
    """
    try:
        # Create activity
        activity = Activity.objects.create(
            user_id=user_id,
            action=action,
            description=description,
            category_id=category_id,
            content_type_id=content_type_id,
            object_id=object_id,
            ip_address=ip_address,
            user_agent=user_agent,
            is_successful=is_successful,
            duration_ms=duration_ms,
            error_message=error_message,
            metadata=metadata or {}
        )
        
        logger.info(f"Activity logged asynchronously: {activity.id}")
        return str(activity.id)
        
    except Exception as exc:
        logger.error(f"Failed to log activity asynchronously: {str(exc)}")
        
        # Retry with exponential backoff: 60s, then 120s, then 240s
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc
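
# Example (hypothetical call site): dispatch from a view or signal handler so
# request handling is not blocked by the database write. With the default JSON
# serializer, all arguments must be JSON-serializable.
#
#     log_activity_async.delay(
#         user_id=request.user.id,
#         action='login',
#         description='User logged in',
#         ip_address=request.META.get('REMOTE_ADDR'),
#     )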


@shared_task(bind=True, max_retries=3)
def log_activities_bulk(self, activities_data: List[Dict[str, Any]]) -> List[str]:
    """
    Log multiple activities in bulk asynchronously.
    
    This task creates multiple activity records efficiently
    using bulk operations.
    
    Args:
        activities_data: List of activity data dictionaries
    
    Returns:
        list: List of activity IDs
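    
    Example:
        Each dictionary mirrors the keyword arguments of log_activity_async;
        only 'action' and 'description' are required. A hypothetical call:
        
            log_activities_bulk.delay([
                {'action': 'page_view', 'description': 'Viewed dashboard'},
                {'action': 'page_view', 'description': 'Viewed reports',
                 'user_id': 42},
            ])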
    """
    try:
        activities = []
        
        for data in activities_data:
            activity = Activity(
                user_id=data.get('user_id'),
                action=data['action'],
                description=data['description'],
                category_id=data.get('category_id'),
                content_type_id=data.get('content_type_id'),
                object_id=data.get('object_id'),
                ip_address=data.get('ip_address'),
                user_agent=data.get('user_agent'),
                is_successful=data.get('is_successful', True),
                duration_ms=data.get('duration_ms'),
                error_message=data.get('error_message'),
                metadata=data.get('metadata', {})
            )
            activities.append(activity)
        
        # Bulk create activities. Note: bulk_create only populates primary
        # keys on backends that can return inserted IDs (e.g. PostgreSQL);
        # on others the returned IDs may be None.
        created_activities = Activity.objects.bulk_create(activities)
        activity_ids = [str(activity.id) for activity in created_activities]
        
        logger.info(f"Bulk logged {len(activity_ids)} activities")
        return activity_ids
        
    except Exception as exc:
        logger.error(f"Failed to bulk log activities: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc


@shared_task(bind=True, max_retries=3)
def update_activity_summary(self, date: str, user_id: Optional[int] = None,
                           category_id: Optional[int] = None) -> bool:
    """
    Update activity summary for a specific date.
    
    This task calculates and updates activity statistics
    for a given date, user, and/or category.
    
    Args:
        date: Date string (YYYY-MM-DD)
        user_id: User ID (optional)
        category_id: Category ID (optional)
    
    Returns:
        bool: Success status
    """
    try:
        # Parse date
        summary_date = datetime.strptime(date, '%Y-%m-%d').date()
        
        with transaction.atomic():
            # Get or create summary
            summary, created = ActivitySummary.objects.get_or_create(
                date=summary_date,
                user_id=user_id,
                category_id=category_id
            )
            
            # Build activity query
            activities = Activity.objects.filter(
                created_at__date=summary_date
            )
            
            if user_id:
                activities = activities.filter(user_id=user_id)
            
            if category_id:
                activities = activities.filter(category_id=category_id)
            
            # Calculate statistics
            summary.total_activities = activities.count()
            summary.successful_activities = activities.filter(is_successful=True).count()
            summary.failed_activities = activities.filter(is_successful=False).count()
            
            # Calculate average duration with a single aggregate query
            avg_duration = activities.filter(
                duration_ms__isnull=False
            ).aggregate(avg=Avg('duration_ms'))['avg']
            summary.avg_duration_ms = avg_duration or 0
            
            # Count unique IPs
            summary.unique_ips = activities.values('ip_address').distinct().count()
            
            # Update metadata
            summary.metadata = {
                'last_updated': timezone.now().isoformat(),
                'calculation_method': 'celery_task'
            }
            
            summary.save()
            
            logger.info(f"Activity summary updated for {date} (user: {user_id}, category: {category_id})")
            return True
            
    except Exception as exc:
        logger.error(f"Failed to update activity summary: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        return False


@shared_task(bind=True, max_retries=3)
def update_activity_summaries_bulk(
        self,
        summary_keys: List[Tuple[str, Optional[int], Optional[int]]]) -> int:
    """
    Update multiple activity summaries in bulk.
    
    Args:
        summary_keys: List of tuples (date, user_id, category_id)
    
    Returns:
        int: Number of summaries updated
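    
    Example:
        Each key is a (date, user_id, category_id) triple; None leaves that
        dimension unfiltered. A hypothetical call:
        
            update_activity_summaries_bulk.delay([
                ('2025-01-26', None, None),
                ('2025-01-26', 42, None),
            ])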
    """
    try:
        updated_count = 0
        
        for date_str, user_id, category_id in summary_keys:
            # apply() runs the subtask eagerly in the current worker process,
            # so .get() here reads a local result and does not block on the broker
            success = update_activity_summary.apply(
                args=[date_str, user_id, category_id]
            ).get()
            
            if success:
                updated_count += 1
        
        logger.info(f"Bulk updated {updated_count} activity summaries")
        return updated_count
        
    except Exception as exc:
        logger.error(f"Failed to bulk update activity summaries: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        return 0


@shared_task(bind=True, max_retries=3)
def cleanup_old_activities_task(self, days_to_keep: int = 90) -> Dict[str, Any]:
    """
    Clean up old activities based on retention policy.
    
    This task removes activities older than the specified
    number of days to maintain database performance.
    
    Args:
        days_to_keep: Number of days to keep activities
    
    Returns:
        dict: Cleanup results
    """
    try:
        start_time = timezone.now()
        
        # Perform cleanup
        deleted_count = cleanup_old_activities(days_to_keep)
        
        end_time = timezone.now()
        duration = (end_time - start_time).total_seconds()
        
        # Send cleanup completed signal
        activity_cleanup_completed.send(
            sender=self.__class__,
            deleted_count=deleted_count
        )
        
        result = {
            'deleted_count': deleted_count,
            'days_to_keep': days_to_keep,
            'duration_seconds': duration,
            'completed_at': end_time.isoformat()
        }
        
        logger.info(f"Activity cleanup completed: {deleted_count} activities deleted")
        return result
        
    except Exception as exc:
        logger.error(f"Failed to cleanup old activities: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc


@shared_task(bind=True, max_retries=3)
def generate_daily_activity_report(self, date: str) -> Dict[str, Any]:
    """
    Generate daily activity report.
    
    This task creates a comprehensive report of activities
    for a specific date.
    
    Args:
        date: Date string (YYYY-MM-DD)
    
    Returns:
        dict: Activity report data
    """
    try:
        # Parse date
        report_date = datetime.strptime(date, '%Y-%m-%d').date()
        
        # Get activities for the date
        activities = Activity.objects.filter(
            created_at__date=report_date
        )
        
        # Calculate overall statistics
        overall_stats = get_activity_statistics(
            start_date=timezone.make_aware(datetime.combine(report_date, datetime.min.time())),
            end_date=timezone.make_aware(datetime.combine(report_date, datetime.max.time()))
        )
        
        # Get top users
        top_users = list(activities.filter(
            user__isnull=False
        ).values(
            'user__email', 'user__first_name', 'user__last_name'
        ).annotate(
            activity_count=Count('id')
        ).order_by('-activity_count')[:10])
        
        # Get category breakdown
        category_breakdown = list(activities.values(
            'category__name', 'category__code'
        ).annotate(
            count=Count('id')
        ).order_by('-count'))
        
        # Get hourly distribution (ExtractHour is portable across database
        # backends, unlike the deprecated .extra() with raw EXTRACT SQL)
        hourly_distribution = list(activities.annotate(
            hour=ExtractHour('created_at')
        ).values('hour').annotate(
            count=Count('id')
        ).order_by('hour'))
        
        # Get error summary
        error_activities = activities.filter(is_successful=False)
        error_summary = {
            'total_errors': error_activities.count(),
            'top_errors': list(error_activities.values(
                'error_message'
            ).annotate(
                count=Count('id')
            ).order_by('-count')[:5])
        }
        
        report = {
            'date': date,
            'overall_stats': overall_stats,
            'top_users': top_users,
            'category_breakdown': category_breakdown,
            'hourly_distribution': hourly_distribution,
            'error_summary': error_summary,
            'generated_at': timezone.now().isoformat()
        }
        
        # Cache the report
        cache_key = f'daily_activity_report_{date}'
        cache.set(cache_key, report, 86400)  # Cache for 24 hours
        
        logger.info(f"Daily activity report generated for {date}")
        return report
        
    except Exception as exc:
        logger.error(f"Failed to generate daily activity report: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc
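
# Note: consumers can read a previously generated report straight from the
# cache (key format defined above, 24-hour TTL), for example:
#     cache.get('daily_activity_report_2025-01-26')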


@shared_task(bind=True, max_retries=3)
def generate_user_activity_report(self, user_id: int, days: int = 30) -> Dict[str, Any]:
    """
    Generate user activity report.
    
    This task creates a comprehensive report of activities
    for a specific user over a given period.
    
    Args:
        user_id: User ID
        days: Number of days to include in the report
    
    Returns:
        dict: User activity report data
    """
    try:
        from .utils import get_user_activity_summary
        
        # Get user
        user = User.objects.get(id=user_id)
        
        # Generate activity summary
        summary = get_user_activity_summary(user_id, days)
        
        # Add user information
        summary['user'] = {
            'id': user.id,
            'email': user.email,
            'full_name': user.get_full_name(),
            'is_active': user.is_active,
            'date_joined': user.date_joined.isoformat()
        }
        
        summary['generated_at'] = timezone.now().isoformat()
        
        # Cache the report
        cache_key = f'user_activity_report_{user_id}_{days}'
        cache.set(cache_key, summary, 3600)  # Cache for 1 hour
        
        logger.info(f"User activity report generated for user {user_id}")
        return summary
        
    except User.DoesNotExist:
        logger.error(f"User {user_id} not found for activity report")
        return {}
        
    except Exception as exc:
        logger.error(f"Failed to generate user activity report: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc


@shared_task(bind=True, max_retries=3)
def monitor_security_alerts(self, hours: int = 24) -> List[Dict[str, Any]]:
    """
    Monitor and process security alerts.
    
    This task checks for security-related activities
    and generates alerts for suspicious behavior.
    
    Args:
        hours: Number of hours to check for alerts
    
    Returns:
        list: List of security alerts
    """
    try:
        # Get security alerts
        alerts = get_security_alerts(hours)
        
        # Process high-severity alerts
        high_severity_alerts = [alert for alert in alerts if alert['severity'] == 'high']
        
        if high_severity_alerts:
            # Send email notifications for high-severity alerts
            send_security_alert_email.delay(high_severity_alerts)
        
        # Cache alerts for dashboard
        cache_key = f'security_alerts_{hours}h'
        cache.set(cache_key, alerts, 3600)  # Cache for 1 hour
        
        logger.info(f"Security monitoring completed: {len(alerts)} alerts found")
        return alerts
        
    except Exception as exc:
        logger.error(f"Failed to monitor security alerts: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc


@shared_task(bind=True, max_retries=3)
def send_security_alert_email(self, alerts: List[Dict[str, Any]]) -> bool:
    """
    Send security alert email notifications.
    
    Args:
        alerts: List of security alerts
    
    Returns:
        bool: Success status
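    
    Example:
        Alert dictionaries come from get_security_alerts; the only key this
        module itself inspects is 'severity' (in monitor_security_alerts),
        and the templates receive the dicts as-is, so any other keys shown
        here are hypothetical:
        
            send_security_alert_email.delay([
                {'severity': 'high', 'description': 'Repeated failed logins'},
            ])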
    """
    try:
        # Get admin email addresses
        admin_emails = getattr(settings, 'SECURITY_ALERT_EMAILS', [])
        
        if not admin_emails:
            logger.warning("No admin emails configured for security alerts")
            return False
        
        # Prepare email content
        subject = f"Security Alert: {len(alerts)} suspicious activities detected"
        
        context = {
            'alerts': alerts,
            'alert_count': len(alerts),
            'timestamp': timezone.now()
        }
        
        # Render email template
        html_message = render_to_string('activities/emails/security_alert.html', context)
        plain_message = render_to_string('activities/emails/security_alert.txt', context)
        
        # Send email
        send_mail(
            subject=subject,
            message=plain_message,
            html_message=html_message,
            from_email=settings.DEFAULT_FROM_EMAIL,
            recipient_list=admin_emails,
            fail_silently=False
        )
        
        logger.info(f"Security alert email sent to {len(admin_emails)} recipients")
        return True
        
    except Exception as exc:
        logger.error(f"Failed to send security alert email: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        return False


@shared_task(bind=True, max_retries=3)
def export_activities_task(self, user_id: int, filters: Dict[str, Any],
                          format: str = 'csv') -> Dict[str, Any]:
    """
    Export activities to file asynchronously.
    
    This task exports activities based on filters and
    sends the result to the requesting user.
    
    Args:
        user_id: User ID requesting the export
        filters: Activity filters
        format: Export format ('csv' or 'json')
    
    Returns:
        dict: Export result information
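    
    Example:
        Filter keys map one-to-one onto the queryset filters below; date
        values may be anything Django accepts for a __gte/__lte lookup.
        A hypothetical call:
        
            export_activities_task.delay(
                user_id=1,
                filters={'start_date': '2025-01-01', 'is_successful': False},
                format='csv',
            )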
    """
    try:
        from django.core.files.base import ContentFile
        from django.core.files.storage import default_storage
        
        # Get user
        user = User.objects.get(id=user_id)
        
        # Build activity queryset
        activities = Activity.objects.all()
        
        # Apply filters
        if filters.get('start_date'):
            activities = activities.filter(created_at__gte=filters['start_date'])
        
        if filters.get('end_date'):
            activities = activities.filter(created_at__lte=filters['end_date'])
        
        if filters.get('user_id'):
            activities = activities.filter(user_id=filters['user_id'])
        
        if filters.get('category_id'):
            activities = activities.filter(category_id=filters['category_id'])
        
        if filters.get('is_successful') is not None:
            activities = activities.filter(is_successful=filters['is_successful'])
        
        # Export data
        if format == 'csv':
            content = export_activities_to_csv(activities)
            filename = f'activities_export_{timezone.now().strftime("%Y%m%d_%H%M%S")}.csv'
            content_type = 'text/csv'
        else:
            from .utils import export_activities_to_json
            content = export_activities_to_json(activities)
            filename = f'activities_export_{timezone.now().strftime("%Y%m%d_%H%M%S")}.json'
            content_type = 'application/json'
        
        # Save file (encode once so the reported size is in bytes, not characters)
        payload = content.encode('utf-8')
        file_path = f'exports/activities/{filename}'
        saved_path = default_storage.save(file_path, ContentFile(payload))
        
        result = {
            'filename': filename,
            'file_path': saved_path,
            'file_size': len(payload),
            'record_count': activities.count(),
            'format': format,
            'user_id': user_id,
            'created_at': timezone.now().isoformat()
        }
        
        # Send notification email
        send_export_notification_email.delay(user.email, result)
        
        logger.info(f"Activity export completed for user {user_id}: {filename}")
        return result
        
    except User.DoesNotExist:
        logger.error(f"User {user_id} not found for activity export")
        return {}
        
    except Exception as exc:
        logger.error(f"Failed to export activities: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        raise exc


@shared_task(bind=True, max_retries=3)
def send_export_notification_email(self, email: str, export_info: Dict[str, Any]) -> bool:
    """
    Send export completion notification email.
    
    Args:
        email: User email address
        export_info: Export information
    
    Returns:
        bool: Success status
    """
    try:
        subject = "Activity Export Completed"
        
        context = {
            'export_info': export_info,
            'download_url': f"{settings.SITE_URL}/activities/exports/{export_info['filename']}"
        }
        
        # Render email template
        html_message = render_to_string('activities/emails/export_notification.html', context)
        plain_message = render_to_string('activities/emails/export_notification.txt', context)
        
        # Send email
        send_mail(
            subject=subject,
            message=plain_message,
            html_message=html_message,
            from_email=settings.DEFAULT_FROM_EMAIL,
            recipient_list=[email],
            fail_silently=False
        )
        
        logger.info(f"Export notification email sent to {email}")
        return True
        
    except Exception as exc:
        logger.error(f"Failed to send export notification email: {str(exc)}")
        
        # Retry the task
        if self.request.retries < self.max_retries:
            raise self.retry(countdown=60 * (2 ** self.request.retries))
        
        return False


# Periodic tasks (configured in Celery Beat)
@shared_task
def daily_activity_summary_task():
    """
    Daily task to update activity summaries.
    
    This task runs daily to update activity summaries
    for the previous day.
    """
    yesterday = (timezone.now() - timedelta(days=1)).date()
    
    # Update overall summary
    update_activity_summary.delay(yesterday.strftime('%Y-%m-%d'))
    
    # Update category summaries
    categories = ActivityCategory.objects.filter(is_active=True)
    for category in categories:
        update_activity_summary.delay(
            yesterday.strftime('%Y-%m-%d'),
            category_id=category.id
        )
    
    # Generate daily report
    generate_daily_activity_report.delay(yesterday.strftime('%Y-%m-%d'))
    
    logger.info(f"Daily activity summary task completed for {yesterday}")


@shared_task
def weekly_cleanup_task():
    """
    Weekly task to clean up old activities.
    
    This task runs weekly to remove old activities
    based on the retention policy.
    """
    retention_days = getattr(settings, 'ACTIVITIES_RETENTION_DAYS', 90)
    cleanup_old_activities_task.delay(retention_days)
    
    logger.info("Weekly cleanup task initiated")


@shared_task
def hourly_security_monitoring_task():
    """
    Hourly task to monitor security alerts.
    
    This task runs hourly to check for security-related
    activities and generate alerts.
    """
    monitor_security_alerts.delay(hours=1)
    
    logger.info("Hourly security monitoring task completed")