# -*- coding: utf-8 -*-
"""
Analytics Views
===============

Django REST Framework views for the Adtlas Analytics module.
These views handle API endpoints for analytics data retrieval, reporting,
and dashboard functionality for the Dynamic Ad Insertion (DAI) system.

View Classes:
- SfrAnalyticsViewSet: SFR analytics data CRUD operations
- BouyguesAnalyticsViewSet: Bouygues analytics data CRUD operations
- ImpressionViewSet: Impression tracking and retrieval
- VastResponseViewSet: VAST response data management
- PerformanceMetricViewSet: Performance metrics aggregation
- AnalyticsReportViewSet: Report generation and management
- AnalyticsDashboardView: Dashboard data aggregation
- AnalyticsExportView: Data export functionality

Key Features:
- RESTful API endpoints
- Advanced filtering and search
- Data aggregation and analytics
- Real-time data processing
- Export capabilities
- Performance optimization
- Security and access control
- Caching for improved performance

API Endpoints:
- /api/analytics/sfr/ - SFR analytics data
- /api/analytics/bouygues/ - Bouygues analytics data
- /api/analytics/impressions/ - Impression tracking
- /api/analytics/vast-responses/ - VAST response data
- /api/analytics/performance/ - Performance metrics
- /api/analytics/reports/ - Report management
- /api/analytics/dashboard/ - Dashboard data
- /api/analytics/export/ - Data export

Author: Adtlas Development Team
Version: 1.0.0
Last Updated: 2024
"""

from rest_framework import viewsets, status, permissions
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter, OrderingFilter
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from django.db.models import Q, Sum, Avg, Count, Max, Min
from django.db.models.functions import TruncDate, TruncHour
from django.core.cache import cache
from django.http import HttpResponse
from django.conf import settings
import csv
import json
from datetime import datetime, timedelta
from decimal import Decimal

from .models import (
    SfrAnalytics,
    BouyguesAnalytics,
    Impression,
    VastResponse,
    PerformanceMetric,
    AnalyticsReport
)
from .serializers import (
    SfrAnalyticsSerializer,
    BouyguesAnalyticsSerializer,
    ImpressionSerializer,
    VastResponseSerializer,
    PerformanceMetricSerializer,
    AnalyticsReportSerializer,
    AnalyticsDashboardSerializer,
    AnalyticsExportSerializer
)
from .filters import (
    SfrAnalyticsFilter,
    BouyguesAnalyticsFilter,
    ImpressionFilter,
    VastResponseFilter,
    PerformanceMetricFilter
)
from .permissions import AnalyticsPermission
from .services import AnalyticsService, ReportGenerator, DataExporter


class BaseAnalyticsViewSet(viewsets.ModelViewSet):
    """
    Base viewset for analytics models.
    
    Provides common functionality for all analytics viewsets:
    - Standard CRUD operations
    - Filtering and search
    - Pagination
    - Caching of list responses
    - Permission handling
    
    Attributes:
        permission_classes: Required permissions
        filter_backends: Filtering backends
        ordering: Default ordering (newest first)
        CACHE_TTL: Lifetime of cached list responses, in seconds
    """
    
    permission_classes = [permissions.IsAuthenticated, AnalyticsPermission]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    ordering = ['-created_at']
    
    # Lifetime of cached list responses (seconds).
    CACHE_TTL = 300
    
    def get_queryset(self):
        """
        Get filtered queryset based on user permissions.
        
        Returns:
            QuerySet: Filtered queryset
        """
        queryset = super().get_queryset()
        
        # Filter by user's accessible campaigns/channels if not admin.
        # Finer-grained per-campaign scoping would be implemented here;
        # for now non-staff users only see active records.
        if not self.request.user.is_staff:
            queryset = queryset.filter(is_active=True)
        
        return queryset
    
    def list(self, request, *args, **kwargs):
        """
        List analytics data with caching.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Paginated list of analytics data
        """
        # Generate cache key based on request parameters
        cache_key = self._generate_cache_key(request)
        
        # Explicit None check: an empty (falsy) payload is still a valid
        # cache hit and must not trigger a re-query.
        cached_data = cache.get(cache_key)
        if cached_data is not None:
            return Response(cached_data)
        
        # Get fresh data
        response = super().list(request, *args, **kwargs)
        
        # Cache only successful responses
        if response.status_code == 200:
            cache.set(cache_key, response.data, self.CACHE_TTL)
        
        return response
    
    def _generate_cache_key(self, request):
        """
        Generate a deterministic cache key for the request.
        
        Uses an md5 digest of the query string rather than the built-in
        ``hash()``: ``hash()`` is salted per process (PYTHONHASHSEED), so
        keys would not be shared across workers or survive restarts,
        silently defeating the cache in multi-process deployments.
        
        Args:
            request: HTTP request
            
        Returns:
            str: Cache key scoped to model, user, and query parameters
        """
        import hashlib
        
        model_name = self.get_queryset().model.__name__.lower()
        params = sorted(request.GET.items())
        params_str = '&'.join(f"{k}={v}" for k, v in params)
        digest = hashlib.md5(params_str.encode('utf-8')).hexdigest()
        
        return f"analytics_{model_name}_{request.user.id}_{digest}"
    
    @action(detail=False, methods=['get'])
    def summary(self, request):
        """
        Get summary statistics for the analytics data.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Summary statistics
        """
        queryset = self.filter_queryset(self.get_queryset())
        
        # Single aggregate call fetches both bounds in one query.
        bounds = queryset.aggregate(Min('created_at'), Max('created_at'))
        
        summary_data = {
            'total_records': queryset.count(),
            'date_range': {
                'start': bounds['created_at__min'],
                'end': bounds['created_at__max']
            }
        }
        
        return Response(summary_data)


class SfrAnalyticsViewSet(BaseAnalyticsViewSet):
    """
    ViewSet for SFR Analytics data.
    
    Exposes CRUD operations plus aggregate analytics for the SFR
    provider: audience metrics, market share, and ratings.
    
    Endpoints:
        GET /api/analytics/sfr/ - List SFR analytics
        POST /api/analytics/sfr/ - Create SFR analytics record
        GET /api/analytics/sfr/{id}/ - Retrieve specific record
        PUT /api/analytics/sfr/{id}/ - Update record
        DELETE /api/analytics/sfr/{id}/ - Delete record
        GET /api/analytics/sfr/summary/ - Get summary statistics
        GET /api/analytics/sfr/trends/ - Get trend analysis
        GET /api/analytics/sfr/top-channels/ - Get top performing channels
    """
    
    queryset = SfrAnalytics.objects.all()
    serializer_class = SfrAnalyticsSerializer
    filterset_class = SfrAnalyticsFilter
    search_fields = ['channel__name', 'campaign__name']
    ordering_fields = ['date', 'hour', 'audience_count', 'market_share', 'rating']
    
    @action(detail=False, methods=['get'])
    def trends(self, request):
        """
        Get trend analysis for SFR analytics data.
        
        Args:
            request: HTTP request with date range parameters
            
        Returns:
            Response: Per-day averages ordered chronologically
        """
        records = self.filter_queryset(self.get_queryset())
        
        # Daily roll-up: one row per date with averaged metrics.
        daily = (
            records
            .values('date')
            .annotate(
                avg_audience=Avg('audience_count'),
                avg_market_share=Avg('market_share'),
                avg_rating=Avg('rating'),
                total_records=Count('id'),
            )
            .order_by('date')
        )
        
        return Response(list(daily))
    
    @action(detail=False, methods=['get'])
    def top_channels(self, request):
        """
        Get top performing channels based on SFR analytics.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Ten best channels ranked by average rating
        """
        records = self.filter_queryset(self.get_queryset())
        
        # Rank channels by their mean rating, best first.
        ranking = (
            records
            .values('channel__name', 'channel__id')
            .annotate(
                avg_rating=Avg('rating'),
                avg_market_share=Avg('market_share'),
                total_audience=Sum('audience_count'),
            )
            .order_by('-avg_rating')
        )
        
        return Response(list(ranking[:10]))
    
    @action(detail=False, methods=['get'])
    def hourly_distribution(self, request):
        """
        Get hourly distribution of audience metrics.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Per-hour averages ordered by hour of day
        """
        records = self.filter_queryset(self.get_queryset())
        
        # One row per broadcast hour with averaged audience figures.
        per_hour = (
            records
            .values('hour')
            .annotate(
                avg_audience=Avg('audience_count'),
                avg_rating=Avg('rating'),
                record_count=Count('id'),
            )
            .order_by('hour')
        )
        
        return Response(list(per_hour))


class BouyguesAnalyticsViewSet(BaseAnalyticsViewSet):
    """
    ViewSet for Bouygues Analytics data.
    
    Mirrors the SFR analytics viewset for the Bouygues provider,
    with provider-specific metrics and device-level analysis.
    
    Endpoints:
        GET /api/analytics/bouygues/ - List Bouygues analytics
        POST /api/analytics/bouygues/ - Create analytics record
        GET /api/analytics/bouygues/{id}/ - Retrieve specific record
        PUT /api/analytics/bouygues/{id}/ - Update record
        DELETE /api/analytics/bouygues/{id}/ - Delete record
        GET /api/analytics/bouygues/summary/ - Get summary statistics
        GET /api/analytics/bouygues/device-breakdown/ - Get device analysis
    """
    
    queryset = BouyguesAnalytics.objects.all()
    serializer_class = BouyguesAnalyticsSerializer
    filterset_class = BouyguesAnalyticsFilter
    search_fields = ['channel__name', 'campaign__name']
    ordering_fields = ['date', 'hour', 'viewers', 'share', 'rating_value']
    
    @action(detail=False, methods=['get'])
    def device_breakdown(self, request):
        """
        Get device breakdown analysis.
        
        Sums the per-device counts stored in each record's
        ``device_data`` JSON field across the filtered queryset.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Device totals sorted by count, descending
        """
        records = self.filter_queryset(self.get_queryset())
        
        # Accumulate counts per device name across all JSON payloads.
        totals = {}
        for record in records:
            payload = record.device_data or {}
            for name, qty in payload.items():
                totals[name] = totals.get(name, 0) + qty
        
        # Shape into a list and order by descending count.
        breakdown = [{'device': name, 'count': qty} for name, qty in totals.items()]
        breakdown.sort(key=lambda entry: entry['count'], reverse=True)
        
        return Response(breakdown)


class ImpressionViewSet(BaseAnalyticsViewSet):
    """
    ViewSet for Impression tracking data.
    
    Handles individual ad impression tracking with detailed
    performance metrics and user interaction data.
    
    Endpoints:
        GET /api/analytics/impressions/ - List impressions
        POST /api/analytics/impressions/ - Create impression record
        GET /api/analytics/impressions/{id}/ - Retrieve specific impression
        GET /api/analytics/impressions/summary/ - Get summary statistics
        GET /api/analytics/impressions/completion-analysis/ - Completion analysis
        GET /api/analytics/impressions/geographic-distribution/ - Geographic data
    """
    
    queryset = Impression.objects.all()
    serializer_class = ImpressionSerializer
    filterset_class = ImpressionFilter
    search_fields = ['campaign__name', 'adspot__name', 'channel__name']
    ordering_fields = ['timestamp', 'duration', 'completion_rate']
    
    def create(self, request, *args, **kwargs):
        """
        Create impression with additional processing.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Created impression data
        """
        response = super().create(request, *args, **kwargs)
        
        if response.status_code == status.HTTP_201_CREATED:
            # Hook for real-time analytics processing; the Celery task
            # would receive the new impression's primary key.
            # process_impression_analytics.delay(response.data['id'])
            pass
        
        return response
    
    @action(detail=False, methods=['get'])
    def completion_analysis(self, request):
        """
        Get completion rate analysis.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Completion analysis data
        """
        queryset = self.filter_queryset(self.get_queryset())
        
        # Hoist counts that are used more than once so we do not issue
        # duplicate COUNT queries against the database.
        total = queryset.count()
        completed = queryset.filter(completion_rate__gte=75).count()
        
        # Calculate completion rate buckets
        completion_buckets = {
            '0-25%': queryset.filter(completion_rate__lt=25).count(),
            '25-50%': queryset.filter(completion_rate__gte=25, completion_rate__lt=50).count(),
            '50-75%': queryset.filter(completion_rate__gte=50, completion_rate__lt=75).count(),
            '75-100%': completed,
        }
        
        # Calculate additional metrics; `completed` doubles as the
        # 75-100% bucket and the total-completed figure.
        analysis = {
            'completion_buckets': completion_buckets,
            'average_completion_rate': queryset.aggregate(Avg('completion_rate'))['completion_rate__avg'],
            'total_completed': completed,
            'total_viewable': queryset.filter(completion_rate__gte=50, duration__gte=2).count(),
            'click_through_rate': (
                queryset.filter(click_through=True).count() / total * 100
                if total > 0 else 0
            )
        }
        
        return Response(analysis)
    
    @action(detail=False, methods=['get'])
    def geographic_distribution(self, request):
        """
        Get geographic distribution of impressions.
        
        Aggregates "country - region" pairs from each impression's
        ``geographic_info`` JSON field.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Location counts sorted descending
        """
        queryset = self.filter_queryset(self.get_queryset())
        
        # Aggregate geographic data from JSON fields; records without
        # geographic_info are skipped.
        geographic_data = {}
        for impression in queryset:
            if impression.geographic_info:
                country = impression.geographic_info.get('country', 'Unknown')
                region = impression.geographic_info.get('region', 'Unknown')
                key = f"{country} - {region}"
                geographic_data[key] = geographic_data.get(key, 0) + 1
        
        # Convert to list format
        geo_list = [
            {'location': location, 'count': count}
            for location, count in geographic_data.items()
        ]
        
        return Response(sorted(geo_list, key=lambda x: x['count'], reverse=True))


class VastResponseViewSet(BaseAnalyticsViewSet):
    """
    ViewSet for VAST Response tracking data.
    
    Handles VAST response tracking and performance analysis
    for ad serving optimization.
    
    Endpoints:
        GET /api/analytics/vast-responses/ - List VAST responses
        POST /api/analytics/vast-responses/ - Create VAST response record
        GET /api/analytics/vast-responses/{id}/ - Retrieve specific response
        GET /api/analytics/vast-responses/summary/ - Get summary statistics
        GET /api/analytics/vast-responses/performance-analysis/ - Performance analysis
        GET /api/analytics/vast-responses/error-analysis/ - Error analysis
    """
    
    queryset = VastResponse.objects.all()
    serializer_class = VastResponseSerializer
    filterset_class = VastResponseFilter
    search_fields = ['campaign__name', 'adspot__name', 'vast_url']
    ordering_fields = ['served_at', 'response_time', 'status_code']
    
    @action(detail=False, methods=['get'])
    def performance_analysis(self, request):
        """
        Get VAST response performance analysis.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Performance analysis data
        """
        queryset = self.filter_queryset(self.get_queryset())
        
        # Hoist the slow-response count: it is both a top-level metric
        # and the last latency bucket, so count it once.
        slow_count = queryset.filter(response_time__gte=1000).count()
        
        analysis = {
            'total_responses': queryset.count(),
            'successful_responses': queryset.filter(status_code__range=(200, 299)).count(),
            'average_response_time': queryset.aggregate(Avg('response_time'))['response_time__avg'],
            'fast_responses': queryset.filter(response_time__lt=500).count(),
            'slow_responses': slow_count,
            'response_time_buckets': {
                '0-100ms': queryset.filter(response_time__lt=100).count(),
                '100-500ms': queryset.filter(response_time__gte=100, response_time__lt=500).count(),
                '500-1000ms': queryset.filter(response_time__gte=500, response_time__lt=1000).count(),
                '1000ms+': slow_count,
            }
        }
        
        return Response(analysis)
    
    @action(detail=False, methods=['get'])
    def error_analysis(self, request):
        """
        Get VAST response error analysis.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Error analysis data
        """
        queryset = self.filter_queryset(self.get_queryset())
        
        # Hoist the total so the error-rate computation issues a single
        # COUNT query instead of two.
        total = queryset.count()
        
        # Group by status code
        status_codes = queryset.values('status_code').annotate(
            count=Count('id')
        ).order_by('-count')
        
        # Get the ten most common error messages.
        # NOTE(review): excluding only '' assumes error_message is a
        # non-null CharField with a blank default — NULL values, if the
        # column allows them, would still be included; verify the model.
        error_messages = queryset.exclude(
            error_message=''
        ).values('error_message').annotate(
            count=Count('id')
        ).order_by('-count')[:10]
        
        analysis = {
            'status_code_distribution': list(status_codes),
            'common_errors': list(error_messages),
            'error_rate': (
                queryset.exclude(status_code__range=(200, 299)).count() / total * 100
                if total > 0 else 0
            )
        }
        
        return Response(analysis)


class PerformanceMetricViewSet(BaseAnalyticsViewSet):
    """
    ViewSet for Performance Metrics data.
    
    Serves aggregated performance metrics for campaigns,
    channels, and overall system performance.
    
    Endpoints:
        GET /api/analytics/performance/ - List performance metrics
        POST /api/analytics/performance/ - Create performance record
        GET /api/analytics/performance/{id}/ - Retrieve specific metric
        GET /api/analytics/performance/summary/ - Get summary statistics
        GET /api/analytics/performance/trends/ - Get performance trends
        GET /api/analytics/performance/top-performers/ - Get top performers
    """
    
    queryset = PerformanceMetric.objects.all()
    serializer_class = PerformanceMetricSerializer
    filterset_class = PerformanceMetricFilter
    search_fields = ['campaign__name', 'channel__name']
    ordering_fields = ['date', 'impressions_count', 'ctr', 'revenue', 'roi']
    
    @action(detail=False, methods=['get'])
    def trends(self, request):
        """
        Get performance trends over time.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Daily totals/averages per metric type, in date order
        """
        records = self.filter_queryset(self.get_queryset())
        
        # Roll up by (date, metric_type) pairs.
        timeline = (
            records
            .values('date', 'metric_type')
            .annotate(
                total_impressions=Sum('impressions_count'),
                total_clicks=Sum('clicks_count'),
                avg_ctr=Avg('ctr'),
                total_revenue=Sum('revenue'),
                avg_roi=Avg('roi'),
            )
            .order_by('date')
        )
        
        return Response(list(timeline))
    
    @action(detail=False, methods=['get'])
    def top_performers(self, request):
        """
        Get top performing campaigns and channels.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Ten best campaigns (by revenue) and channels
                (by impressions)
        """
        records = self.filter_queryset(self.get_queryset())
        
        # Campaigns ranked by total revenue, highest first.
        campaign_ranking = (
            records
            .values('campaign__name', 'campaign__id')
            .annotate(
                total_revenue=Sum('revenue'),
                total_impressions=Sum('impressions_count'),
                avg_ctr=Avg('ctr'),
                avg_roi=Avg('roi'),
            )
            .order_by('-total_revenue')
        )
        
        # Channels ranked by total impressions; metrics without a
        # channel are left out.
        channel_ranking = (
            records
            .exclude(channel__isnull=True)
            .values('channel__name', 'channel__id')
            .annotate(
                total_impressions=Sum('impressions_count'),
                total_revenue=Sum('revenue'),
                avg_ctr=Avg('ctr'),
            )
            .order_by('-total_impressions')
        )
        
        return Response({
            'top_campaigns': list(campaign_ranking[:10]),
            'top_channels': list(channel_ranking[:10])
        })


class AnalyticsReportViewSet(BaseAnalyticsViewSet):
    """
    ViewSet for Analytics Report management.
    
    Handles report generation, storage, and retrieval
    for analytics data export and sharing.
    
    Endpoints:
        GET /api/analytics/reports/ - List reports
        POST /api/analytics/reports/ - Generate new report
        GET /api/analytics/reports/{id}/ - Retrieve specific report
        DELETE /api/analytics/reports/{id}/ - Delete report
        GET /api/analytics/reports/{id}/download/ - Download report
        POST /api/analytics/reports/generate/ - Generate custom report
    """
    
    queryset = AnalyticsReport.objects.all()
    serializer_class = AnalyticsReportSerializer
    search_fields = ['name', 'report_type']
    ordering_fields = ['generated_at', 'name', 'file_size']
    
    def create(self, request, *args, **kwargs):
        """
        Create and generate analytics report.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Created report metadata
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        
        # Record which user requested the report before saving.
        serializer.validated_data['generated_by'] = request.user
        report = serializer.save()
        
        # Kick off asynchronous generation of the report file.
        # generate_analytics_report.delay(report.id)
        
        payload = self.get_serializer(report).data
        return Response(payload, status=status.HTTP_201_CREATED)
    
    @action(detail=True, methods=['get'])
    def download(self, request, pk=None):
        """
        Download analytics report file.
        
        Args:
            request: HTTP request
            pk: Report primary key
            
        Returns:
            HttpResponse: File download response
        """
        report = self.get_object()
        
        # Guard: a report with no file on disk, or past its expiry,
        # cannot be downloaded.
        if not report.file_path or report.is_expired:
            return Response(
                {'error': _('Report file not available or expired.')},
                status=status.HTTP_404_NOT_FOUND
            )
        
        report.increment_download_count()
        
        # Return file response (implementation would depend on file storage)
        download = HttpResponse(content_type='application/octet-stream')
        download['Content-Disposition'] = f'attachment; filename="{report.name}"'
        return download
    
    @action(detail=False, methods=['post'])
    def generate(self, request):
        """
        Generate custom analytics report.
        
        Args:
            request: HTTP request with report parameters
            
        Returns:
            Response: Report generation status
        """
        # Placeholder: custom report generation driven by user-provided
        # parameters would be implemented here.
        return Response({
            'message': _('Report generation started.'),
            'status': 'processing'
        })


class AnalyticsDashboardView(APIView):
    """
    Analytics Dashboard API view.
    
    Serves aggregated analytics data for dashboard display,
    including key performance indicators and visualizations.
    
    Endpoints:
        GET /api/analytics/dashboard/ - Get dashboard data
        POST /api/analytics/dashboard/ - Get dashboard with custom parameters
    """
    
    permission_classes = [permissions.IsAuthenticated]
    
    def get(self, request):
        """
        Get default dashboard data.
        
        Args:
            request: HTTP request
            
        Returns:
            Response: Dashboard data for the trailing 30 days
        """
        # Default window: the last 30 days ending today.
        today = timezone.now().date()
        window_start = today - timedelta(days=30)
        
        return Response(self._get_dashboard_data(window_start, today))
    
    def post(self, request):
        """
        Get dashboard data with custom parameters.
        
        Args:
            request: HTTP request with dashboard parameters
            
        Returns:
            Response: Custom dashboard data
        """
        # Validate the caller-supplied date range first.
        serializer = AnalyticsDashboardSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        params = serializer.validated_data
        
        data = self._get_dashboard_data(params['start_date'], params['end_date'])
        return Response(data)
    
    def _get_dashboard_data(self, start_date, end_date):
        """
        Get aggregated dashboard data for a date range.
        
        Args:
            start_date (date): Start date
            end_date (date): End date
            
        Returns:
            dict: Dashboard data
        """
        # Aggregation is delegated to the analytics service layer.
        return AnalyticsService().get_dashboard_data(start_date, end_date)


class AnalyticsExportView(APIView):
    """
    Analytics Data Export API view.
    
    Handles data export requests for various analytics data
    in different formats (CSV, Excel, JSON).
    
    Endpoints:
        POST /api/analytics/export/ - Export analytics data
    """
    
    permission_classes = [permissions.IsAuthenticated]
    
    def post(self, request):
        """
        Export analytics data.
        
        Args:
            request: HTTP request with export parameters
            
        Returns:
            Response: Export status (email delivery) or file download
        """
        serializer = AnalyticsExportSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        
        export_params = serializer.validated_data
        
        if export_params.get('email_delivery', False):
            # Background path: the file is generated asynchronously and
            # emailed to the user, so no exporter is needed here.
            # export_analytics_data.delay(export_params, request.user.id)
            
            return Response({
                'message': _('Export started. You will receive an email when ready.'),
                'status': 'processing'
            })
        
        # Synchronous path: build the exporter only when it is actually
        # used, and return the generated file immediately.
        exporter = DataExporter()
        return exporter.export_data(export_params)