"""
Performance Profiling and Caching System

This module provides comprehensive performance monitoring, profiling,
and caching utilities to optimize application performance.
"""

import time
import functools
import logging
from typing import Any, Callable, Dict, Optional
from django.core.cache import cache
from django.conf import settings
from django.db import connection
from django.utils import timezone
from contextlib import contextmanager
import json
import hashlib

logger = logging.getLogger(__name__)


class PerformanceProfiler:
    """
    Performance profiler for tracking execution times and database queries.

    Profiling is opt-in via the ``PERFORMANCE_PROFILING_ENABLED`` setting,
    so the ``profile`` context manager is a near no-op when disabled.
    Samples are stored in the Django cache for later analysis.
    """

    def __init__(self):
        # Disabled by default; enable explicitly in settings.
        self.enabled = getattr(settings, 'PERFORMANCE_PROFILING_ENABLED', False)
        self.cache_prefix = 'perf_profile'
        # How long profile samples are kept in the cache, in hours.
        self.retention_hours = 24

    @contextmanager
    def profile(self, operation_name: str):
        """
        Context manager for profiling code execution.

        Args:
            operation_name: Name of the operation being profiled
        """
        if not self.enabled:
            yield
            return

        # perf_counter() is monotonic and high-resolution, so the measured
        # duration is immune to system clock adjustments (time.time() is not).
        start_time = time.perf_counter()
        # NOTE(review): connection.queries is only populated when DEBUG=True,
        # so query_count will read 0 under a standard production
        # configuration — confirm the intended deployment setup.
        start_queries = len(connection.queries)

        try:
            yield
        finally:
            # Record the sample even when the wrapped block raises.
            execution_time = time.perf_counter() - start_time
            query_count = len(connection.queries) - start_queries

            self._store_profile_data(operation_name, execution_time, query_count)

    def _store_profile_data(self, operation_name: str, execution_time: float, query_count: int):
        """
        Store profiling data in cache for analysis.

        Args:
            operation_name: Name of the operation
            execution_time: Time taken to execute, in seconds
            query_count: Number of database queries
        """
        profile_data = {
            'operation': operation_name,
            'execution_time': execution_time,
            'query_count': query_count,
            'timestamp': timezone.now().isoformat()
        }

        # Key includes a second-resolution timestamp so repeated runs of the
        # same operation produce distinct samples.
        cache_key = f"{self.cache_prefix}:{operation_name}:{int(time.time())}"
        cache.set(cache_key, profile_data, timeout=self.retention_hours * 3600)

        # Log slow operations (threshold: 1 second). Lazy %-style args avoid
        # formatting work when the warning level is filtered out.
        if execution_time > 1.0:
            logger.warning(
                "Slow operation detected: %s took %.2fs with %d queries",
                operation_name, execution_time, query_count
            )

    def get_profile_summary(self, operation_name: Optional[str] = None) -> Dict[str, Any]:
        """
        Get performance profile summary.

        Args:
            operation_name: Optional filter by operation name

        Returns:
            dict: Performance summary statistics

        NOTE(review): placeholder implementation — aggregation over the
        cached samples has not been written yet.
        """
        return {
            'total_operations': 0,
            'avg_execution_time': 0.0,
            'avg_query_count': 0,
            'slow_operations': []
        }


def performance_monitor(operation_name: str = None):
    """
    Decorator for monitoring function performance.

    Args:
        operation_name: Optional custom name for the operation; defaults to
            the decorated function's dotted module path.

    Returns:
        Decorated function
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Fall back to "<module>.<name>" when no explicit name was given.
            name = operation_name or f"{func.__module__}.{func.__name__}"
            with PerformanceProfiler().profile(name):
                return func(*args, **kwargs)

        return wrapper
    return decorator


class SmartCache:
    """
    Smart caching system with automatic invalidation and performance optimization.

    Wraps Django's cache framework with deterministic key generation and a
    read-through helper for expensive queries.
    """

    # Sentinel distinguishing "key absent" from a legitimately cached None.
    _MISS = object()

    def __init__(self):
        # Default TTL in seconds; overridable via CACHE_DEFAULT_TIMEOUT.
        self.default_timeout = getattr(settings, 'CACHE_DEFAULT_TIMEOUT', 300)  # 5 minutes
        self.cache_prefix = 'smart_cache'

    def get_cache_key(self, key: str, *args, **kwargs) -> str:
        """
        Generate a consistent cache key with optional parameters.

        Args:
            key: Base cache key
            *args: Additional arguments to include in key
            **kwargs: Additional keyword arguments to include in key

        Returns:
            str: Generated cache key
        """
        # Hash the full argument set so equal inputs always map to the same
        # key; kwargs are sorted to make the serialization order-independent.
        key_data = {
            'base_key': key,
            'args': args,
            'kwargs': sorted(kwargs.items()) if kwargs else []
        }

        # default=str lets non-JSON-serializable arguments participate in the
        # key (stringified); sort_keys keeps the dump stable across calls.
        key_string = json.dumps(key_data, sort_keys=True, default=str)
        # MD5 is used purely for key shortening, not security; 8 hex chars
        # keeps keys compact at the cost of a small collision probability.
        key_hash = hashlib.md5(key_string.encode()).hexdigest()[:8]

        return f"{self.cache_prefix}:{key}:{key_hash}"

    def cached_query(self, cache_key: str, query_func: Callable, timeout: Optional[int] = None) -> Any:
        """
        Cache the result of a database query or expensive operation.

        Args:
            cache_key: Unique key for caching
            query_func: Zero-argument callable that performs the query/operation
            timeout: Cache timeout in seconds (defaults to default_timeout)

        Returns:
            Cached or fresh query result
        """
        timeout = timeout or self.default_timeout

        # Bug fix: use a sentinel default so a cached None counts as a hit.
        # With a plain cache.get(), None results were recomputed on every call
        # because Django's cache.get() returns None for missing keys.
        cached_result = cache.get(cache_key, self._MISS)
        if cached_result is not self._MISS:
            logger.debug("Cache hit for key: %s", cache_key)
            return cached_result

        # Cache miss: execute the callable and store its result.
        logger.debug("Cache miss for key: %s, executing query", cache_key)
        result = query_func()
        cache.set(cache_key, result, timeout=timeout)

        return result

    def invalidate_pattern(self, pattern: str):
        """
        Invalidate cache keys matching a pattern.

        Args:
            pattern: Pattern to match cache keys

        NOTE(review): stub — only logs the request. A real implementation
        needs a backend with pattern deletion (e.g. Redis SCAN + DEL).
        """
        logger.info("Cache invalidation requested for pattern: %s", pattern)

    def get_cache_stats(self) -> Dict[str, Any]:
        """
        Get cache performance statistics.

        Returns:
            dict: Cache statistics (static configuration only; backend hit/miss
            counters are not queried here)
        """
        return {
            'cache_backend': str(cache.__class__),
            'default_timeout': self.default_timeout,
            'prefix': self.cache_prefix
        }


def cached_method(timeout: int = 300, key_prefix: str = None):
    """
    Decorator for caching method results.

    The cache key incorporates the instance identity so that different
    instances of the same class do not share cached results.

    Args:
        timeout: Cache timeout in seconds
        key_prefix: Optional prefix for cache key (defaults to
            "ClassName.method_name")

    Returns:
        Decorated method
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            smart_cache = SmartCache()

            prefix = key_prefix or f"{self.__class__.__name__}.{func.__name__}"

            # Bug fix: the original key ignored `self`, so every instance of
            # the class shared one cache entry for the same arguments. Prefer
            # a stable identifier (Django model `pk`) when the instance has
            # one; fall back to id(self), which is unique only for the
            # instance's lifetime — acceptable within a short cache timeout.
            instance_id = getattr(self, 'pk', None)
            if instance_id is None:
                instance_id = id(self)

            cache_key = smart_cache.get_cache_key(prefix, instance_id, *args, **kwargs)

            # Read-through: return the cached value or compute and store it.
            return smart_cache.cached_query(
                cache_key,
                lambda: func(self, *args, **kwargs),
                timeout=timeout
            )

        return wrapper
    return decorator


def cached_function(timeout: int = 300, key_prefix: str = None):
    """
    Decorator for caching function results.

    Args:
        timeout: Cache timeout in seconds
        key_prefix: Optional prefix for cache key (defaults to the function's
            dotted module path)

    Returns:
        Decorated function
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            store = SmartCache()

            # Derive the key from the prefix plus the call arguments.
            base = key_prefix if key_prefix else f"{func.__module__}.{func.__name__}"
            key = store.get_cache_key(base, *args, **kwargs)

            def compute():
                # Deferred call: only runs on a cache miss.
                return func(*args, **kwargs)

            return store.cached_query(key, compute, timeout=timeout)

        return wrapper
    return decorator


class QueryOptimizer:
    """
    Database query optimization utilities.
    """

    @staticmethod
    def optimize_queryset(queryset, select_related: list = None, prefetch_related: list = None):
        """
        Optimize a queryset with select_related and prefetch_related.

        Args:
            queryset: Django queryset to optimize
            select_related: List of fields for select_related
            prefetch_related: List of fields for prefetch_related

        Returns:
            Optimized queryset (unchanged when neither list is provided)
        """
        optimized = queryset
        if select_related:
            optimized = optimized.select_related(*select_related)
        if prefetch_related:
            optimized = optimized.prefetch_related(*prefetch_related)
        return optimized

    @staticmethod
    def get_query_count():
        """
        Get the current number of database queries executed.

        Returns:
            int: Number of queries recorded on the connection
        """
        return len(connection.queries)

    @staticmethod
    def log_queries(func: Callable) -> Callable:
        """
        Decorator to log database queries for a function.

        Args:
            func: Function to monitor

        Returns:
            Decorated function
        """
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            before = QueryOptimizer.get_query_count()
            result = func(*args, **kwargs)
            executed = QueryOptimizer.get_query_count() - before

            # Warn only on heavy call sites (threshold: 10 queries).
            if executed > 10:
                logger.warning(
                    f"High query count in {func.__name__}: {executed} queries"
                )

            return result

        return wrapper


# Global instances
# Module-level singletons so callers can share one configured object per
# process instead of constructing a new one at every call site.
profiler = PerformanceProfiler()
smart_cache = SmartCache()
query_optimizer = QueryOptimizer()
