"""Redis cache service for storing and retrieving search results."""

import json
import logging
from typing import List, Dict, Optional, Any
from datetime import datetime
import redis
import hashlib

from ..config.settings import REDIS_URL
from ..models.schemas import SearchResult

logger = logging.getLogger(__name__)

class CacheService:
    """Redis cache service for search results."""
    
    def __init__(self):
        self.redis_client: Optional[redis.Redis] = None
        self.connected = False
        self.default_ttl = 3600  # 1 hour
        self.key_prefix = "scraper:"
    
    def connect(self):
        """Connect to Redis."""
        try:
            self.redis_client = redis.from_url(REDIS_URL, decode_responses=True)
            
            # Test connection
            self.redis_client.ping()
            self.connected = True
            logger.info("Redis cache connected successfully")
            
        except Exception as e:
            logger.error(f"Redis connection failed: {e}")
            self.connected = False
    
    def disconnect(self):
        """Disconnect from Redis."""
        if self.redis_client:
            self.redis_client.close()
            self.connected = False
            logger.info("Redis cache disconnected")
    
    def _generate_cache_key(self, query: str, engines: List[str], max_results: int) -> str:
        """Generate cache key for search parameters."""
        # Create a hash of search parameters
        params = {
            'query': query.lower().strip(),
            'engines': sorted(engines),
            'max_results': max_results
        }
        
        params_str = json.dumps(params, sort_keys=True)
        # MD5 is used purely for key derivation here, not for security
        hash_key = hashlib.md5(params_str.encode()).hexdigest()
        
        return f"{self.key_prefix}search:{hash_key}"
    
    def get_cached_results(self, query: str, engines: List[str], max_results: int) -> Optional[List[SearchResult]]:
        """Get cached search results."""
        if not self.connected:
            return None
        
        try:
            cache_key = self._generate_cache_key(query, engines, max_results)
            cached_data = self.redis_client.get(cache_key)
            
            if cached_data:
                data = json.loads(cached_data)
                
                # Convert back to SearchResult objects
                results = []
                for item in data['results']:
                    result = SearchResult(
                        title=item['title'],
                        url=item['url'],
                        description=item['description'],
                        engine=item['engine'],
                        position=item['position'],
                        timestamp=datetime.fromisoformat(item['timestamp'])
                    )
                    results.append(result)
                
                logger.info(f"Cache hit for query: '{query}' with {len(results)} results")
                return results
                
        except Exception as e:
            logger.error(f"Error getting cached results: {e}")
        
        return None
    
    def cache_results(self, query: str, engines: List[str], max_results: int, 
                     results: List[SearchResult], ttl: Optional[int] = None) -> bool:
        """Cache search results."""
        if not self.connected:
            return False
        
        try:
            cache_key = self._generate_cache_key(query, engines, max_results)
            
            # Convert SearchResult objects to dictionaries
            results_data = []
            for result in results:
                result_dict = {
                    'title': result.title,
                    'url': result.url,
                    'description': result.description,
                    'engine': result.engine,
                    'position': result.position,
                    'timestamp': result.timestamp.isoformat()
                }
                results_data.append(result_dict)
            
            cache_data = {
                'query': query,
                'engines': engines,
                'max_results': max_results,
                'results': results_data,
                'cached_at': datetime.now().isoformat(),
                'result_count': len(results)
            }
            
            # Store in Redis with TTL
            cache_ttl = ttl or self.default_ttl
            self.redis_client.setex(
                cache_key, 
                cache_ttl, 
                json.dumps(cache_data)
            )
            
            logger.info(f"Cached {len(results)} results for query: '{query}' (TTL: {cache_ttl}s)")
            return True
            
        except Exception as e:
            logger.error(f"Error caching results: {e}")
            return False
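    # Round-trip sketch (assumes a reachable Redis instance and SearchResult
    # objects carrying the fields serialized above):
    #
    #   cache_service.cache_results("python redis", ["google"], 10, results)
    #   hit = cache_service.get_cached_results("python redis", ["google"], 10)
    #   # `hit` equals `results` on success; None on a miss or if Redis is down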
    
    def invalidate_cache(self, pattern: Optional[str] = None) -> int:
        """Delete cache entries, optionally filtered by a key pattern."""
        if not self.connected:
            return 0
        
        try:
            if pattern:
                # Collect keys matching the pattern; SCAN avoids the
                # server-blocking behavior of KEYS on large keyspaces
                keys = list(self.redis_client.scan_iter(f"{self.key_prefix}{pattern}"))
            else:
                # Collect all cache keys under our prefix
                keys = list(self.redis_client.scan_iter(f"{self.key_prefix}*"))
            
            if keys:
                deleted = self.redis_client.delete(*keys)
                logger.info(f"Invalidated {deleted} cache entries")
                return deleted
            
        except Exception as e:
            logger.error(f"Error invalidating cache: {e}")
        
        return 0
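    # Usage sketch: the pattern is appended to the key prefix, so "search:*"
    # targets only search entries, while no argument wipes every key we own:
    #
    #   cache_service.invalidate_cache("search:*")  # search results only
    #   cache_service.invalidate_cache()            # all "scraper:" keys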
    
    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics."""
        if not self.connected:
            return {'connected': False}
        
        try:
            info = self.redis_client.info()
            # Count keys with SCAN rather than KEYS to avoid blocking the server
            total_keys = sum(1 for _ in self.redis_client.scan_iter(f"{self.key_prefix}*"))
            
            return {
                'connected': True,
                'total_keys': total_keys,
                'used_memory': info.get('used_memory_human', 'N/A'),
                'connected_clients': info.get('connected_clients', 0),
                'redis_version': info.get('redis_version', 'N/A'),
                'uptime_seconds': info.get('uptime_in_seconds', 0)
            }
            
        except Exception as e:
            logger.error(f"Error getting cache stats: {e}")
            return {'connected': False, 'error': str(e)}
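    # Shape of the returned mapping on a healthy connection (values are
    # illustrative, not measured):
    #
    #   {'connected': True, 'total_keys': 42, 'used_memory': '1.04M',
    #    'connected_clients': 3, 'redis_version': '7.2.4',
    #    'uptime_seconds': 86400}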
    
    def clear_expired_cache(self) -> int:
        """Backfill TTLs on cache keys that lack one.

        Redis evicts expired keys on its own, so there is nothing to clear
        manually; this method instead finds keys under our prefix that were
        stored without an expiration and applies the default TTL to them.
        """
        if not self.connected:
            return 0
        
        try:
            backfilled_count = 0
            
            for key in self.redis_client.scan_iter(f"{self.key_prefix}*"):
                ttl = self.redis_client.ttl(key)
                if ttl == -1:  # Key exists but has no TTL set
                    # Apply the default TTL so the key eventually expires
                    self.redis_client.expire(key, self.default_ttl)
                    backfilled_count += 1
            
            logger.info(f"Set TTL on {backfilled_count} keys that had no expiration")
            return backfilled_count
            
        except Exception as e:
            logger.error(f"Error clearing expired cache: {e}")
            return 0

# Global cache service instance
cache_service = CacheService()
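
# Minimal smoke test, kept behind a __main__ guard. Because of the relative
# imports above, this must be run as a module (`python -m <package>.<module>`,
# path depending on the project layout). SearchResult's fields are inferred
# from the (de)serialization code above; adjust if the real schema differs.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    cache_service.connect()
    if cache_service.connected:
        demo_results = [SearchResult(
            title="Example",
            url="https://example.com",
            description="Demo result",
            engine="google",
            position=1,
            timestamp=datetime.now(),
        )]
        cache_service.cache_results("demo query", ["google"], 10, demo_results, ttl=60)
        print(cache_service.get_cached_results("demo query", ["google"], 10))
        print(cache_service.get_cache_stats())
        cache_service.disconnect()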
