import os
import sys
import logging
import time
from typing import Dict, Any

# Add shared modules to path
sys.path.append('/app')
sys.path.append('/app/shared')

from shared.models.schemas import SearchResponse
from shared.utils.kafka_utils import KafkaClient, TOPICS
from shared.utils.redis_utils import RedisClient, get_cache_key, CACHE_KEYS, CACHE_EXPIRATION

# Configure root logging once at import time: INFO level, timestamped
# single-line records (all service modules share this format).
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Module-level logger named after this module, per stdlib convention.
logger = logging.getLogger(__name__)

class DataProcessingService:
    def __init__(self):
        self.kafka_client = KafkaClient()
        self.redis_client = RedisClient()
        
    def process_search_results(self, search_response: SearchResponse):
        """Process and aggregate search results"""
        try:
            logger.info(f"Processing search results for request: {search_response.request_id}")
            
            # Store results in Redis
            results_key = get_cache_key(CACHE_KEYS['SEARCH_RESULTS'], request_id=search_response.request_id)
            self.redis_client.set(results_key, search_response.dict(), expire=CACHE_EXPIRATION['SEARCH_RESULTS'])
            
            # Update request status
            status_key = get_cache_key(CACHE_KEYS['REQUEST_STATUS'], request_id=search_response.request_id)
            status_update = {
                "status": "completed" if not search_response.error else "failed",
                "progress": 100.0,
                "message": f"Search completed with {search_response.total_results} results" if not search_response.error else f"Search failed: {search_response.error}",
                "result_files": [results_key]
            }
            self.redis_client.hset(status_key, status_update)
            
            logger.info(f"Results processed and stored for request: {search_response.request_id}")
            
        except Exception as e:
            logger.error(f"Error processing search results: {e}")

    def handle_search_result(self, topic: str, message: dict, key: str):
        """Handle incoming search results"""
        try:
            search_response = SearchResponse(**message)
            self.process_search_results(search_response)
            
        except Exception as e:
            logger.error(f"Error handling search result: {e}")

    def start_consuming(self):
        """Start consuming messages from Kafka"""
        logger.info("Starting Data Processing Service...")
        
        try:
            self.kafka_client.consume_messages(
                topics=[TOPICS['SEARCH_RESULTS']],
                group_id='data-processing-service',
                message_handler=self.handle_search_result
            )
        except KeyboardInterrupt:
            logger.info("Data Processing Service stopped by user")
        except Exception as e:
            logger.error(f"Error in Data Processing Service: {e}")
        finally:
            self.cleanup()

    def cleanup(self):
        """Clean up resources"""
        if self.kafka_client:
            self.kafka_client.close()
        if self.redis_client:
            self.redis_client.close()

if __name__ == "__main__":
    # Script entry point: run the consumer loop until interrupted.
    DataProcessingService().start_consuming()
