import requests
from app.entities.search_response import NewsResponse
from app.core.config import settings
from app.shared.kafka_client.kafka_client import SearchKafkaClient
from typing import Dict, Any
from app.repos.mongodb_repo import NewsRepository
from app.usecases.create_news_response import NewsService

import logging
import traceback

logger = logging.getLogger(__name__)

# You could inject this from elsewhere
repository = NewsRepository(mongo_url=settings.MONGODB_URL, db_name=settings.DATABASE_NAME)
service = NewsService(repository=repository)
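# A dependency-injected variant (sketch only, assuming no other module relies
# on these globals): NewsAPIService could accept `news_service: NewsService`
# in __init__ and store it on self, which would make testing easier.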

class NewsAPIService:

    def __init__(self):
        # Original Kafka client initialization
        self.kafka_client = SearchKafkaClient(
            bootstrap_servers=settings.KAFKA_BOOTSTRAP_SERVERS,
            consumer_group=settings.KAFKA_CONSUMER_GROUP
        )
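        # RapidAPI credentials for the real-time-news-data endpoint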
        self.headers = {
            "x-rapidapi-host": "real-time-news-data.p.rapidapi.com",
            "x-rapidapi-key": settings.RAPID_API_KEY
        }

    async def fetch_news(self, message: Dict[str, Any]):
        """Fetch news from RapidAPI for a task message and persist the result."""
        logger.info("Rapid API started")
        try:
            # Extract task parameters, with defaults where the message omits them
            task_id = message.get("id")
            keywords = message.get("keywords", [])
            max_results = message.get("max_results", 10)
            logger.info("Message: %s", message)
            params = {
                "query": keywords,
                "limit": max_results,
                "time_published": message.get("time_published"),
            }
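            # Note: `keywords` may arrive as a list; requests serializes a list
            # value as repeated query params, so producers should send a string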
            # requests is synchronous and blocks the event loop; an async HTTP
            # client (e.g. httpx) would suit this coroutine better
            response = requests.get(
                settings.RAPID_API_URL,
                headers=self.headers,
                params=params,
                timeout=30,
            )
            response.raise_for_status()
            news_response = NewsResponse(**response.json())
            # Dump the parsed response to disk for debugging; the context
            # manager guarantees the file handle is closed even on error
            with open('news.txt', 'w') as file:
                file.write(f'{news_response}')
            # Save to MongoDB
            try:
                result = await service.handle_news_response(news_response)
                logger.info(f"Successfully saved to MongoDB. Result: {result}")
            except Exception as mongo_error:
                logger.error(f"MongoDB save error: {mongo_error}")
                logger.error(f"MongoDB save traceback: {traceback.format_exc()}")
                raise

            
        except Exception as e:
            logger.error(f"Error processing enhanced search task: {e}")
            logger.error(f"Traceback: {traceback.format_exc()}")
            # Report the failure over Kafka before re-raising, so the
            # producer side learns that this task failed
            await self.kafka_client.send_error_result(
                task_id=message.get("id", "unknown"),
                platform=message.get("platforms"),
                error=str(e)
            )
            raise
        
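
# Minimal usage sketch. Assumptions: the payload keys mirror what fetch_news
# reads; the id, query string, and "7d" time filter are illustrative values,
# not taken from the real producer or the RapidAPI docs.
if __name__ == "__main__":
    import asyncio

    async def _demo():
        api_service = NewsAPIService()
        # Hypothetical task message, shaped like the keys fetch_news reads
        message = {
            "id": "task-123",
            "keywords": "artificial intelligence",  # plain string query
            "max_results": 10,
            "time_published": "7d",  # assumed time filter value
        }
        await api_service.fetch_news(message)

    asyncio.run(_demo())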
