"""Scraping endpoints."""

import logging
import time
from typing import List
from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import JSONResponse

from src.models.schemas import ScrapeRequest
from src.scrapers.multi_engine_manager import multi_engine_manager

logger = logging.getLogger(__name__)
router = APIRouter()
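

def _serialize_result(result, include_engine: bool = True) -> dict:
    """Build a JSON-serializable dict from a single scrape result.

    Factored out of the duplicated response-building code in the handlers
    below; the field names mirror the attributes those handlers already read
    from the result objects.
    """
    data = {
        "title": result.title,
        "url": result.url,
        "description": result.description,
        "position": result.position,
        "timestamp": result.timestamp.isoformat(),
    }
    if include_engine:
        data["engine"] = result.engine
    return data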


@router.post("/fast")
async def scrape_fast(request: ScrapeRequest):
    """Fast parallel scraping endpoint."""
    start_time = time.perf_counter()  # monotonic clock for measuring duration
    
    try:
        # Validate engines
        available_engines = multi_engine_manager.get_available_engines()
        if "all" not in request.engines:
            invalid_engines = [e for e in request.engines if e not in available_engines]
            if invalid_engines:
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid engines: {invalid_engines}. Available: {available_engines}"
                )
        
        # Perform parallel scraping
        results = await multi_engine_manager.scrape_multiple_engines_parallel(request)
        
        # Prepare response
        execution_time = time.perf_counter() - start_time
        # Resolve "all" into the concrete engine names that were used.
        engines_used = multi_engine_manager._resolve_engines(request.engines)
        
        # Group results by engine, reusing the shared serializer defined above
        results_by_engine = {}
        for result in results:
            results_by_engine.setdefault(result.engine, []).append(
                _serialize_result(result, include_engine=False)
            )
        
        return JSONResponse(content={
            "query": request.query,
            "engines_used": engines_used,
            "total_results": len(results),
            "execution_time": round(execution_time, 2),
            "results_by_engine": results_by_engine,
            "all_results": [
                {
                    "title": result.title,
                    "url": result.url,
                    "description": result.description,
                    "engine": result.engine,
                    "position": result.position,
                    "timestamp": result.timestamp.isoformat()
                } for result in results
            ]
        })
        
    except HTTPException:
        # Re-raise deliberate HTTP errors (e.g. the 400 for invalid engines)
        # rather than letting the generic handler below mask them as a 500.
        raise
    except Exception as e:
        logger.exception(f"Error in fast scraping: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/direct")
async def direct_search(
    query: str = Query(..., description="Search query"),
    engines: List[str] = Query(["bing"], description="Search engines to use"),
    max_results: int = Query(5, ge=1, le=50, description="Maximum results per engine"),
    parallel: bool = Query(True, description="Use parallel processing")
):
    """Direct search endpoint with parallel/sequential options."""
    try:
        request = ScrapeRequest(
            query=query,
            engines=engines,
            max_results=max_results
        )
        
        start_time = time.perf_counter()
        
        if parallel:
            results = await multi_engine_manager.scrape_multiple_engines_parallel(request)
        else:
            results = await multi_engine_manager.scrape_multiple_engines_sequential(request)
        
        execution_time = time.perf_counter() - start_time
        
        return JSONResponse(content={
            "query": query,
            "engines": engines,
            "parallel_processing": parallel,
            "execution_time": round(execution_time, 2),
            "total_results": len(results),
            "results": [
                {
                    "title": result.title,
                    "url": result.url,
                    "description": result.description,
                    "engine": result.engine,
                    "position": result.position,
                    "timestamp": result.timestamp.isoformat()
                } for result in results
            ]
        })
        
    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f"Error in direct search: {e}")
        raise HTTPException(status_code=500, detail=str(e))
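

# Mounting sketch (hypothetical: the import path and the "/scrape" prefix are
# illustrative, not confirmed by this module):
#
#     from fastapi import FastAPI
#     from src.api.scraping import router as scraping_router  # path is a guess
#
#     app = FastAPI()
#     app.include_router(scraping_router, prefix="/scrape")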
