"""Enhanced FastAPI application with monitoring, caching, and advanced features."""

import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException, Query, Path
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from typing import Optional, List

from ..models.schemas import (
    ScrapeRequest, ScrapeResponse, ScrapeStatus, 
    DeleteResponse, EnginesResponse, HealthResponse
)
from ..core.scraper_service import scraper_service
from ..services.database import db_service
from ..services.kafka_service import kafka_service
from ..services.cache_service import cache_service
from ..services.monitoring_service import monitoring_service
from ..scrapers.scraper_manager import scraper_manager
from ..config.settings import LOG_LEVEL, LOG_FORMAT

# Configure logging
logging.basicConfig(level=getattr(logging, LOG_LEVEL), format=LOG_FORMAT)
logger = logging.getLogger(__name__)

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager."""
    # Startup
    logger.info("Starting up the enhanced application...")
    
    # Initialize database
    await db_service.connect()
    
    # Initialize Kafka
    kafka_service.connect_producer()
    
    # Initialize cache
    cache_service.connect()
    
    logger.info("Application started successfully")
    
    yield
    
    # Shutdown
    logger.info("Shutting down the application...")
    
    # Close connections
    await db_service.disconnect()
    kafka_service.close()
    cache_service.disconnect()
    scraper_manager.close()
    
    logger.info("Application shut down successfully")

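# The lifespan context manager above replaces the deprecated @app.on_event("startup"/"shutdown") hooks.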
app = FastAPI(
    title="Enhanced Google Scraper API",
    description="Multi-engine search scraper with caching, monitoring, and advanced features",
    version="2.0.0",
    lifespan=lifespan
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Configure properly in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.get("/")
async def root():
    """Root endpoint."""
    return {
        "message": "Enhanced Google Scraper API",
        "version": "2.0.0",
        "features": [
            "Multi-engine scraping",
            "Redis caching",
            "Performance monitoring",
            "Kafka message queue",
            "Advanced analytics"
        ]
    }

@app.get("/health", response_model=HealthResponse)
async def health_check():
    """Enhanced health check endpoint."""
    services = {
        "kafka": kafka_service.healthy,
        "database": db_service.client is not None,
        "cache": cache_service.connected,
        "scraper": True,
        "monitoring": monitoring_service.monitoring_enabled
    }
    
    status = "healthy" if all(services.values()) else "unhealthy"
    
    return HealthResponse(
        status=status,
        services=services
    )

@app.get("/engines", response_model=EnginesResponse)
async def get_engines():
    """Get available search engines."""
    return EnginesResponse(
        available_engines=scraper_manager.get_available_engines(),
        description=scraper_manager.get_engine_descriptions()
    )

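# Example request body for POST /scrape (a sketch; the field names mirror how
# ScrapeRequest is constructed in direct_search below, and other fields may exist):
#   {"query": "fastapi monitoring", "engines": ["all"], "max_results": 10}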
@app.post("/scrape", response_model=ScrapeResponse)
async def scrape(request: ScrapeRequest):
    """Submit a scrape request."""
    try:
        # Validate engines
        available_engines = scraper_manager.get_available_engines()
        if "all" not in request.engines:
            invalid_engines = [e for e in request.engines if e not in available_engines]
            if invalid_engines:
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid engines: {invalid_engines}. Available: {available_engines}"
                )
        
        # Submit request
        task_id = await scraper_service.submit_scrape_request(request)
        
        # Resolve engines for response
        engines = scraper_service._resolve_engines(request.engines)
        
        return ScrapeResponse(
            task_id=task_id,
            status=ScrapeStatus.PENDING,
            message="Scrape request submitted successfully",
            engines=engines
        )
        
    except HTTPException:
        # Re-raise the 400 for invalid engines instead of masking it as a 500
        raise
    except Exception as e:
        logger.error(f"Error submitting scrape request: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/result/{task_id}")
async def get_result(task_id: str = Path(..., description="Task ID")):
    """Get scrape result by task ID."""
    try:
        result = await scraper_service.get_task_status(task_id)
        
        if result.get("status") == "not_found":
            raise HTTPException(status_code=404, detail="Task not found")
        
        return JSONResponse(content=result)
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting result: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/results")
async def get_all_results(
    limit: int = Query(100, ge=1, le=1000, description="Maximum number of results"),
    offset: int = Query(0, ge=0, description="Offset for pagination")
):
    """Get all scrape results with pagination."""
    try:
        # NOTE: offset is accepted for pagination but is not forwarded to
        # db_service.get_all_results here; only limit is applied.
        results = await db_service.get_all_results(limit)
        return JSONResponse(content={"results": results, "count": len(results)})
        
    except Exception as e:
        logger.error(f"Error getting all results: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.delete("/results", response_model=DeleteResponse)
async def delete_results(query: Optional[str] = Query(None, description="Query pattern to match")):
    """Delete scrape results."""
    try:
        deleted_count = await db_service.delete_results(query)
        
        message = f"Deleted {deleted_count} results"
        if query:
            message += f" matching query: {query}"
        
        return DeleteResponse(
            message=message,
            deleted_count=deleted_count
        )
        
    except Exception as e:
        logger.error(f"Error deleting results: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/status/{task_id}")
async def get_task_status(task_id: str = Path(..., description="Task ID")):
    """Get the status of a specific task."""
    try:
        status = await scraper_service.get_task_status(task_id)
        
        if status.get("status") == "not_found":
            raise HTTPException(status_code=404, detail="Task not found")
        
        return JSONResponse(content=status)
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting task status: {e}")
        raise HTTPException(status_code=500, detail=str(e))

# New enhanced endpoints

@app.get("/stats")
async def get_statistics():
    """Get comprehensive system statistics."""
    try:
        stats = scraper_manager.get_scraper_stats()
        return JSONResponse(content=stats)
        
    except Exception as e:
        logger.error(f"Error getting statistics: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/performance")
async def get_performance_report():
    """Get detailed performance report."""
    try:
        report = scraper_manager.get_performance_report()
        return JSONResponse(content=report)
        
    except Exception as e:
        logger.error(f"Error getting performance report: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/cache/stats")
async def get_cache_stats():
    """Get cache statistics."""
    try:
        stats = cache_service.get_cache_stats()
        return JSONResponse(content=stats)
        
    except Exception as e:
        logger.error(f"Error getting cache stats: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.delete("/cache")
async def clear_cache():
    """Clear all cached results."""
    try:
        cleared_count = scraper_manager.clear_cache()
        return JSONResponse(content={
            "message": f"Cleared {cleared_count} cached entries",
            "cleared_count": cleared_count
        })
        
    except Exception as e:
        logger.error(f"Error clearing cache: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/metrics/reset")
async def reset_metrics():
    """Reset all monitoring metrics."""
    try:
        scraper_manager.reset_metrics()
        return JSONResponse(content={"message": "Metrics reset successfully"})
        
    except Exception as e:
        logger.error(f"Error resetting metrics: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/engines/{engine_name}/stats")
async def get_engine_stats(engine_name: str = Path(..., description="Engine name")):
    """Get statistics for a specific engine."""
    try:
        stats = monitoring_service.get_scraper_metrics(engine_name)
        if not stats:
            raise HTTPException(status_code=404, detail="Engine not found or no data")
        
        return JSONResponse(content=stats)
        
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting engine stats: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/errors")
async def get_error_logs(
    limit: int = Query(50, ge=1, le=500, description="Number of error logs to retrieve")
):
    """Get recent error logs."""
    try:
        errors = monitoring_service.get_error_logs(limit)
        return JSONResponse(content={"errors": errors, "count": len(errors)})
        
    except Exception as e:
        logger.error(f"Error getting error logs: {e}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/engines/{engine_name}/proxy")
async def add_proxy_to_engine(
    engine_name: str = Path(..., description="Engine name"),
    proxy: str = Query(..., description="Proxy URL (e.g., http://proxy:port)")
):
    """Add proxy to a specific engine."""
    try:
        scraper_manager.add_proxy_to_scraper(engine_name, proxy)
        return JSONResponse(content={
            "message": f"Proxy added to {engine_name}",
            "proxy": proxy
        })
        
    except Exception as e:
        logger.error(f"Error adding proxy: {e}")
        raise HTTPException(status_code=500, detail=str(e))

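# Example call for the direct-search endpoint (assumes the API is served on
# localhost:8000, as in the __main__ block below):
#   curl "http://localhost:8000/search/direct?query=python&engines=all&max_results=5"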
@app.get("/search/direct")
async def direct_search(
    query: str = Query(..., description="Search query"),
    engines: List[str] = Query(["all"], description="Search engines to use"),
    max_results: int = Query(10, ge=1, le=50, description="Maximum results per engine")
):
    """Direct search without task queue (for testing)."""
    try:
        request = ScrapeRequest(
            query=query,
            engines=engines,
            max_results=max_results
        )
        
        # Direct search using scraper manager
        results = await scraper_manager.scrape_multiple_engines(request)
        
        # result.dict() is the Pydantic v1 serializer (model_dump() on v2); the datetime
        # timestamp is converted explicitly so JSONResponse can JSON-encode the payload.
        return JSONResponse(content={
            "query": query,
            "engines": engines,
            "results": [
                {
                    **result.dict(),
                    "timestamp": result.timestamp.isoformat()
                } for result in results
            ],
            "count": len(results)
        })
        
    except Exception as e:
        logger.error(f"Error in direct search: {e}")
        raise HTTPException(status_code=500, detail=str(e))

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
