"""Ultra-fast Yandex scraper using optimized Selenium."""

import logging
import time
from typing import List, Optional
from urllib.parse import urlencode
from selenium.webdriver.common.by import By

from .fast_selenium_base import FastSeleniumBase
from src.models.schemas import SearchResult

logger = logging.getLogger(__name__)

class YandexFastScraper(FastSeleniumBase):
    """Ultra-fast Yandex scraper with Selenium optimizations."""
    
    def __init__(self, headless: bool = True):
        super().__init__("yandex", headless, fast_mode=True)
        self.base_url = "https://yandex.com"
    
    def search(self, query: str, max_results: int = 10) -> List[SearchResult]:
        """Fast Yandex search."""
        results = []
        
        try:
            # Build the search URL, letting urlencode handle query escaping
            search_url = f"{self.base_url}/search/?{urlencode({'text': query})}"
            
            if not self._fast_navigate(search_url):
                logger.error("Failed to navigate to Yandex")
                return results
            
            # Fixed short wait for the results page to render
            time.sleep(2)
            
            # Extract results
            results = self._extract_results_fast(max_results)
            
            logger.info(f"Yandex: Found {len(results)} results for '{query}'")
            
        except Exception as e:
            logger.error(f"Yandex search error: {e}")
        
        return results
    
    def _extract_results_fast(self, max_results: int) -> List[SearchResult]:
        """Fast Yandex result extraction."""
        results = []
        
        try:
            # Candidate Yandex result-container selectors, tried in order until one matches
            result_selectors = ['.serp-item', '.organic', '.main__content .serp-item']
            
            containers = []
            for selector in result_selectors:
                containers = self._fast_find_elements(selector, timeout=8)
                if containers:
                    break
            
            if not containers:
                logger.warning("No Yandex result containers found")
                return results
            
            # Fast extraction
            for i, container in enumerate(containers[:max_results]):
                try:
                    result = self._extract_single_result_fast(container, i + 1)
                    if result:
                        results.append(result)
                except Exception as e:
                    logger.debug(f"Error extracting Yandex result {i}: {e}")
                    continue
            
        except Exception as e:
            logger.error(f"Error extracting Yandex results: {e}")
        
        return results
    
    def _extract_single_result_fast(self, container, position: int) -> Optional[SearchResult]:
        """Fast single Yandex result extraction."""
        try:
            title = ""
            url = ""
            description = ""
            
            # Title and URL
            title_selectors = ['h2 a', '.organic__title a', '.serp-item__title a']
            for selector in title_selectors:
                try:
                    title_elem = container.find_element(By.CSS_SELECTOR, selector)
                    title = self._get_text_fast(title_elem)
                    url = self._get_attribute_fast(title_elem, 'href')
                    if title and url:
                        break
                except Exception:
                    continue
            
            # Description
            desc_selectors = ['.organic__text', '.serp-item__text', '.text-container']
            for selector in desc_selectors:
                try:
                    desc_elem = container.find_element(By.CSS_SELECTOR, selector)
                    description = self._get_text_fast(desc_elem)
                    if description:
                        break
                except Exception:
                    continue
            
            if not title and not url:
                return None
            
            return self._create_result(title or url, url, description, position)
            
        except Exception as e:
            logger.debug(f"Error extracting single Yandex result: {e}")
            return None

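
if __name__ == "__main__":
    # Minimal usage sketch. Because this module uses relative imports, run it
    # as part of its package (e.g. `python -m <package>.yandex_fast_scraper`);
    # the module/package names here are placeholders. It assumes FastSeleniumBase
    # creates the driver in __init__; the cleanup hook below is probed
    # defensively since its exact name is not shown in this file.
    logging.basicConfig(level=logging.INFO)

    scraper = YandexFastScraper(headless=True)
    try:
        for item in scraper.search("selenium web scraping", max_results=5):
            print(item)
    finally:
        cleanup = getattr(scraper, "close", None) or getattr(scraper, "quit", None)
        if callable(cleanup):
            cleanup()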