import asyncio
import logging
import traceback
from typing import Any, Dict

import requests

from app.core.config import settings
from app.entities.search_response import NewsResponse
from app.repos.mongodb_repo import NewsRepository
from app.shared.kafka_client.kafka_client import SearchKafkaClient
from app.usecases.create_news_response import NewsService

logger = logging.getLogger(__name__)

# Module-level wiring; these dependencies could instead be injected
# (a sketch of that follows below).
repository = NewsRepository(mongo_url=settings.MONGODB_URL, db_name=settings.DATABASE_NAME)
service = NewsService(repository=repository)
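
# A minimal sketch of the injection mentioned above, assuming NewsAPIService
# grew a `news_service` constructor argument (it does not take one today);
# kept commented out so the module-level wiring above stays authoritative:
#
#     class NewsAPIService:
#         def __init__(self, news_service: NewsService):
#             self.service = news_service
#             ...
#
#     api_service = NewsAPIService(news_service=service)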

class NewsAPIService:
    """Fetches news from the RapidAPI endpoint and persists results via NewsService."""

    def __init__(self):
        # Kafka client used to report task failures back to the pipeline.
        self.kafka_client = SearchKafkaClient(
            bootstrap_servers=settings.KAFKA_BOOTSTRAP_SERVERS,
            consumer_group=settings.KAFKA_CONSUMER_GROUP
        )
        self.headers = {
            "x-rapidapi-host": "real-time-news-data.p.rapidapi.com",
            "x-rapidapi-key": settings.RAPID_API_KEY
        }

    async def fetch_news(self, message: Dict[str, Any]) -> None:
        logger.info("Rapid API fetch started")
        task_id = message.get("id", "unknown")
        max_retries = 3
        backoff_seconds = 2

        try:
            keywords = message.get("keywords", [])
            max_results = message.get("max_results", 10)

            params = {
                # The endpoint expects a single query string, so join a keyword list.
                "query": " ".join(keywords) if isinstance(keywords, list) else keywords,
                "limit": max_results,
                "time_published": message.get("time_published"),
            }

            logger.info(f"Message: {message}")

            for attempt in range(1, max_retries + 1):
                try:
                    # NOTE: requests is synchronous and blocks the event loop;
                    # an async HTTP client (e.g. httpx) would avoid that.
                    response = requests.get(
                        settings.RAPID_API_URL,
                        headers=self.headers,
                        params=params,
                        timeout=30,
                    )

                    if response.status_code == 429:
                        retry_after = int(response.headers.get("Retry-After", backoff_seconds))
                        logger.warning(
                            f"Rate limit hit (429). Waiting {retry_after}s "
                            f"before retry {attempt}/{max_retries}..."
                        )
                        # asyncio.sleep so other tasks keep running while we wait.
                        await asyncio.sleep(retry_after)
                        continue

                    response.raise_for_status()
                    payload = response.json()
                    logger.debug(f"Raw API response: {payload}")
                    news_response = NewsResponse(**payload)

                    try:
                        result = await service.handle_news_response(news_response)
                        logger.info(f"Successfully saved to MongoDB. Result: {result}")
                    except Exception as mongo_error:
                        logger.error(f"MongoDB save error: {mongo_error}")
                        logger.error(f"MongoDB save traceback: {traceback.format_exc()}")
                        raise
                    break  # success, exit retry loop

                except requests.exceptions.RequestException as e:
                    logger.error(f"HTTP error on attempt {attempt}/{max_retries}: {e}")
                    if attempt == max_retries:
                        raise
                    # Exponential backoff: 2s, 4s, 8s, ...
                    await asyncio.sleep(backoff_seconds * 2 ** (attempt - 1))
            else:
                # The loop can only finish without break-ing if every attempt
                # hit a 429; surface that as an error.
                raise RuntimeError(f"Exhausted {max_retries} retries due to rate limiting")

        except Exception as e:
            logger.error(f"Error processing news fetch task: {e}")
            logger.error(traceback.format_exc())
            # Report the failure back through Kafka so upstream consumers see it.
            await self.kafka_client.send_error_result(
                task_id=task_id,
                platform=message.get("platforms"),
                error=str(e)
            )
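

if __name__ == "__main__":
    # Minimal usage sketch. The message shape below (id, keywords, max_results,
    # time_published, platforms) is inferred from the .get() calls in
    # fetch_news; the values are placeholders and may not match the real
    # Kafka payload exactly.
    sample_message = {
        "id": "demo-task-1",
        "keywords": ["python", "asyncio"],
        "max_results": 5,
        "time_published": "anytime",
        "platforms": "news",
    }
    asyncio.run(NewsAPIService().fetch_news(sample_message))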
               