"""
LLM Provider abstraction — provider-agnostic interface.

Primary: Google Gemini (free tier)
Fallback: Groq (free tier, Llama 3.1)
"""

from __future__ import annotations

import json
import os
from abc import ABC, abstractmethod
from typing import Any

import httpx


class LLMProvider(ABC):
    """Abstract base class for LLM providers."""

    @abstractmethod
    async def chat(
        self,
        messages: list[dict[str, str]],
        tools: list[dict[str, Any]] | None = None,
        system_prompt: str | None = None,
    ) -> dict[str, Any]:
        """
        Send messages to LLM and get response.
        Returns: {"content": str, "tool_calls": [{"name": str, "arguments": dict}]}
        """
        ...

    @abstractmethod
    def name(self) -> str: ...


class GeminiProvider(LLMProvider):
    """Google Gemini API provider (free tier).

    Calls the v1beta ``generateContent`` REST endpoint directly via httpx
    and normalizes the response into the common provider shape.
    """

    def __init__(self):
        # Credentials and model come from the environment so no secrets
        # are hard-coded; empty key is detected lazily in chat().
        self.api_key = os.getenv("GEMINI_API_KEY", "")
        self.model = os.getenv("GEMINI_MODEL", "gemini-1.5-flash")
        self.base_url = "https://generativelanguage.googleapis.com/v1beta"

    def name(self) -> str:
        return "gemini"

    async def chat(
        self,
        messages: list[dict[str, str]],
        tools: list[dict[str, Any]] | None = None,
        system_prompt: str | None = None,
    ) -> dict[str, Any]:
        """Send *messages* to Gemini and return the normalized reply.

        Args:
            messages: Chat history as ``{"role": ..., "content": ...}`` dicts.
            tools: Optional tool definitions (``name``/``description``/``parameters``).
            system_prompt: Optional system instruction.

        Returns:
            ``{"content": str, "tool_calls": [{"name": str, "arguments": dict}]}``

        Raises:
            ValueError: if ``GEMINI_API_KEY`` is not configured.
            httpx.HTTPStatusError: on a non-2xx API response.
        """
        if not self.api_key:
            raise ValueError("GEMINI_API_KEY not configured")

        url = f"{self.base_url}/models/{self.model}:generateContent?key={self.api_key}"

        # Gemini only understands "user" and "model" roles, so every
        # non-user role (e.g. "assistant") is mapped to "model".
        contents = [
            {
                "role": "user" if msg["role"] == "user" else "model",
                "parts": [{"text": msg["content"]}],
            }
            for msg in messages
        ]

        body: dict[str, Any] = {"contents": contents}

        if system_prompt:
            body["systemInstruction"] = {"parts": [{"text": system_prompt}]}

        if tools:
            # Gemini expects ONE Tool object carrying ALL function
            # declarations; emitting a separate Tool per function (as the
            # previous code did) can be rejected by the API.
            body["tools"] = [{
                "functionDeclarations": [
                    {
                        "name": tool["name"],
                        "description": tool["description"],
                        "parameters": tool.get("parameters", {}),
                    }
                    for tool in tools
                ]
            }]

        async with httpx.AsyncClient(timeout=30) as client:
            resp = await client.post(url, json=body)
            resp.raise_for_status()
            data = resp.json()

        # No candidates usually means the response was blocked or empty.
        candidates = data.get("candidates", [])
        if not candidates:
            return {"content": "Maaf, tidak ada respons.", "tool_calls": []}

        parts = candidates[0].get("content", {}).get("parts", [])
        content = ""
        tool_calls = []

        for part in parts:
            # A single part may carry text, a function call, or both.
            if "text" in part:
                content += part["text"]
            if "functionCall" in part:
                fc = part["functionCall"]
                tool_calls.append({
                    "name": fc["name"],
                    # "args" may be absent for zero-argument calls.
                    "arguments": fc.get("args", {}),
                })

        return {"content": content, "tool_calls": tool_calls}


class GroqProvider(LLMProvider):
    """Groq API provider (free tier, Llama 3.1).

    Uses Groq's OpenAI-compatible ``/chat/completions`` endpoint and
    normalizes the response into the common provider shape.
    """

    def __init__(self):
        # Credentials and model come from the environment; empty key is
        # detected lazily in chat().
        self.api_key = os.getenv("GROQ_API_KEY", "")
        self.model = os.getenv("GROQ_MODEL", "llama-3.1-70b-versatile")
        self.base_url = "https://api.groq.com/openai/v1"

    def name(self) -> str:
        return "groq"

    async def chat(
        self,
        messages: list[dict[str, str]],
        tools: list[dict[str, Any]] | None = None,
        system_prompt: str | None = None,
    ) -> dict[str, Any]:
        """Send *messages* to Groq and return the normalized reply.

        Args:
            messages: Chat history as ``{"role": ..., "content": ...}`` dicts.
            tools: Optional tool definitions (``name``/``description``/``parameters``).
            system_prompt: Optional system message prepended to the chat.

        Returns:
            ``{"content": str, "tool_calls": [{"name": str, "arguments": dict}]}``

        Raises:
            ValueError: if ``GROQ_API_KEY`` is not configured.
            httpx.HTTPStatusError: on a non-2xx API response.
            json.JSONDecodeError: if the model emits malformed tool arguments.
        """
        if not self.api_key:
            raise ValueError("GROQ_API_KEY not configured")

        url = f"{self.base_url}/chat/completions"

        # Build OpenAI-compatible messages; the system prompt becomes a
        # leading "system" message.
        oai_messages = []
        if system_prompt:
            oai_messages.append({"role": "system", "content": system_prompt})
        for msg in messages:
            oai_messages.append({"role": msg["role"], "content": msg["content"]})

        body: dict[str, Any] = {
            "model": self.model,
            "messages": oai_messages,
            "temperature": 0.3,
            "max_tokens": 2048,
        }

        if tools:
            body["tools"] = [
                {
                    "type": "function",
                    "function": {
                        "name": t["name"],
                        "description": t["description"],
                        "parameters": t.get("parameters", {}),
                    },
                }
                for t in tools
            ]
            body["tool_choice"] = "auto"

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

        async with httpx.AsyncClient(timeout=30) as client:
            resp = await client.post(url, json=body, headers=headers)
            resp.raise_for_status()
            data = resp.json()

        choice = data["choices"][0]
        message = choice["message"]
        # "content" can be explicitly null when the model only calls tools.
        content = message.get("content", "") or ""
        tool_calls = []

        # OpenAI-compatible servers may return "tool_calls": null rather
        # than omitting the key, so guard with `or []` (plain .get with a
        # default would still yield None and crash the loop).
        for tc in message.get("tool_calls") or []:
            tool_calls.append({
                "name": tc["function"]["name"],
                # Arguments arrive as a JSON-encoded string.
                "arguments": json.loads(tc["function"]["arguments"]),
            })

        return {"content": content, "tool_calls": tool_calls}


def get_provider() -> LLMProvider:
    """Select the primary provider from the environment.

    Gemini takes precedence when both API keys are present.

    Raises:
        ValueError: when neither ``GEMINI_API_KEY`` nor ``GROQ_API_KEY`` is set.
    """
    candidates = (
        ("GEMINI_API_KEY", GeminiProvider),
        ("GROQ_API_KEY", GroqProvider),
    )
    for env_var, factory in candidates:
        if os.getenv(env_var):
            return factory()
    raise ValueError(
        "No LLM provider configured. Set GEMINI_API_KEY or GROQ_API_KEY."
    )


def get_fallback_provider(current: LLMProvider) -> LLMProvider | None:
    """Return the alternate provider to *current*, or None.

    The fallback is only offered when its API key is configured; an
    unrecognized provider type yields None.
    """
    if isinstance(current, GeminiProvider):
        return GroqProvider() if os.getenv("GROQ_API_KEY") else None
    if isinstance(current, GroqProvider):
        return GeminiProvider() if os.getenv("GEMINI_API_KEY") else None
    return None
