import logging
from datetime import date, datetime, timedelta
from typing import Any, Dict, List, Optional

try:  # pragma: no cover - runtime dependency may be missing/broken
    import pandas as pd  # type: ignore
    _PANDAS_OK = True
except Exception:  # pragma: no cover
    pd = None  # type: ignore
    _PANDAS_OK = False
from fastapi import HTTPException

from app.v1.services.zerodha.client import ZerodhaClient
from app.v1.services.zerodha.indicators import IndicatorCalculator
from app.v1.services.zerodha.strategy_features import StrategyFeatureCalculator
from app.v1.services.intraday_store import GLOBAL_INTRADAY_STORE
from app.v1.services.stock_history import get_stock_history
from app.v1.services.stocks_master import refresh_stocks_master

from .config import MAX_CANDLES_PER_TIMEFRAME, TEGPT_VERBOSE_LOGS

logger = logging.getLogger(__name__)


def _as_date(v: Any) -> Optional[date]:
    if v is None:
        return None
    if isinstance(v, date) and not isinstance(v, datetime):
        return v
    if isinstance(v, datetime):
        return v.date()
    if isinstance(v, str):
        s = v.strip()
        if not s:
            return None
        try:
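            # fromisoformat() rejects a trailing "Z" on older Python versions,
            # so map it to an explicit UTC offset before parsing.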
            return datetime.fromisoformat(s.replace("Z", "+00:00")).date()
        except Exception:
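            # Fall back to parsing just the leading date portion (YYYY-MM-DD)
            # of strings fromisoformat cannot handle in full.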
            if len(s) >= 10:
                try:
                    return datetime.fromisoformat(s[:10]).date()
                except Exception:
                    return None
    return None


def _num(v: Any) -> Optional[float]:
    try:
        if v is None or isinstance(v, bool):
            return None
        return float(v)
    except Exception:
        return None


def _aggregate_ohlcv(daily: List[Dict[str, Any]], *, mode: str) -> List[Dict[str, Any]]:
    """Aggregate daily candles into weekly/monthly candles.

    mode:
      - "week"  => ISO week buckets (week start = Monday)
      - "month" => calendar month buckets (month start = day 1)
    """

    if not daily:
        return []

    rows: List[Dict[str, Any]] = []
    for r in daily:
        if not isinstance(r, dict):
            continue
        d = _as_date(r.get("date"))
        o = _num(r.get("open"))
        h = _num(r.get("high"))
        l = _num(r.get("low"))
        c = _num(r.get("close"))
        if d is None or o is None or h is None or l is None or c is None:
            continue
        v = _num(r.get("volume"))
        rows.append({"date": d, "open": o, "high": h, "low": l, "close": c, "volume": v})

    if not rows:
        return []

    rows.sort(key=lambda x: x["date"])

    buckets: Dict[str, List[Dict[str, Any]]] = {}
    order: List[str] = []
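    # Bucket each row under its period start (Monday for "week", day 1 for "month"),
    # tracking first-seen order so the output stays chronological.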
    for r in rows:
        d: date = r["date"]
        if mode == "week":
            iso = d.isocalendar()
            start = date.fromisocalendar(int(iso.year), int(iso.week), 1)
            key = start.isoformat()
        else:
            start = date(int(d.year), int(d.month), 1)
            key = start.isoformat()

        if key not in buckets:
            buckets[key] = []
            order.append(key)
        buckets[key].append(r)

    out: List[Dict[str, Any]] = []
    for key in order:
        b = buckets.get(key) or []
        if not b:
            continue
        o = b[0]["open"]
        c = b[-1]["close"]
        h = max(x["high"] for x in b)
        l = min(x["low"] for x in b)

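        # Sum volume only across bars that report it; omit the field entirely
        # when no bar in the bucket carries volume.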
        v_sum = 0.0
        has_vol = False
        for x in b:
            if x.get("volume") is None:
                continue
            has_vol = True
            v_sum += float(x["volume"])

        candle = {"date": key, "open": o, "high": h, "low": l, "close": c}
        if has_vol:
            candle["volume"] = v_sum
        out.append(candle)

    return out


def get_zerodha_client_service(db, current_user) -> ZerodhaClient:
    """Get Zerodha client for current user."""
    user_id = str(current_user.get("_id"))
    settings = db["zerodha_settings"].find_one({"user_id": user_id})

    if not settings:
        raise HTTPException(status_code=404, detail="Zerodha settings not found")

    if not settings.get("access_token"):
        raise HTTPException(status_code=403, detail="Zerodha not authenticated")

    return ZerodhaClient(
        api_key=settings["api_key"],
        api_secret=settings["api_secret"],
        access_token=settings["access_token"],
    )


def refresh_instruments_service(db, zerodha_client: ZerodhaClient) -> Dict[str, Any]:
    """Refresh stocks master list from Zerodha instruments.

    IMPORTANT: This is the only allowed stock creation path.
    """
    try:
        res = refresh_stocks_master(db=db, zerodha_client=zerodha_client, exchanges=["NSE"], allow_cache_write=True)
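        # "count" is the total number of master documents touched by the refresh
        # (inserted + updated + kept).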
        return {"count": int(res.get("inserted", 0)) + int(res.get("updated", 0)) + int(res.get("kept", 0)), "timestamp": res.get("timestamp")}
    except Exception as e:
        logger.exception("Failed to refresh stocks master")
        raise HTTPException(status_code=500, detail=f"Stocks master refresh failed: {str(e)}")


def get_instrument_token(db, symbol: str) -> Optional[int]:
    """Get instrument token for symbol from `stocks` master list."""
    try:
        sym = (symbol or "").strip().upper()
        stock = db["stocks"].find_one({"symbol": sym, "exchange": "NSE"}, {"instrument_token": 1})
        tok = stock.get("instrument_token") if isinstance(stock, dict) else None
        return int(tok) if tok is not None else None
    except Exception:
        return None


def fetch_market_data(
    zerodha_client: ZerodhaClient,
    symbol: str,
    timeframes: List[str],
    db=None,
    include_quote: bool = True,
) -> Dict[str, Any]:
    """Fetch comprehensive market data for analysis."""
    data: Dict[str, Any] = {
        "symbol": symbol,
        "timestamp": datetime.utcnow().isoformat(),
        "quote": {},
        "candles": {},
        "error": None,
    }

    try:
        logger.debug("[teGPT] Market data build start | %s", symbol)

        symbol = (symbol or "").strip().upper()
        if not symbol:
            data["error"] = "symbol is required"
            return data

        # Source of truth: `stocks` master list.
        stock = None
        if db is not None:
            try:
                stock = db["stocks"].find_one({"symbol": symbol, "exchange": "NSE"}) or db["stocks"].find_one({"symbol": symbol})
            except Exception:
                stock = None

        if not isinstance(stock, dict):
            data["error"] = f"Stock not found in stocks master list: {symbol}. Refresh stocks master first."
            return data

        token = stock.get("instrument_token")
        stock_id = stock.get("stock_id")
        data["instrument_token"] = token
        data["stock_id"] = stock_id

        if token is None:
            data["error"] = f"instrument_token missing in stocks master list for {symbol}"
            return data

        if include_quote:
            try:
                quote_data = zerodha_client.get_quote([f"NSE:{symbol}"])
                data["quote"] = quote_data.get(f"NSE:{symbol}", {})
            except Exception as e:
                logger.error("❌ Quote fetch failed for %s: %s", symbol, e)
                data["quote"] = {}

        # ---- Candle rules ----
        # - Daily/Weekly/Monthly come ONLY from DB history and are maintained by background jobs.
        # - Intraday (5m/15m/30m) is fetched live for today only and cached in memory.
        # - Intraday must never be persisted into history.

        # Load daily history once if needed (used to derive week/month).
        day_candles_db: List[Dict[str, Any]] = []
        if db is not None:
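            # Normalize requested timeframes (trimmed, lower-case) before deciding
            # whether the daily history is needed.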
            want = {str(tf).strip().lower() for tf in (timeframes or [])}
            if "day" in want or "week" in want or "month" in want:
                try:
                    day_candles_db = get_stock_history(db, str(stock_id), "day")[:]
                except Exception:
                    day_candles_db = []

        for timeframe in timeframes:
            try:
                tf = str(timeframe or "").strip().lower()

                if tf in ("day", "week", "month"):
                    if db is None:
                        data["candles"][tf] = []
                    else:
                        if tf == "day":
                            data["candles"][tf] = day_candles_db[:]
                        elif tf == "week":
                            data["candles"][tf] = _aggregate_ohlcv(day_candles_db, mode="week")
                        else:
                            data["candles"][tf] = _aggregate_ohlcv(day_candles_db, mode="month")

                elif tf in ("5minute", "15minute", "30minute"):
                    data["candles"][tf] = GLOBAL_INTRADAY_STORE.get_intraday_candles(
                        zerodha_client=zerodha_client,
                        instrument_token=int(token),
                        timeframe=tf,
                        max_candles=100,
                    )

                else:
                    # Timeframe not covered by the candle rules above; return an empty list.
                    data["candles"][tf] = []

            except Exception as e:
                logger.error("❌ Candle fetch failed for %s %s: %s", symbol, timeframe, e)
                # Key the failure entry by the normalized timeframe so it matches
                # the keys used on the success path.
                data["candles"][str(timeframe or "").strip().lower()] = []

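        # Per-timeframe indicator summaries (skipped when pandas is unavailable).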
        indicators_summary: Dict[str, Any] = {}
        if _PANDAS_OK:
            for tf, tf_candles in data["candles"].items():
                if not tf_candles:
                    continue
                try:
                    df_tf = pd.DataFrame(tf_candles)
                    summary = IndicatorCalculator.summarize_dataframe(df_tf)
                    if summary:
                        indicators_summary[tf] = summary
                except Exception as e:
                    logger.error("❌ Indicator summary failed for %s %s: %s", symbol, tf, e)

        if indicators_summary:
            data["indicators"] = indicators_summary

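        # Pivot points and Fibonacci levels are computed from daily candles only.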
        if _PANDAS_OK:
            try:
                day_candles = data["candles"].get("day") or []
                if day_candles:
                    df_day = pd.DataFrame(day_candles)
                    pivots = IndicatorCalculator.calculate_pivot_points(df_day)
                    data["pivots"] = {"day": pivots}

                    fib_levels = IndicatorCalculator.calculate_fibonacci_levels(df_day)
                    if fib_levels:
                        data["fib"] = {"day": fib_levels}
            except Exception as e:
                logger.error("❌ Pivot/Fibonacci calculation failed for %s: %s", symbol, e)

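        # Strategy features combine the per-timeframe candles with the daily Fibonacci levels.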
        try:
            strategies = StrategyFeatureCalculator.summarize_strategies(
                candles_by_timeframe=data.get("candles", {}),
                fib=(data.get("fib", {}) or {}).get("day"),
            )
            if strategies:
                data["strategies"] = strategies
        except Exception as e:
            logger.error("❌ Strategy feature calculation failed for %s: %s", symbol, e)

        # After computations, cap candle arrays to control payload size.
        try:
            max_keep = int(MAX_CANDLES_PER_TIMEFRAME)
        except Exception:
            max_keep = 0

        if max_keep and max_keep > 0:
            try:
                for tf, series in (data.get("candles") or {}).items():
                    if isinstance(series, list) and len(series) > max_keep:
                        data["candles"][tf] = series[-max_keep:]
            except Exception:
                pass

        total_candles = sum(len(c) for c in data["candles"].values()) if data["candles"] else 0
        try:
            candles_map = data.get("candles") or {}
            kept = {k: len(v or []) for k, v in candles_map.items()}
        except Exception:
            kept = {}

        logger.info(
            "[teGPT] Zerodha OK | %s | quote=%s | candles=%s | total=%d",
            symbol,
            "yes" if data.get("quote") else "no",
            kept,
            int(total_candles),
        )

        if TEGPT_VERBOSE_LOGS:
            logger.info("🎉 ZERODHA DATA COMPLETE for %s", symbol)

        if not data["quote"] and total_candles == 0:
            msg = "No market data available from Zerodha (quote and candles empty)"
            logger.error("❌ %s for %s - likely auth/token issue", msg, symbol)
            data["error"] = msg

        return data

    except Exception as e:
        logger.exception("Market data fetch failed for %s", symbol)
        data["error"] = str(e)
        return data
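

# Illustrative usage sketch (the symbol and the db/current_user wiring are
# hypothetical, not part of this module):
#   client = get_zerodha_client_service(db, current_user)
#   payload = fetch_market_data(client, "RELIANCE", ["day", "week", "15minute"], db=db)
#   payload["candles"]["day"]  # daily OHLCV dicts sourced from the stocks history store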
