import logging
import os
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

from app.v1.services.zerodha.client import ZerodhaClient

logger = logging.getLogger(__name__)


# Zerodha historical API enforces a maximum date-range per request.
# (Observed error: "interval exceeds max limit: 2000 days").
ZERODHA_MAX_HISTORICAL_RANGE_DAYS = int(os.getenv("ZERODHA_MAX_HISTORICAL_RANGE_DAYS", "2000"))
# Collection holding one history doc per stock_id (accessed as db[name]).
STOCK_HISTORY_COLLECTION = os.getenv("STOCK_HISTORY_COLLECTION", "stock_history")
# Rolling window: how many daily candles to keep per stock on write.
STOCK_HISTORY_KEEP_DAY = int(os.getenv("STOCK_HISTORY_KEEP_DAY", "365"))


def _cap_from_date(now: datetime, frm_dt: datetime) -> datetime:
    """Ensure (now - frm_dt) does not exceed Zerodha's max request interval."""
    limit_days = max(1, int(ZERODHA_MAX_HISTORICAL_RANGE_DAYS))
    if (now - frm_dt).days <= limit_days:
        return frm_dt
    # Over the cap: clamp the window, keeping a 1-day safety margin.
    return now - timedelta(days=max(1, limit_days - 1))

def _as_date_key(v: Any) -> Optional[str]:
    """Normalize candle date to an ISO YYYY-MM-DD key."""
    if v is None:
        return None
    if isinstance(v, str):
        s = v.strip()
        if not s:
            return None
        # Accept both date-only and datetime strings.
        try:
            return datetime.fromisoformat(s.replace("Z", "+00:00")).date().isoformat()
        except Exception:
            # As a fallback, accept already-normalized YYYY-MM-DD.
            if len(s) >= 10:
                return s[:10]
            return None
    if isinstance(v, datetime):
        return v.date().isoformat()
    return None


def _trim(candles: List[Dict[str, Any]], keep: int) -> List[Dict[str, Any]]:
    if not candles:
        return []
    if keep <= 0:
        return []
    return candles[-keep:]

def _normalize_records(records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Normalize Kite historical records into our storage shape.

    Input is the raw list from `kite.historical_data` (via `get_historical_data_records`).
    Output uses ISO `YYYY-MM-DD` strings for `date`.
    """
    rows: List[Dict[str, Any]] = []
    for rec in records or []:
        if not isinstance(rec, dict):
            continue
        day_key = _as_date_key(rec.get("date"))
        if not day_key:
            # Rows without a usable date cannot be merged/deduped; drop them.
            continue
        normalized: Dict[str, Any] = {
            "date": day_key,
            "open": rec.get("open"),
            "high": rec.get("high"),
            "low": rec.get("low"),
            "close": rec.get("close"),
        }
        if "volume" in rec:
            normalized["volume"] = rec.get("volume")
        rows.append(normalized)
    # Ascending date order keeps later merge + trimming stable.
    rows.sort(key=lambda row: str(row.get("date") or ""))
    return rows


def _merge_candles_by_date(existing: List[Dict[str, Any]], incoming: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Merge two candle arrays, de-duplicating by YYYY-MM-DD `date` (keep incoming on conflict).

    Non-dict entries and entries without a normalizable `date` are skipped.
    Returns candles sorted ascending by date key.
    """
    merged: Dict[str, Dict[str, Any]] = {}
    # Ingest existing first, incoming second: later writes to the same date key
    # overwrite earlier ones, so incoming wins on conflict (same as before, but
    # without duplicating the ingestion loop for each batch).
    for batch in (existing or [], incoming or []):
        for r in batch:
            if not isinstance(r, dict):
                continue
            k = _as_date_key(r.get("date"))
            if not k:
                continue
            row = dict(r)  # copy so callers' dicts are never mutated
            row["date"] = k
            merged[k] = row
    return [merged[k] for k in sorted(merged)]


def upsert_stock_history_bundle(
    *,
    db,
    stock_id: str,
    symbol: str,
    exchange: str,
    day: Optional[List[Dict[str, Any]]] = None,
    updated_at: Optional[datetime] = None,
) -> None:
    """Store daily candles for a stock.

    Minimal persistence model:
    - one doc per stock_id
    - only `day` candles are stored
    - `day_last_date` is stored for quick freshness checks
    """
    ts = updated_at or datetime.utcnow()

    set_fields: Dict[str, Any] = {
        "stock_id": str(stock_id),
        "symbol": str(symbol or "").strip().upper(),
        "exchange": str(exchange or "NSE").strip().upper(),
        "updated_at": ts,
        "source": "ZERODHA",
        # Schema v8: store ONLY daily candles + day_last_date.
        "schema_version": 8,
    }

    # Legacy/derived fields are stripped on every write so old docs converge on
    # the clean schema: nested candles.*, count fields, persisted week/month
    # (week/month are derived at read-time, never stored).
    unset_fields: Dict[str, Any] = {
        "candles": "",
        "day_count": "",
        "week_count": "",
        "month_count": "",
        "week": "",
        "month": "",
        "week_last_date": "",
        "month_last_date": "",
    }

    if day is not None:
        trimmed = _trim(day, STOCK_HISTORY_KEEP_DAY)
        if trimmed:
            set_fields["day"] = trimmed
            # Cache the newest candle's date for cheap freshness checks.
            set_fields["day_last_date"] = _as_date_key((trimmed[-1] or {}).get("date"))
        else:
            unset_fields["day"] = ""
            unset_fields["day_last_date"] = ""

    update: Dict[str, Any] = {
        "$set": set_fields,
        "$setOnInsert": {"created_at": ts},
        "$unset": unset_fields,
    }

    db[STOCK_HISTORY_COLLECTION].update_one({"stock_id": str(stock_id)}, update, upsert=True)


def upsert_stock_history(
    *,
    db,
    stock_id: str,
    symbol: str,
    exchange: str,
    timeframe: str,
    candles: List[Dict[str, Any]],
    updated_at: Optional[datetime] = None,
) -> None:
    """Backward-compatible helper: keeps the old signature, writes single-doc schema.

    Only the "day" timeframe is persisted; anything else raises ValueError.
    """
    normalized_tf = str(timeframe or "").strip().lower()
    if normalized_tf != "day":
        raise ValueError(f"Unsupported timeframe: {timeframe}")
    upsert_stock_history_bundle(
        db=db,
        stock_id=stock_id,
        symbol=symbol,
        exchange=exchange,
        day=candles,
        updated_at=updated_at,
    )


def get_stock_history(db, stock_id: str, timeframe: str) -> List[Dict[str, Any]]:
    """Return stored daily candles for `stock_id`; non-day timeframes are not persisted."""
    if str(timeframe or "").strip().lower() != "day":
        # Week/month are no longer stored; callers derive them if needed.
        return []

    doc = db[STOCK_HISTORY_COLLECTION].find_one({"stock_id": str(stock_id)}) or {}

    # Current schema: one doc per stock_id with a top-level `day` array.
    day_arr = doc.get("day")
    if isinstance(day_arr, list):
        return day_arr

    # Prior schema: nested candles.day.
    candles_obj = doc.get("candles")
    if isinstance(candles_obj, dict):
        nested = candles_obj.get("day") or []
        return nested if isinstance(nested, list) else []

    # Oldest schema: one doc per (stock_id, timeframe).
    legacy = db[STOCK_HISTORY_COLLECTION].find_one({"stock_id": str(stock_id), "timeframe": "day"}, {"_id": 0, "candles": 1}) or {}
    legacy_candles = legacy.get("candles") or []
    return legacy_candles if isinstance(legacy_candles, list) else []


def refresh_stock_history_for_stock(
    *,
    db,
    zerodha_client: ZerodhaClient,
    stock: Dict[str, Any],
    now: Optional[datetime] = None,
    timeframes: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Fetch daily candles from Zerodha and store them.

    Intraday candles must never be written here.

    Parameters:
        db: handle supporting ``db[collection].find_one`` (pymongo-style).
        zerodha_client: client exposing ``get_historical_data_records``.
        stock: dict read for ``stock_id``, ``instrument_token``, ``symbol``, ``exchange``.
        now: clock override (defaults to ``datetime.utcnow()``); also used as ``updated_at``.
        timeframes: requested timeframes; only "day" triggers a write.

    Returns:
        ``{"ok": True, "symbol", "stock_id", "updated": [...]}`` on success,
        or ``{"ok": False, ...}`` with a ``reason`` or ``errors`` key on failure.
        Never raises: all exceptions are converted into an error payload.
    """

    now = now or datetime.utcnow()
    # Persistence rule: store ONLY daily candles.
    timeframes = timeframes or ["day"]

    stock_id = str(stock.get("stock_id") or "")
    token = stock.get("instrument_token")
    symbol = (stock.get("symbol") or "").strip().upper()
    exchange = (stock.get("exchange") or "NSE").strip().upper()

    # Guard: without an id, instrument token, and symbol we cannot fetch or store.
    if not stock_id or token is None or not symbol:
        return {"ok": False, "reason": "missing_stock_id_or_token", "symbol": symbol}

    results: Dict[str, Any] = {"ok": True, "symbol": symbol, "stock_id": stock_id, "updated": []}

    want = {str(tf).strip().lower() for tf in (timeframes or [])}
    want_day = "day" in want

    # Incremental fetch:
    # - If we have day_last_date, fetch only from that date onward (inclusive)
    # - Else bootstrap by fetching ~420 days
    existing_doc = db[STOCK_HISTORY_COLLECTION].find_one({"stock_id": str(stock_id)}, {"_id": 0, "day": 1, "day_last_date": 1, "candles": 1}) or {}
    existing_day = existing_doc.get("day")
    if not isinstance(existing_day, list):
        # Backward compat: nested candles.day
        candles_obj = existing_doc.get("candles")
        if isinstance(candles_obj, dict) and isinstance(candles_obj.get("day"), list):
            existing_day = candles_obj.get("day")
        else:
            existing_day = []

    last_s = existing_doc.get("day_last_date")
    last_dt: Optional[datetime] = None
    if isinstance(last_s, str) and last_s.strip():
        try:
            # Stored as YYYY-MM-DD; fromisoformat yields midnight of that day.
            last_dt = datetime.fromisoformat(last_s.strip())
        except Exception:
            # Malformed stored date: treat as missing and bootstrap below.
            last_dt = None

    if last_dt is not None:
        # Fetch from the last stored day (inclusive) to guarantee we pick up revisions;
        # merge will dedupe.
        frm_dt = last_dt
    else:
        frm_dt = now - timedelta(days=420)

    # Never request more than Zerodha's max interval per call.
    frm_dt = _cap_from_date(now, frm_dt)
    frm = frm_dt.strftime("%Y-%m-%d")
    to = now.strftime("%Y-%m-%d")

    try:
        raw = zerodha_client.get_historical_data_records(int(token), interval="day", from_date=frm, to_date=to)
        incoming = _normalize_records(raw)
        # An empty fetch is only an error when we also have nothing stored yet.
        if not incoming and not existing_day:
            return {"ok": False, "symbol": symbol, "stock_id": stock_id, "errors": [{"error": "no_day_candles_returned"}]}

        merged = _merge_candles_by_date(existing_day, incoming)
        if not merged:
            return {"ok": False, "symbol": symbol, "stock_id": stock_id, "errors": [{"error": "merge_produced_empty"}]}

        day_out: Optional[List[Dict[str, Any]]] = None
        if want_day:
            # Trim to the retention window before writing.
            day_out = _trim(merged, STOCK_HISTORY_KEEP_DAY)
            if day_out:
                results["updated"].append("day")

        if day_out:
            upsert_stock_history_bundle(
                db=db,
                stock_id=stock_id,
                symbol=symbol,
                exchange=exchange,
                day=day_out,
                updated_at=now,
            )

        return results

    except Exception as e:
        # Best-effort refresh: surface the failure in the payload, do not raise.
        logger.warning("History refresh failed | %s | %s", symbol, e)
        return {"ok": False, "symbol": symbol, "stock_id": stock_id, "errors": [{"error": str(e)}]}
