import asyncio
import logging
import os
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

from app.db.database import get_mongo_db
from app.v1.background.global_intraday import (
    _get_global_user_id,
    _get_global_account_id,
    _get_global_zerodha_client,
    _is_market_hours_ist,
)
from app.v1.libraries.nse_indices import fetch_equity_index_symbols, should_refresh_by_ttl
from app.v1.services.teGPT import analyze_symbol_service, fetch_market_data
from app.v1.services.paper_trading import process_signal_and_market_update
from app.v1.utils.confidence import normalize_confidence
from app.v1.utils.symbol_ranking import score_market_data
from app.v1.utils.snapshot_sanitize import compact_analysis_for_persistence


logger = logging.getLogger(__name__)

_INDEX_NAME = "NIFTY 50"

# Loop interval (minutes)
REFRESH_MINUTES = int(os.getenv("NIFTY50_REFRESH_MINUTES", "60"))
# Constituents refresh TTL (hours): 24 = daily, 168 = weekly
CRAWL_TTL_HOURS = int(os.getenv("NIFTY50_CRAWL_TTL_HOURS", "24"))
# Snapshot freshness (minutes) - skip re-analysis if a recent snapshot already exists
FRESHNESS_MINUTES = int(os.getenv("NIFTY50_FRESHNESS_MINUTES", "60"))
# Bound work per cycle
MAX_ANALYSES_PER_CYCLE = int(os.getenv("NIFTY50_MAX_ANALYSES", "50"))

# Rank all constituents algorithmically first, then run GPT only on the top N
NIFTY50_GPT_TOP_N = int(os.getenv("NIFTY50_GPT_TOP_N", "25"))
NIFTY50_FEATURES_REFRESH_MINUTES = int(os.getenv("NIFTY50_FEATURES_REFRESH_MINUTES", "10"))

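# Guards against overlapping cycles when a single run outlasts the loop interval.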
_NIFTY50_LOCK = asyncio.Lock()


def _compact_market_data_no_candles(md: Any) -> Dict[str, Any]:
    """Persist-safe market_data.

    Strict rule: do NOT persist candles/quote snapshots (especially intraday).
    Keep only derived/computed fields that are useful for later UI/context.
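
    Example (hypothetical payload):
        >>> _compact_market_data_no_candles({"candles": {"5minute": []}, "indicators": {"rsi": 61.2}})
        {'indicators': {'rsi': 61.2}}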
    """

    if not isinstance(md, dict):
        return {}
    out: Dict[str, Any] = {}
    for k in ("instrument_token", "stock_id", "indicators", "strategies", "pivots", "fib", "error"):
        v = md.get(k)
        if v is not None:
            out[k] = v
    return out


def _clamp_int(v: int, lo: int, hi: int) -> int:
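    """Coerce `v` to int and clamp it into [lo, hi]; return `lo` on unparsable input.

    e.g. _clamp_int(999, 1, 60) -> 60; _clamp_int("oops", 1, 60) -> 1
    """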
    try:
        v = int(v)
    except Exception:
        return lo
    return max(lo, min(hi, v))


def _ensure_stock_identity(db, symbol: str, exchange: str = "NSE") -> Optional[Dict[str, Any]]:
    """Read-only stock lookup.

    Non-negotiable: no manual stock creation.
    """

    sym = (symbol or "").strip().upper()
    if not sym:
        return None

    exchange = (exchange or "NSE").strip().upper()
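    # Prefer an exact (symbol, exchange) match; fall back to a symbol-only lookup.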
    stock = db["stocks"].find_one({"symbol": sym, "exchange": exchange}) or db["stocks"].find_one({"symbol": sym})
    if not isinstance(stock, dict):
        return None

    if not stock.get("stock_id") and stock.get("_id") is not None:
        try:
            sid = str(stock.get("_id"))
            db["stocks"].update_one({"_id": stock["_id"]}, {"$set": {"stock_id": sid}})
            stock["stock_id"] = sid
        except Exception:
            pass

    return stock


def _refresh_constituents_if_needed(db) -> List[str]:
    now = datetime.utcnow()

    ttl_hours = _clamp_int(CRAWL_TTL_HOURS, 1, 24 * 30)

    doc = db["index_constituents"].find_one({"index": _INDEX_NAME})
    fetched_at = doc.get("fetched_at") if isinstance(doc, dict) else None

    if isinstance(doc, dict) and isinstance(doc.get("symbols"), list) and not should_refresh_by_ttl(
        fetched_at,
        ttl_hours=ttl_hours,
        now_utc=now,
    ):
        return [str(s).strip().upper() for s in (doc.get("symbols") or []) if str(s).strip()]

    symbols = fetch_equity_index_symbols(_INDEX_NAME)
    if not symbols:
        # Keep old list on fetch failures
        if isinstance(doc, dict) and isinstance(doc.get("symbols"), list):
            return [str(s).strip().upper() for s in (doc.get("symbols") or []) if str(s).strip()]
        return []

    # Verify each symbol maps to a known stock (no creation; see _ensure_stock_identity)
    for sym in symbols:
        try:
            _ensure_stock_identity(db, sym)
        except Exception:
            logger.exception("[NIFTY50] Failed ensuring stock identity for %s", sym)

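    # Upsert keyed by index name: `$set` refreshes symbols/fetched_at (used by the
    # TTL check above), while `$setOnInsert` pins created_at on first insert only.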
    db["index_constituents"].update_one(
        {"index": _INDEX_NAME},
        {
            "$set": {
                "index": _INDEX_NAME,
                "symbols": symbols,
                "fetched_at": now,
                "source": "nseindia",
                "ttl_hours": ttl_hours,
            },
            "$setOnInsert": {"created_at": now},
        },
        upsert=True,
    )

    logger.info("[NIFTY50] Constituents refreshed | symbols=%d ttl_hours=%d", len(symbols), ttl_hours)
    return symbols


def _latest_snapshot_stock_ids(db, stock_ids: List[str], cutoff: datetime) -> set:
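    """Return the set of stock_ids (as strings) that have a snapshot at/after `cutoff`."""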
    if not stock_ids:
        return set()
    try:
        ids = db["stock_analysis_snapshots"].distinct(
            "stock_id",
            {"stock_id": {"$in": stock_ids}, "timestamp": {"$gte": cutoff}},
        )
        return {str(x) for x in (ids or []) if x}
    except Exception:
        logger.exception("[NIFTY50] Failed to compute fresh snapshot ids")
        return set()


def _run_nifty50_cycle(db) -> None:
    if not _is_market_hours_ist():
        from app.v1.utils.market_time import backend_market_window, format_window, local_time_str

        logger.info(
            "[NIFTY50] Backend skipped due to time | now=%s | window=%s",
            local_time_str(),
            format_window(backend_market_window()),
        )
        return

    # Only refresh constituents during backend market window.
    symbols = _refresh_constituents_if_needed(db)
    if not symbols:
        logger.info("[NIFTY50] No constituents available; skipping")
        return

    zerodha_client = _get_global_zerodha_client(db)
    if not zerodha_client:
        logger.error("[NIFTY50] Skipping cycle: global Zerodha client unavailable")
        return

    user_id = _get_global_user_id(db)
    if not user_id:
        logger.error("[NIFTY50] Global user_id missing; skipping")
        return

    account_id = _get_global_account_id(db)

    # Map symbols -> stock_id via `stocks`
    stocks = list(db["stocks"].find({"symbol": {"$in": symbols}}, {"stock_id": 1, "symbol": 1, "is_active": 1}))
    by_symbol = {(s.get("symbol") or "").strip().upper(): s for s in stocks if s.get("stock_id")}

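    # Keep only symbols that resolve to a known stock document and are not
    # explicitly inactive (a missing is_active flag counts as active).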
    eligible: List[Dict[str, Any]] = []
    for sym in symbols:
        row = by_symbol.get(sym)
        if not row:
            continue
        if row.get("is_active") is False:
            continue
        eligible.append(row)

    if not eligible:
        logger.info("[NIFTY50] No eligible stocks; skipping")
        return

    # ---- Compute indicators/strategies for ALL 50 (algorithmic ranking) ----
    features_refresh = _clamp_int(NIFTY50_FEATURES_REFRESH_MINUTES, 1, 60)
    feat_cutoff = datetime.utcnow() - timedelta(minutes=features_refresh)

    scored_rows: List[Dict[str, Any]] = []
    for r in eligible:
        sid = str(r.get("stock_id"))
        sym = (r.get("symbol") or "").strip().upper()
        if not sid or not sym:
            continue

        md: Dict[str, Any] = {}

        # Prefer reusing recent computed features from snapshots to reduce Zerodha load.
        try:
            snap = db["stock_analysis_snapshots"].find_one(
                {"stock_id": sid, "timestamp": {"$gte": feat_cutoff}},
                sort=[("timestamp", -1), ("_id", -1)],
                projection={"_id": 0, "market_data": 1, "features": 1, "timestamp": 1},
            )
            if snap and isinstance(snap.get("market_data"), dict):
                md = snap.get("market_data") or {}
            elif snap and isinstance(snap.get("features"), dict):
                # Build a minimal md-like object for scoring
                md = {
                    "indicators": (snap.get("features") or {}).get("indicators"),
                    "strategies": (snap.get("features") or {}).get("strategies"),
                }
        except Exception:
            md = {}

        # No reusable features found: fall back to a live fetch.
        if not isinstance(md, dict) or (not md.get("indicators") and not md.get("strategies")):
            try:
                md = fetch_market_data(
                    zerodha_client=zerodha_client,
                    symbol=sym,
                    timeframes=["5minute", "15minute", "day"],
                    db=db,
                    include_quote=False,
                )
            except Exception:
                logger.exception("[NIFTY50] Feature compute failed for %s", sym)
                md = {}

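        # Rank on derived features alone: `opportunity` is the primary key and
        # `quality` breaks ties in the descending sort below.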
        s = score_market_data(md)
        scored_rows.append(
            {
                "stock_id": sid,
                "symbol": sym,
                "score": float(s.get("opportunity") or 0.0),
                "direction": s.get("direction"),
                "quality": float(s.get("quality") or 0.0),
            }
        )

    if not scored_rows:
        logger.info("[NIFTY50] No scored rows; skipping")
        return

    scored_rows.sort(key=lambda x: (float(x.get("score") or 0.0), float(x.get("quality") or 0.0)), reverse=True)

    top_n = _clamp_int(NIFTY50_GPT_TOP_N, 1, 50)
    ranked = scored_rows[:top_n]

    # High-signal ranked list (symbols only)
    try:
        logger.info("[NIFTY50] Ranked symbols | top=%s", [r.get("symbol") for r in ranked if r.get("symbol")])
    except Exception:
        logger.debug("[NIFTY50] Failed to log ranked symbols", exc_info=True)

    # ---- GPT only for top ranked symbols, still respecting snapshot freshness ----
    freshness_minutes = _clamp_int(FRESHNESS_MINUTES, 1, 24 * 60)
    cutoff = datetime.utcnow() - timedelta(minutes=freshness_minutes)
    eligible_ids = [str(x.get("stock_id")) for x in ranked if x.get("stock_id")]
    fresh_ids = _latest_snapshot_stock_ids(db, eligible_ids, cutoff=cutoff)

    to_analyze: List[Dict[str, Any]] = [x for x in ranked if str(x.get("stock_id")) not in fresh_ids]
    if not to_analyze:
        logger.info("[NIFTY50] Top ranked all fresh (<=%d min); skipping", freshness_minutes)
        return

    max_per_cycle = _clamp_int(MAX_ANALYSES_PER_CYCLE, 1, 500)
    to_analyze = to_analyze[:max_per_cycle]

    analyzed = 0
    attempted_symbols: List[str] = []
    zerodha_ok_symbols: List[str] = []
    gpt_ok_rows: List[Dict[str, Any]] = []
    for r in to_analyze:
        sid = str(r.get("stock_id"))
        sym = (r.get("symbol") or "").strip().upper()
        if not sid or not sym:
            continue

        attempted_symbols.append(sym)

        try:
            analysis = analyze_symbol_service(
                db=db,
                zerodha_client=zerodha_client,
                symbol=sym,
                timeframes=["5minute", "15minute", "30minute", "day", "week", "month"],
                question="general intraday analysis",
                context="nifty50",
                user_id=user_id,
                include_market_data=True,
            )
        except Exception as e:
            logger.exception("[NIFTY50] Analysis failed for %s: %s", sym, e)
            continue

        # Everything below assumes a dict payload; a non-dict response would
        # otherwise raise while building snapshot_doc and abort the whole cycle.
        if not isinstance(analysis, dict):
            logger.warning("[NIFTY50] Unexpected non-dict analysis for %s; skipping", sym)
            continue

        # Track Zerodha success (market_data candles)
        try:
            md = analysis.get("market_data") if isinstance(analysis, dict) else None
            md = md if isinstance(md, dict) else {}
            candles = md.get("candles")
            total_candles = 0
            if isinstance(candles, dict):
                total_candles = sum(len(v or []) for v in candles.values() if isinstance(v, list))
            if md.get("error") is None and total_candles > 0:
                zerodha_ok_symbols.append(sym)
        except Exception:
            logger.debug("[NIFTY50] Zerodha success check failed for %s", sym, exc_info=True)

        targets = analysis.get("targets") if isinstance(analysis, dict) else None
        primary_target = targets[0] if isinstance(targets, list) and targets else None

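        # Fold confidence, decision_probability and score into one normalized
        # confidence value (precedence is defined in normalize_confidence).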
        conf = normalize_confidence(
            analysis.get("confidence") if isinstance(analysis, dict) else None,
            decision_probability=analysis.get("decision_probability") if isinstance(analysis, dict) else None,
            score=analysis.get("score") if isinstance(analysis, dict) else None,
        )
        if isinstance(analysis, dict):
            analysis["confidence"] = conf

        # Track GPT success
        try:
            if isinstance(analysis, dict):
                gpt_err = analysis.get("gpt_error_type") or analysis.get("error")
                decision = (analysis.get("decision") or "").strip().upper()
                prob = analysis.get("decision_probability")
                if not gpt_err and decision in ("BUY", "SELL", "HOLD"):
                    gpt_ok_rows.append({"symbol": sym, "decision": decision, "prob": prob, "confidence": conf})
        except Exception:
            logger.debug("[NIFTY50] GPT success check failed for %s", sym, exc_info=True)

        snapshot_doc: Dict[str, Any] = {
            "stock_id": sid,
            "timestamp": datetime.utcnow(),
            "decision": analysis.get("decision"),
            "confidence": conf,
            "entry": analysis.get("entry_price"),
            "stop_loss": analysis.get("stop_loss"),
            "target": analysis.get("price_target") or analysis.get("target") or primary_target,
            "reason": analysis.get("rationale"),
            "source": "NIFTY50",
            "analysis": compact_analysis_for_persistence(analysis),
            "market_data": _compact_market_data_no_candles(analysis.get("market_data") if isinstance(analysis, dict) else None),
            "features": analysis.get("features") if isinstance(analysis, dict) else None,
        }

        snapshot_id = None
        try:
            ins = db["stock_analysis_snapshots"].insert_one(snapshot_doc)
            snapshot_id = str(ins.inserted_id)
        except Exception:
            logger.exception("[NIFTY50] Failed to persist analysis snapshot for %s", sym)

        # Paper trading simulation (DB-only): update OPEN paper trades with latest candle,
        # then create a new one when the signal is VERY_STRONG.
        try:
            process_signal_and_market_update(
                db=db,
                user_id=user_id,
                account_id=account_id,
                stock_id=sid,
                symbol=sym,
                analysis=analysis,
                market_data=analysis.get("market_data") if isinstance(analysis, dict) else None,
                source="NIFTY50",
                preferred_timeframe="60minute",
                snapshot_id=snapshot_id,
                snapshot_timestamp=snapshot_doc.get("timestamp"),
            )
        except Exception:
            logger.exception("[NIFTY50] Paper trading simulation failed for %s", sym)

        analyzed += 1

    logger.info("[NIFTY50] Cycle finished | analyzed=%d stale=%d", analyzed, len(to_analyze))
    try:
        logger.info(
            "[NIFTY50] Cycle summary | attempted=%s | zerodha_ok=%s | gpt_ok=%s",
            attempted_symbols,
            zerodha_ok_symbols,
            gpt_ok_rows,
        )
    except Exception:
        logger.debug("[NIFTY50] Failed to log cycle summary", exc_info=True)


def _run_nifty50_cycle_sync() -> None:
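    """Run one cycle against a DB handle from the generator-style dependency.

    `next()` pulls the handle out of `get_mongo_db()`; closing the generator in
    `finally` drives its cleanup path.
    """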
    db_gen = get_mongo_db()
    db = next(db_gen)
    try:
        _run_nifty50_cycle(db)
    finally:
        try:
            db_gen.close()
        except Exception:
            logger.debug("[NIFTY50] Error closing DB generator", exc_info=True)


async def nifty50_intraday_loop(interval_seconds: Optional[int] = None) -> None:
    """Background loop for NIFTY50 constituents.

    - Refreshes constituents list on a TTL (daily/weekly) via NSE API
    - Runs analysis only during IST market hours
    - Writes snapshots to `stock_analysis_snapshots` with source="NIFTY50"

    IMPORTANT: API endpoints must remain DB-only; all Zerodha/GPT calls happen in this loop.
    """

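    # Brief startup delay so the first cycle does not race app initialization.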
    await asyncio.sleep(5)

    refresh_minutes = _clamp_int(REFRESH_MINUTES, 5, 24 * 60)
    interval = interval_seconds if isinstance(interval_seconds, int) and interval_seconds > 0 else refresh_minutes * 60

    logger.info("[NIFTY50] Background loop started (interval=%ss)", interval)

    while True:
        if not _is_market_hours_ist():
            from app.v1.utils.market_time import backend_market_window, format_window, local_time_str

            logger.info(
                "[NIFTY50] Backend skipped due to time | now=%s | window=%s",
                local_time_str(),
                format_window(backend_market_window()),
            )
            await asyncio.sleep(interval)
            continue

        if _NIFTY50_LOCK.locked():
            logger.info("[NIFTY50] Previous cycle still running; skipping this tick")
            await asyncio.sleep(interval)
            continue

        async with _NIFTY50_LOCK:
            try:
                await asyncio.to_thread(_run_nifty50_cycle_sync)
            except Exception:
                logger.exception("[NIFTY50] Unexpected error in cycle")

        await asyncio.sleep(interval)
