"""Analysis + chat + orders + misc endpoints for platform teGPT.

NOTE: Move-only split from `teGPT.py`.
"""

from fastapi import APIRouter, Depends, HTTPException, Body, Query
from typing import Dict, Any, List, Optional
import logging
import os
from datetime import datetime, timedelta, timezone
import threading
import uuid

from app.db import database
from app.v1.dependencies.auth import get_current_userdetails
from app.v1.services.teGPT import (
    chat_with_stock_service,
    get_user_signals_service,
    get_zerodha_client_service,
    place_order_service,
)

from app.v1.services.stock_history import get_stock_history
from app.v1.services.early_movers import build_early_movers_snapshot

from app.v1.utils.confidence import normalize_confidence

from app.v1.services.intraday_watchlist import ist_date_str

from app.v1.utils.market_time import (
    backend_market_window,
    format_window,
    is_backend_market_hours,
    local_time_str,
    market_tz_name,
)


_IST = timezone(timedelta(hours=5, minutes=30))


def _ist_now(now_utc: Optional[datetime] = None) -> datetime:
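    """Return the current time in IST (UTC+05:30), interpreting `now_utc` as naive UTC."""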
    now = now_utc or datetime.utcnow()
    try:
        return now.replace(tzinfo=timezone.utc).astimezone(_IST)
    except Exception:
        return datetime.now(tz=_IST)


def _parse_hhmm(v: str, *, default_h: int, default_m: int) -> tuple[int, int]:
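    """Parse an "HH:MM" string into (hour, minute), falling back to the defaults.

    Illustrative: _parse_hhmm("21:30", default_h=20, default_m=0) -> (21, 30),
    while "" or out-of-range values such as "25:99" fall back to (20, 0).
    """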
    try:
        s = (v or "").strip()
        if not s or ":" not in s:
            return default_h, default_m
        hh, mm = s.split(":", 1)
        h = int(hh)
        m = int(mm)
        if 0 <= h <= 23 and 0 <= m <= 59:
            return h, m
    except Exception:
        pass
    return default_h, default_m


def _default_early_movers_date_key(*, today_ist: str, now_utc: Optional[datetime] = None) -> Dict[str, Any]:
    """Return preferred early movers snapshot date key.

    Rule: after 20:00 IST, prefer next trading day; else prefer today.
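
    Illustrative: with today_ist="2024-05-17" (a Friday) and a current time of
    21:15 IST, the cutoff has passed, so "preferred" resolves to the next
    trading day "2024-05-20" (Monday); at 10:00 IST it stays "2024-05-17".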
    """

    cutoff_raw = os.getenv("EARLY_MOVERS_DEFAULT_NEXT_DAY_AFTER_HHMM", "20:00")
    ch, cm = _parse_hhmm(cutoff_raw, default_h=20, default_m=0)
    now_ist = _ist_now(now_utc)
    after_cutoff = (now_ist.hour, now_ist.minute) >= (ch, cm)
    tomorrow_ist = _next_trading_day_key(today_ist) or today_ist
    preferred = tomorrow_ist if after_cutoff else today_ist
    return {
        "preferred": preferred,
        "today_ist": today_ist,
        "tomorrow_ist": tomorrow_ist,
        "after_cutoff": after_cutoff,
        "cutoff_hhmm": f"{ch:02d}:{cm:02d}",
    }

from .teGPT_helpers import (
    _confidence_score,
    _extract_analysis_fields,
    _format_analysis_for_stream_row,
    _get_latest_snapshot_by_symbol,
    _get_symbols_from_live_movers,
    _normalize_mover_param,
    _what_changed,
)

router = APIRouter()
logger = logging.getLogger(__name__)


def _next_trading_day_key(date_key: str) -> Optional[str]:
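    """Return the next weekday after `date_key` as YYYY-MM-DD.

    Only weekends are skipped; exchange holidays are not considered.
    Illustrative: "2024-05-17" (Friday) -> "2024-05-20" (Monday).
    """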
    try:
        base = datetime.fromisoformat(str(date_key).strip()).date()
    except Exception:
        return None
    d = base + timedelta(days=1)
    while d.weekday() >= 5:
        d = d + timedelta(days=1)
    return d.isoformat()


def _get_system_meta_value(db, key: str) -> Optional[str]:
    doc = db["system_meta"].find_one({"key": key})
    if not doc:
        return None
    v = doc.get("value")
    return None if v is None else str(v)


def _set_system_meta_value(db, key: str, value: str) -> None:
    db["system_meta"].update_one(
        {"key": key},
        {"$set": {"key": key, "value": value, "updated_at": datetime.utcnow()}},
        upsert=True,
    )


def _acquire_meta_lock(db, *, key: str, owner: str, ttl_seconds: int) -> bool:
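    """Best-effort distributed lock stored in `system_meta` (relies on the
    unique index on `key`). Returns True when acquired, or when refreshed by
    the same owner. A minimal usage sketch ("my_lock" is illustrative):

        owner = f"manual:pid={os.getpid()}"
        if _acquire_meta_lock(db, key="my_lock", owner=owner, ttl_seconds=600):
            try:
                ...  # critical section
            finally:
                _release_meta_lock(db, key="my_lock", owner=owner)
    """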
    now = datetime.utcnow()
    expires = now + timedelta(seconds=max(30, int(ttl_seconds)))

    # IMPORTANT: Do not use upsert with a conditional filter here.
    # If a lock doc exists but does not match the condition (lock held), upsert
    # attempts to INSERT a new doc with the same unique `key` and triggers
    # DuplicateKeyError. Instead:
    # 1) Try insert (fast path when no lock doc exists)
    # 2) If exists, try conditional update (only if expired or same owner)
    try:
        db["system_meta"].insert_one({"key": key, "owner": owner, "expires_at": expires, "updated_at": now})
        return True
    except Exception as e:
        # If a doc already exists with this unique key, we'll attempt the conditional update below.
        # For other errors, bubble up so callers can surface the real failure.
        msg = str(e)
        code = getattr(e, "code", None)
        is_dup = (code == 11000) or ("E11000" in msg) or ("duplicate key" in msg.lower())
        if not is_dup:
            raise

    res = db["system_meta"].update_one(
        {
            "key": key,
            "$or": [
                {"expires_at": {"$lt": now}},
                {"expires_at": {"$exists": False}},
                {"owner": owner},
            ],
        },
        {"$set": {"owner": owner, "expires_at": expires, "updated_at": now}},
        upsert=False,
    )
    return res.modified_count == 1


def _release_meta_lock(db, *, key: str, owner: str) -> None:
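    """Release the lock by expiring it, but only if still held by `owner`."""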
    try:
        db["system_meta"].update_one(
            {"key": key, "owner": owner},
            {"$set": {"expires_at": datetime.utcnow(), "updated_at": datetime.utcnow()}},
        )
    except Exception:
        logger.debug("Early movers manual: error releasing lock", exc_info=True)


def _find_early_mover_entry(snapshot: Dict[str, Any], symbol: str) -> Dict[str, Any]:
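    """Locate `symbol` in a snapshot's top.bullish / top.bearish lists.

    Returns the bias, the 1-based rank within that list, the raw entry dict,
    and the list length; all fields are None when the symbol is absent.
    """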
    sym = (symbol or "").strip().upper()
    top = snapshot.get("top") if isinstance(snapshot.get("top"), dict) else {}
    bullish = top.get("bullish") if isinstance(top.get("bullish"), list) else []
    bearish = top.get("bearish") if isinstance(top.get("bearish"), list) else []

    for idx, it in enumerate(bullish):
        if isinstance(it, dict) and (it.get("symbol") or "").strip().upper() == sym:
            return {"bias": "BULLISH", "rank": idx + 1, "entry": it, "total_in_bias": len(bullish)}
    for idx, it in enumerate(bearish):
        if isinstance(it, dict) and (it.get("symbol") or "").strip().upper() == sym:
            return {"bias": "BEARISH", "rank": idx + 1, "entry": it, "total_in_bias": len(bearish)}

    return {"bias": None, "rank": None, "entry": None, "total_in_bias": None}


def _early_mover_algo_explanation(*, bias: Optional[str], algo: Dict[str, Any]) -> List[str]:
    out: List[str] = []
    b = (bias or "").strip().upper()
    key_level = algo.get("key_level")
    dist = algo.get("distance_to_level_pct")
    atr_ratio = algo.get("atr_ratio")
    vol_ratio = algo.get("vol_ratio")
    rs5d = algo.get("relative_strength_5d")
    sma20d = algo.get("sma20_dist_pct")

    if b == "BULLISH":
        out.append("Setup: bullish early breakout watch")
        if key_level is not None:
            out.append(f"Key resistance level: {key_level}")
        if dist is not None:
            out.append(f"Distance to resistance (%): {dist}")
    elif b == "BEARISH":
        out.append("Setup: bearish early breakdown watch")
        if key_level is not None:
            out.append(f"Key support level: {key_level}")
        if dist is not None:
            out.append(f"Distance to support (%): {dist}")

    if atr_ratio is not None:
        out.append(f"Volatility contraction (ATR ratio): {atr_ratio}")
    if vol_ratio is not None:
        out.append(f"Volume condition (vol ratio vs 20D): {vol_ratio}")
    if rs5d is not None:
        out.append(f"Relative strength vs index (5D): {rs5d}")
    if sma20d is not None:
        out.append(f"SMA20 proximity (%): {sma20d}")

    return out


@router.get("/early-movers/study/{symbol}", summary="DB-only: Early Mover Advantage study")
async def early_mover_study(
    symbol: str,
    snapshot_date: Optional[str] = Query(None, description="Optional snapshot date (YYYY-MM-DD). Defaults to today (IST), falling back to the latest snapshot."),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Return the early-movers snapshot entry + daily candles for a symbol.

    IMPORTANT: DB-only. Must not call Zerodha/ET/GPT.
    Weekly/monthly candles are derived by the frontend from daily candles.
    """
    sym = (symbol or "").strip().upper()
    if not sym:
        raise HTTPException(status_code=400, detail="symbol is required")

    coll = db[os.getenv("EARLY_MOVERS_SNAPSHOT_COLLECTION", "early_movers_snapshots")]

    snap = None
    if snapshot_date and str(snapshot_date).strip():
        snap = coll.find_one({"date": str(snapshot_date).strip()})

    if not snap:
        ist_date = ist_date_str()
        snap = coll.find_one({"date": ist_date})
        if not snap:
            snap = coll.find_one({}, sort=[("date", -1)])

    if not isinstance(snap, dict):
        raise HTTPException(status_code=404, detail="No early movers snapshot available yet")

    found = _find_early_mover_entry(snap, sym)
    entry = found.get("entry") if isinstance(found.get("entry"), dict) else {}
    bias = found.get("bias")
    rank = found.get("rank")

    algo = {
        "key_level": entry.get("key_level"),
        "distance_to_level_pct": entry.get("distance_to_level_pct"),
        "atr10": entry.get("atr10"),
        "atr_ratio": entry.get("atr_ratio"),
        "vol_ratio": entry.get("vol_ratio"),
        "relative_strength_5d": entry.get("relative_strength_5d"),
        "sma20_dist_pct": entry.get("sma20_dist_pct"),
        "market_context": entry.get("market_context"),
    }

    gpt = entry.get("gpt") if isinstance(entry.get("gpt"), dict) else None
    risk_flags = entry.get("risk_flags") if isinstance(entry.get("risk_flags"), list) else []

    stock_doc = db["stocks"].find_one({"symbol": sym, "exchange": "NSE"}, {"_id": 0, "stock_id": 1, "symbol": 1}) or {}
    stock_id = stock_doc.get("stock_id")
    daily = get_stock_history(db, str(stock_id or ""), "day") if stock_id else []
    if not isinstance(daily, list):
        daily = []

    # Fallback: compute RS(5D) from DB candles + snapshot market nifty_5d_return
    # (DB-only; does not call Zerodha/GPT)
    if algo.get("relative_strength_5d") is None and daily:
        try:
            def _dk(x: Dict[str, Any]) -> str:
                d = x.get("date")
                return str(d)[:10] if d else ""

            daily_sorted = sorted([c for c in daily if isinstance(c, dict)], key=_dk)
            closes: List[float] = []
            for c in daily_sorted:
                try:
                    cl = float(c.get("close"))
                    if cl > 0:
                        closes.append(cl)
                except Exception:
                    continue

            mkt = snap.get("market") if isinstance(snap.get("market"), dict) else {}
            raw_n5 = mkt.get("nifty_5d_return")
            nifty_5d = float(raw_n5) if isinstance(raw_n5, (int, float)) and not isinstance(raw_n5, bool) else None

            if nifty_5d is not None and len(closes) >= 6:
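                # Illustrative arithmetic: closes[-6]=100.0, closes[-1]=105.0 gives
                # stock_5d = 5.0; with nifty_5d_return=2.0, RS(5D) = 5.0 - 2.0 = 3.0.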
                stock_5d = (closes[-1] / closes[-6] - 1.0) * 100.0
                algo["relative_strength_5d"] = round(stock_5d - nifty_5d, 3)
        except Exception:
            pass

    return {
        "symbol": sym,
        "snapshot_date": snap.get("date"),
        "snapshot_generated_at": snap.get("generated_at"),
        "market": snap.get("market"),
        "universe": snap.get("universe"),
        "bias": bias,
        "rank_in_bias": rank,
        "total_in_bias": found.get("total_in_bias"),
        "algo_score": entry.get("algo_score") if entry else None,
        "final_score": entry.get("final_score") if entry else None,
        "algo": algo,
        "gpt": gpt,
        "snapshot_gpt": snap.get("gpt") if isinstance(snap.get("gpt"), dict) else None,
        "risk_flags": risk_flags,
        "algo_explanation": _early_mover_algo_explanation(bias=bias, algo=algo),
        "candles": {"daily": daily},
    }


@router.post("/early-movers/run", summary="Run early movers snapshot on demand")
async def run_early_movers(
    target_date: Optional[str] = Query(None, description="IST date to build snapshot for (YYYY-MM-DD). If omitted, uses next trading day from latest stock-history EOD meta."),
    force: bool = Query(False, description="If true, allow re-run even if snapshot already exists."),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Manual trigger (for an admin button).

    NOTE: This may call GPT if `EARLY_MOVERS_GPT_MODE=rerank`.
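
    The job runs in a background thread and this endpoint returns immediately
    with a job_id; poll GET /early-movers/run/status?job_id=... for the result.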
    """
    allow_during = str(os.getenv("EARLY_MOVERS_MANUAL_ALLOW_DURING_MARKET_HOURS", "0")).strip().lower() in ("1", "true", "yes")
    logger.info(
        "Early movers manual run requested | target_date=%s force=%s allow_during_mkt=%s now=%s in_mkt=%s",
        (target_date or "").strip() if target_date else None,
        bool(force),
        bool(allow_during),
        local_time_str(),
        bool(is_backend_market_hours()),
    )
    if (not allow_during) and is_backend_market_hours():
        w = backend_market_window()
        raise HTTPException(
            status_code=409,
            detail=(
                f"Manual Early Movers run is disabled during market hours "
                f"({format_window(w)} {market_tz_name()}). Now: {local_time_str()}"
            ),
        )

    snapshot_coll = db[os.getenv("EARLY_MOVERS_SNAPSHOT_COLLECTION", "early_movers_snapshots")]
    lock_key = os.getenv("EARLY_MOVERS_LOCK_META_KEY", "early_movers_snapshot_lock")
    lock_ttl = int(os.getenv("EARLY_MOVERS_LOCK_TTL_SECONDS", "1800"))
    owner = f"manual:pid={os.getpid()}"

    jobs_coll_name = os.getenv("EARLY_MOVERS_JOBS_COLLECTION", "system_jobs")

    td = (target_date or "").strip() if target_date else ""
    if not td:
        eod_key = os.getenv("STOCK_HISTORY_LAST_EOD_META_KEY", "stock_history_refresh_last_eod_run")
        last_eod = _get_system_meta_value(db, eod_key)
        if not last_eod:
            raise HTTPException(status_code=409, detail=f"Missing system_meta.{eod_key}")
        td = _next_trading_day_key(last_eod)
        if not td:
            raise HTTPException(status_code=400, detail="Could not compute target_date")

    if (not force) and snapshot_coll.find_one({"date": td}, {"_id": 1}):
        return {"ok": True, "skipped": True, "reason": "snapshot_exists", "date": td}

    if not _acquire_meta_lock(db, key=lock_key, owner=owner, ttl_seconds=lock_ttl):
        raise HTTPException(status_code=409, detail="Early movers is busy (lock held)")

    job_id = str(uuid.uuid4())
    job_doc = {
        "_id": job_id,
        "type": "early_movers_run",
        "status": "queued",
        "created_at": datetime.utcnow(),
        "target_date": td,
        "force": bool(force),
        "requested_by": getattr(current_user, "get", lambda k, d=None: None)("email", None)
        if isinstance(current_user, dict)
        else None,
        "meta": {"owner": owner, "lock_key": lock_key, "lock_ttl": int(lock_ttl)},
    }
    try:
        db[jobs_coll_name].update_one({"_id": job_id}, {"$set": job_doc}, upsert=True)
    except Exception:
        # If job doc cannot be persisted, don't start the job.
        _release_meta_lock(db, key=lock_key, owner=owner)
        raise HTTPException(status_code=500, detail="Failed to create job record")

    def _run_job_in_background(*, job_id: str, td: str, force: bool, owner: str, lock_key: str, lock_ttl: int, jobs_coll: str):
        client = None
        try:
            from app.db import database as _dbmod
            from app.v1.background.global_intraday import _get_global_zerodha_client

            client, job_db = _dbmod.create_mongo_client_and_db()
            job_db[jobs_coll].update_one(
                {"_id": job_id},
                {"$set": {"status": "running", "started_at": datetime.utcnow(), "updated_at": datetime.utcnow()}},
            )

            # Recheck under lock in the background context too.
            snapshot_coll2 = job_db[os.getenv("EARLY_MOVERS_SNAPSHOT_COLLECTION", "early_movers_snapshots")]
            if (not force) and snapshot_coll2.find_one({"date": td}, {"_id": 1}):
                job_db[jobs_coll].update_one(
                    {"_id": job_id},
                    {
                        "$set": {
                            "status": "skipped",
                            "finished_at": datetime.utcnow(),
                            "updated_at": datetime.utcnow(),
                            "result": {"ok": True, "skipped": True, "reason": "snapshot_exists", "date": td},
                        }
                    },
                )
                return

            z = _get_global_zerodha_client(job_db)
            if not z:
                raise RuntimeError("Global Zerodha client unavailable")

            t0 = datetime.utcnow()
            result = build_early_movers_snapshot(db=job_db, zerodha_client=z, ist_date=td)
            dt_s = (datetime.utcnow() - t0).total_seconds()
            job_db[jobs_coll].update_one(
                {"_id": job_id},
                {
                    "$set": {
                        "status": "ok",
                        "finished_at": datetime.utcnow(),
                        "updated_at": datetime.utcnow(),
                        "duration_seconds": float(dt_s),
                        "result": result,
                    }
                },
            )
        except Exception as e:
            try:
                if client is None:
                    from app.db import database as _dbmod
                    client, job_db = _dbmod.create_mongo_client_and_db()
                job_db[jobs_coll].update_one(
                    {"_id": job_id},
                    {
                        "$set": {
                            "status": "error",
                            "finished_at": datetime.utcnow(),
                            "updated_at": datetime.utcnow(),
                            "error": str(e),
                        }
                    },
                )
            except Exception:
                pass
        finally:
            try:
                if client is not None:
                    # job_db is always bound when client is not None (they are
                    # assigned together), so release the lock via this handle.
                    _release_meta_lock(job_db, key=lock_key, owner=owner)
            except Exception:
                pass
            try:
                if client is not None:
                    client.close()
            except Exception:
                pass

    try:
        t = threading.Thread(
            target=_run_job_in_background,
            kwargs={
                "job_id": job_id,
                "td": td,
                "force": bool(force),
                "owner": owner,
                "lock_key": lock_key,
                "lock_ttl": int(lock_ttl),
                "jobs_coll": jobs_coll_name,
            },
            daemon=True,
        )
        t.start()
    except Exception:
        _release_meta_lock(db, key=lock_key, owner=owner)
        db[jobs_coll_name].update_one(
            {"_id": job_id},
            {"$set": {"status": "error", "finished_at": datetime.utcnow(), "updated_at": datetime.utcnow(), "error": "Failed to start background thread"}},
        )
        raise HTTPException(status_code=500, detail="Failed to start background job")

    # Return immediately; job runs in background.
    return {"ok": True, "enqueued": True, "job_id": job_id, "date": td}


@router.get("/early-movers/run/status", summary="Get status of an early movers manual run job")
async def early_movers_run_status(
    job_id: str = Query(..., description="Job id returned by POST /early-movers/run"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    jobs_coll_name = os.getenv("EARLY_MOVERS_JOBS_COLLECTION", "system_jobs")
    doc = db[jobs_coll_name].find_one({"_id": str(job_id)}, {"_id": 1, "type": 1, "status": 1, "created_at": 1, "started_at": 1, "finished_at": 1, "target_date": 1, "force": 1, "duration_seconds": 1, "error": 1, "result": 1})
    if not doc:
        raise HTTPException(status_code=404, detail="Job not found")
    # Ensure only our job type is returned.
    if (doc.get("type") or "") != "early_movers_run":
        raise HTTPException(status_code=404, detail="Job not found")
    return {"ok": True, "job": doc}


# ============ ANALYSIS ============


@router.post("/analyze/{symbol}", summary="Analyze single symbol using ChatGPT")
async def analyze_symbol(
    symbol: str,
    payload: Dict[str, Any] = Body(default={}),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """DB-only analysis endpoint used by Stream UI.

    IMPORTANT: This endpoint must NOT call Zerodha or GPT.
    It returns the latest persisted analysis snapshot from
    `stock_analysis_snapshots` for the requested symbol.
    """
    try:
        snap = _get_latest_snapshot_by_symbol(db, symbol)
        if not snap:
            raise HTTPException(status_code=404, detail="No analysis snapshot available yet")

        analysis = snap.get("analysis") or {}
        if isinstance(analysis, dict):
            analysis.setdefault("symbol", (symbol or "").strip().upper())
            ts = snap.get("timestamp")
            if isinstance(ts, datetime):
                analysis.setdefault("timestamp", ts.isoformat())

            analysis["confidence"] = normalize_confidence(
                analysis.get("confidence") or analysis.get("confidence_level"),
                decision_probability=analysis.get("decision_probability"),
                score=analysis.get("score"),
            )

            # Include persisted market_data (candles/quote) when available.
            # This remains DB-only and enables charting in the UI.
            md_snap = snap.get("market_data")
            md_analysis = analysis.get("market_data")
            if isinstance(md_snap, dict) and md_snap:
                # Merge snapshot market_data into analysis.market_data.
                # This is important when older analysis embedded only some timeframes,
                # but the snapshot root has the complete candle set.
                merged: Dict[str, Any] = {}
                if isinstance(md_analysis, dict) and md_analysis:
                    merged.update(md_analysis)

                # Fill the quote from the snapshot when the analysis quote is missing/empty.
                if (not isinstance(merged.get("quote"), dict) or not merged.get("quote")) and isinstance(
                    md_snap.get("quote"), dict
                ):
                    merged["quote"] = md_snap.get("quote")

                # Prefer snapshot instrument_token if missing.
                if merged.get("instrument_token") is None and md_snap.get("instrument_token") is not None:
                    merged["instrument_token"] = md_snap.get("instrument_token")

                # Merge candles timeframe-by-timeframe, filling missing or empty arrays.
                c_analysis = merged.get("candles")
                c_snap = md_snap.get("candles")
                if isinstance(c_snap, dict) and c_snap:
                    out_candles: Dict[str, Any] = {}
                    if isinstance(c_analysis, dict):
                        out_candles.update(c_analysis)
                    for tf, rows in c_snap.items():
                        if tf not in out_candles or not (isinstance(out_candles.get(tf), list) and len(out_candles.get(tf))):
                            out_candles[tf] = rows
                    merged["candles"] = out_candles
                elif (not isinstance(c_analysis, dict) or not c_analysis) and c_snap is not None:
                    # Rare case: candles stored as array.
                    merged["candles"] = c_snap

                # Fill top-level timestamp if missing
                if merged.get("timestamp") is None and md_snap.get("timestamp") is not None:
                    merged["timestamp"] = md_snap.get("timestamp")

                analysis["market_data"] = merged

        return {"status": "success", "symbol": symbol, "analysis": analysis}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Failed to fetch DB analysis snapshot for %s", symbol)
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/alerts-legacy", summary="Get user-specific alerts (legacy, DB-only)")
async def get_user_alerts(
    limit: int = Query(20, description="Max items per section"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """DB-only alerts endpoint.

    Builds user-specific opportunities from:
    - `user_likes` (is_active=True)
    - latest `stock_analysis_snapshots`
    - optional `orders` for Triggered/Closed bucketing

    IMPORTANT: Must NOT call Zerodha or GPT.
    """
    try:
        user_id = str(current_user.get("_id"))

        likes = list(db["user_likes"].find({"user_id": user_id, "is_active": True}, {"stock_id": 1}))
        stock_ids = [d.get("stock_id") for d in likes if d.get("stock_id")]
        if not stock_ids:
            return {
                "status": "ok",
                "data": {"top_signals": [], "monitoring": [], "triggered": [], "closed": []},
                "timestamp": datetime.utcnow(),
            }

        stocks = list(
            db["stocks"].find({"stock_id": {"$in": stock_ids}}, {"stock_id": 1, "symbol": 1, "instrument_token": 1})
        )
        by_stock_id = {s.get("stock_id"): s for s in stocks if s.get("stock_id")}

        live_rows = list(
            db["live_movers"].find({"stock_id": {"$in": stock_ids}}, {"stock_id": 1, "rank": 1, "mover_type": 1})
        )
        live_by_stock_id = {r.get("stock_id"): r for r in live_rows if r.get("stock_id")}

        # Optional: latest order status per symbol
        symbols = [
            (by_stock_id.get(sid) or {}).get("symbol")
            for sid in stock_ids
            if (by_stock_id.get(sid) or {}).get("symbol")
        ]
        latest_order_by_symbol: Dict[str, Dict[str, Any]] = {}
        if symbols and "orders" in db.list_collection_names():
            cur = db["orders"].find({"user_id": user_id, "symbol": {"$in": symbols}}).sort([("created_at", -1)])
            for o in cur:
                sym = (o.get("symbol") or "").strip().upper()
                if sym and sym not in latest_order_by_symbol:
                    latest_order_by_symbol[sym] = o

        top_signals: List[Dict[str, Any]] = []
        monitoring: List[Dict[str, Any]] = []
        triggered: List[Dict[str, Any]] = []
        closed: List[Dict[str, Any]] = []

        for sid in stock_ids:
            stock = by_stock_id.get(sid)
            if not stock:
                continue
            symbol = (stock.get("symbol") or "").strip().upper()
            if not symbol:
                continue

            snaps = list(db["stock_analysis_snapshots"].find({"stock_id": sid}).sort([("timestamp", -1)]).limit(2))
            if not snaps:
                continue

            curr = _extract_analysis_fields(symbol, stock, snaps[0])
            prev = _extract_analysis_fields(symbol, stock, snaps[1]) if len(snaps) > 1 else None
            curr["what_changed"] = _what_changed(prev, curr)

            live = live_by_stock_id.get(sid) or {}
            curr["rank"] = live.get("rank")
            curr["mover_type"] = live.get("mover_type")

            order = latest_order_by_symbol.get(symbol)
            if order:
                status = (order.get("status") or "").strip().upper()
                if status in ("PLACED", "OPEN", "TRIGGERED"):
                    triggered.append(curr)
                    continue
                if status in ("COMPLETE", "CLOSED", "CANCELLED", "REJECTED"):
                    closed.append(curr)
                    continue

            decision = (curr.get("decision") or "HOLD").strip().upper()
            conf = _confidence_score(curr.get("confidence") or "")
            if decision in ("BUY", "SELL") and conf >= 2:
                top_signals.append(curr)
            else:
                monitoring.append(curr)

        def sort_key(row: Dict[str, Any]):
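            # Sort: highest confidence first, then best live-movers rank, then timestamp.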
            rank = row.get("rank")
            rank_val = int(rank) if isinstance(rank, int) else 9999
            conf_val = _confidence_score(row.get("confidence") or "")
            ts = row.get("timestamp") or ""
            return (-conf_val, rank_val, ts)

        for arr in (top_signals, monitoring, triggered, closed):
            arr.sort(key=sort_key)

        return {
            "status": "ok",
            "data": {
                "top_signals": top_signals[:limit],
                "monitoring": monitoring[:limit],
                "triggered": triggered[:limit],
                "closed": closed[:limit],
            },
            "timestamp": datetime.utcnow(),
        }
    except Exception as e:
        logger.exception("Failed to build alerts")
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/signals", summary="Get user's latest signals/analysis")
async def get_signals(
    limit: int = Query(20, description="Maximum number of signals"),
    symbol: Optional[str] = Query(None, description="Filter by symbol"),
    decision: Optional[str] = Query(None, description="Filter by decision: BUY, SELL, HOLD"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get user's latest trading signals from ChatGPT analysis"""
    try:
        result = get_user_signals_service(
            db=db,
            user_id=str(current_user.get("_id")),
            limit=limit,
            symbol=symbol,
            decision=decision
        )
        return {"status": "success", "signals": result}
    except Exception as e:
        logger.exception("Failed to get signals")
        raise HTTPException(status_code=500, detail=str(e))


# ============ INTERACTIVE CHAT ============


@router.post("/chat/{symbol}", summary="Interactive chat about a specific symbol")
async def chat_with_symbol(
    symbol: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """
    Interactive chat about a symbol with real-time market data
    Payload: {
        "message": "What's the RSI looking like?",
        "include_fresh_data": true,
        "conversation_id": "optional-uuid"
    }
    """
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        result = chat_with_stock_service(
            db=db,
            zerodha_client=zerodha_client,
            symbol=symbol,
            message=payload.get("message", ""),
            user_id=str(current_user.get("_id")),
            conversation_id=payload.get("conversation_id"),
            include_fresh_data=payload.get("include_fresh_data", True)
        )
        return {"status": "success", "response": result}
    except Exception as e:
        logger.exception(f"Chat failed for symbol {symbol}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/chat/{symbol}/enqueue", summary="Enqueue interactive chat (async)")
async def chat_with_symbol_enqueue(
    symbol: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Non-blocking chat.

    Returns immediately with a job_id; the frontend should poll the status endpoint.

    Payload: {
      "message": "...",
      "include_fresh_data": true,
      "conversation_id": "optional-uuid"
    }
    """
    user_id = str(current_user.get("_id")) if isinstance(current_user, dict) else ""
    if not user_id:
        raise HTTPException(status_code=401, detail="Unauthorized")

    sym = (symbol or "").strip().upper()
    msg = (payload.get("message") or "").strip()
    if not sym:
        raise HTTPException(status_code=400, detail="symbol is required")
    if not msg:
        raise HTTPException(status_code=400, detail="message is required")

    jobs_coll_name = os.getenv("CHAT_JOBS_COLLECTION", os.getenv("EARLY_MOVERS_JOBS_COLLECTION", "system_jobs"))
    job_id = str(uuid.uuid4())
    conversation_id = payload.get("conversation_id") or str(uuid.uuid4())
    include_fresh_data = bool(payload.get("include_fresh_data", True))

    job_doc = {
        "_id": job_id,
        "type": "chat_run",
        "status": "queued",
        "created_at": datetime.utcnow(),
        "updated_at": datetime.utcnow(),
        "user_id": user_id,
        "symbol": sym,
        "conversation_id": conversation_id,
        "message": msg,
        "include_fresh_data": include_fresh_data,
        "requested_by": getattr(current_user, "get", lambda k, d=None: None)("email", None)
        if isinstance(current_user, dict)
        else None,
    }

    try:
        db[jobs_coll_name].update_one({"_id": job_id}, {"$set": job_doc}, upsert=True)
    except Exception:
        raise HTTPException(status_code=500, detail="Failed to create job record")

    def _run_chat_job_in_background(*, job_id: str, jobs_coll: str, user_id: str, sym: str, msg: str, conversation_id: str, include_fresh_data: bool):
        client = None
        try:
            from app.db import database as _dbmod

            client, job_db = _dbmod.create_mongo_client_and_db()
            job_db[jobs_coll].update_one(
                {"_id": job_id},
                {"$set": {"status": "running", "started_at": datetime.utcnow(), "updated_at": datetime.utcnow()}},
            )

            # Recreate minimum current_user shape expected by get_zerodha_client_service.
            current_user_stub = {"_id": user_id}
            zerodha_client = None
            if include_fresh_data:
                zerodha_client = get_zerodha_client_service(job_db, current_user_stub)

            t0 = datetime.utcnow()
            result = chat_with_stock_service(
                db=job_db,
                zerodha_client=zerodha_client,
                symbol=sym,
                message=msg,
                user_id=user_id,
                conversation_id=conversation_id,
                include_fresh_data=include_fresh_data,
            )
            dt_s = (datetime.utcnow() - t0).total_seconds()

            job_db[jobs_coll].update_one(
                {"_id": job_id},
                {
                    "$set": {
                        "status": "ok",
                        "finished_at": datetime.utcnow(),
                        "updated_at": datetime.utcnow(),
                        "duration_seconds": float(dt_s),
                        "response": result,
                    }
                },
            )
        except Exception as e:
            try:
                if client is None:
                    from app.db import database as _dbmod

                    client, job_db = _dbmod.create_mongo_client_and_db()
                job_db[jobs_coll].update_one(
                    {"_id": job_id},
                    {
                        "$set": {
                            "status": "error",
                            "finished_at": datetime.utcnow(),
                            "updated_at": datetime.utcnow(),
                            "error": str(e),
                        }
                    },
                )
            except Exception:
                pass
        finally:
            try:
                if client is not None:
                    client.close()
            except Exception:
                pass

    try:
        t = threading.Thread(
            target=_run_chat_job_in_background,
            kwargs={
                "job_id": job_id,
                "jobs_coll": jobs_coll_name,
                "user_id": user_id,
                "sym": sym,
                "msg": msg,
                "conversation_id": conversation_id,
                "include_fresh_data": include_fresh_data,
            },
            daemon=True,
        )
        t.start()
    except Exception:
        db[jobs_coll_name].update_one(
            {"_id": job_id},
            {"$set": {"status": "error", "finished_at": datetime.utcnow(), "updated_at": datetime.utcnow(), "error": "Failed to start background thread"}},
        )
        raise HTTPException(status_code=500, detail="Failed to start background job")

    return {"ok": True, "enqueued": True, "job_id": job_id, "conversation_id": conversation_id, "symbol": sym}


@router.get("/chat/enqueue/status", summary="Get status of an async chat job")
async def chat_job_status(
    job_id: str = Query(..., description="Job id returned by POST /chat/{symbol}/enqueue"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    user_id = str(current_user.get("_id")) if isinstance(current_user, dict) else ""
    if not user_id:
        raise HTTPException(status_code=401, detail="Unauthorized")

    jobs_coll_name = os.getenv("CHAT_JOBS_COLLECTION", os.getenv("EARLY_MOVERS_JOBS_COLLECTION", "system_jobs"))
    doc = db[jobs_coll_name].find_one(
        {"_id": str(job_id), "type": "chat_run", "user_id": user_id},
        {"_id": 1, "type": 1, "status": 1, "created_at": 1, "started_at": 1, "finished_at": 1, "duration_seconds": 1, "symbol": 1, "conversation_id": 1, "error": 1, "response": 1},
    )
    if not doc:
        raise HTTPException(status_code=404, detail="Job not found")
    return {"ok": True, "job": doc}


@router.get("/chat/{symbol}/history", summary="Get chat history for a symbol")
async def get_chat_history(
    symbol: str,
    limit: int = Query(50, description="Maximum messages"),
    conversation_id: Optional[str] = Query(None, description="Optional conversation id"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get chat history for a specific symbol"""
    try:
        user_id = str(current_user.get("_id"))
        # Hide internal system context snapshots from the user-facing chat history.
        q = {"user_id": user_id, "symbol": symbol, "is_context_snapshot": {"$ne": True}}
        if conversation_id:
            q["conversation_id"] = conversation_id

        chats = list(db["chats"].find(q).sort("created_at", -1).limit(limit))
        
        for chat in chats:
            chat["_id"] = str(chat["_id"])
        
        return {"status": "success", "history": chats}
    except Exception as e:
        logger.exception(f"Failed to get chat history for {symbol}")
        raise HTTPException(status_code=500, detail=str(e))


# ============ ORDER MANAGEMENT ============


@router.post("/order/place", summary="Place order based on ChatGPT analysis")
async def place_trading_order(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """
    Place trading order with ChatGPT risk validation
    Payload: {
        "symbol": "RELIANCE",
        "action": "BUY" | "SELL",
        "quantity": 10,
        "order_type": "MARKET" | "LIMIT",
        "price": 2500.0,  // Required for LIMIT orders
        "analysis_id": "optional-analysis-reference",
        "confirm": true  // Required for execution
    }
    """
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        result = place_order_service(
            db=db,
            zerodha_client=zerodha_client,
            user_id=str(current_user.get("_id")),
            symbol=payload.get("symbol"),
            action=payload.get("action"),
            quantity=payload.get("quantity"),
            order_type=payload.get("order_type", "MARKET"),
            price=payload.get("price"),
            analysis_id=payload.get("analysis_id"),
            confirmed=payload.get("confirm", False)
        )
        return {"status": "success", "order": result}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Order placement failed")
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/orders", summary="Get user's order history")
async def get_orders(
    limit: int = Query(50, description="Maximum orders"),
    status: Optional[str] = Query(None, description="Filter by status"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get user's order history"""
    try:
        user_id = str(current_user.get("_id"))
        query = {"user_id": user_id}
        if status:
            query["status"] = status
            
        orders = list(db["orders"].find(query).sort("created_at", -1).limit(limit))
        
        for order in orders:
            order["_id"] = str(order["_id"])
        
        return {"status": "success", "orders": orders}
    except Exception as e:
        logger.exception("Failed to get orders")
        raise HTTPException(status_code=500, detail=str(e))


# ============ LEGACY COMPATIBILITY ============


@router.get("/live", summary="Legacy compatibility - get live analysis")
async def get_live_analysis(
    mover: str = Query("gainers", description="gainers or losers"),
    limit: int = Query(10, description="Max symbols to return"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """DB-only legacy endpoint.

    IMPORTANT: Must NOT call Zerodha or GPT. Uses latest
    `stock_analysis_snapshots` for symbols in `live_movers`.
    """
    try:
        def _safe_float(v: Any) -> Optional[float]:
            try:
                if v is None:
                    return None
                if isinstance(v, bool):
                    return None
                f = float(v)
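                # NaN is the only float not equal to itself; also reject +/-inf.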
                if f != f or f in (float("inf"), float("-inf")):
                    return None
                return f
            except Exception:
                return None

        def _compute_change_pct(quote: Optional[Dict[str, Any]], market_data: Optional[Dict[str, Any]]) -> Optional[float]:
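            """Best-effort day-change %, preferring quote fields over candles.

            Illustrative: a quote with net_change=5.0 and a previous close
            (ohlc.close) of 250.0 yields (5.0 / 250.0) * 100 = 2.0.
            """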
            if isinstance(quote, dict) and quote:
                direct = (
                    quote.get("net_change_percentage")
                    or quote.get("percentage_change")
                    or quote.get("percent_change")
                    or quote.get("pchange")
                    or quote.get("pChange")
                )
                f = _safe_float(direct)
                if f is not None:
                    return f
                net = _safe_float(quote.get("net_change") or quote.get("change"))
                prev = _safe_float((quote.get("ohlc") or {}).get("close"))
                if net is not None and prev and prev != 0:
                    return (net / prev) * 100.0
                last = _safe_float(quote.get("last_price"))
                if last is not None and prev and prev != 0:
                    return ((last - prev) / prev) * 100.0

            # Fallback (DB-only): compute from stored candles (prefer daily).
            if not isinstance(market_data, dict) or not market_data:
                return None
            candles = market_data.get("candles")
            if not isinstance(candles, dict) or not candles:
                return None
            for tf in ("day", "DAY", "1day", "15minute", "15min", "5minute"):
                series = candles.get(tf)
                if not isinstance(series, list) or len(series) < 2:
                    continue
                last_close = _safe_float((series[-1] or {}).get("close"))
                prev_close = _safe_float((series[-2] or {}).get("close"))
                if last_close is None or prev_close is None or prev_close == 0:
                    continue
                return ((last_close - prev_close) / prev_close) * 100.0

            # If daily candles are EOD-only (no current session bar yet), show an intraday
            # move estimate from the available intraday series.
            try:
                s5 = candles.get("5minute")
                if isinstance(s5, list) and len(s5) >= 2:
                    first_open = _safe_float((s5[0] or {}).get("open"))
                    last_close = _safe_float((s5[-1] or {}).get("close"))
                    if first_open is not None and last_close is not None and first_open != 0:
                        return ((last_close - first_open) / first_open) * 100.0
            except Exception:
                return None

            return None

        def _ensure_two_targets(
            *,
            decision: str,
            entry_zone: Optional[Dict[str, Any]],
            entry_price: Optional[float],
            sl: Optional[float],
            targets: List[float],
        ) -> List[float]:
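            """Guarantee up to two targets using a 1R/2R risk heuristic.

            Illustrative: a BUY with entry_ref=100.0 and sl=95.0 has risk=5.0,
            so with no provided targets this returns [105.0, 110.0] (1R, 2R).
            """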
            out = [float(t) for t in targets if _safe_float(t) is not None]

            # Derive a reference entry from zone mid if available.
            zone_mid = None
            if isinstance(entry_zone, dict):
                lo = _safe_float(entry_zone.get("low") or entry_zone.get("lower"))
                hi = _safe_float(entry_zone.get("high") or entry_zone.get("upper"))
                if lo is not None and hi is not None and lo > 0 and hi > 0:
                    zone_mid = (lo + hi) / 2.0
            entry_ref = zone_mid if zone_mid is not None else entry_price
            entry_ref = _safe_float(entry_ref)
            sl = _safe_float(sl)

            d = (decision or "").strip().upper()
            if entry_ref is None or sl is None or d not in ("BUY", "SELL"):
                return out[:2]

            risk = (entry_ref - sl) if d == "BUY" else (sl - entry_ref)
            if risk <= 0:
                return out[:2]

            if len(out) == 0:
                t1 = entry_ref + risk if d == "BUY" else entry_ref - risk
                t2 = entry_ref + 2 * risk if d == "BUY" else entry_ref - 2 * risk
                return [round(t1, 2), round(t2, 2)]

            if len(out) == 1:
                t1 = float(out[0])
                if d == "BUY":
                    t2 = max(t1 + risk, entry_ref + 2 * risk)
                else:
                    t2 = min(t1 - risk, entry_ref - 2 * risk)
                out.append(round(float(t2), 2))

            return out[:2]

        def _ensure_sl_side(
            *,
            decision: str,
            entry_zone: Optional[Dict[str, Any]],
            sl: Optional[float],
        ) -> Optional[float]:
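            """Keep the stop-loss on the correct side of the entry zone.

            The buffer comes from ENTRY_SL_BUFFER_BP (basis points, default 10,
            i.e. 0.10%). Illustrative: a BUY zone with low=100.0 caps the SL at
            100.0 * (1 - 0.001) = 99.9.
            """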
            sl = _safe_float(sl)
            if sl is None or not isinstance(entry_zone, dict):
                return sl
            lo = _safe_float(entry_zone.get("low") or entry_zone.get("lower"))
            hi = _safe_float(entry_zone.get("high") or entry_zone.get("upper"))
            if lo is None or hi is None or lo <= 0 or hi <= 0:
                return sl

            d = (decision or "").strip().upper()
            buf = int(os.getenv("ENTRY_SL_BUFFER_BP", "10") or "10")
            buf = max(0, min(200, buf))
            buf_pct = float(buf) / 10000.0

            if d == "BUY":
                max_ok = float(lo) * (1.0 - buf_pct)
                if sl >= float(lo):
                    return max_ok
                return min(float(sl), max_ok)
            if d == "SELL":
                min_ok = float(hi) * (1.0 + buf_pct)
                if sl <= float(hi):
                    return min_ok
                return max(float(sl), min_ok)
            return sl

        # IMPORTANT: Use stock_id as the join key.
        # The older flow (live_movers -> symbol -> stocks -> stock_id) is brittle and
        # can yield empty lists if symbol/exchange normalization differs across tables.
        mover_type = _normalize_mover_param(mover)
        if mover_type == "BOTH":
            gainers = list(
                db["live_movers"]
                .find({"mover_type": "GAINER"}, {"_id": 0, "stock_id": 1, "rank": 1, "mover_type": 1})
                .sort([("rank", 1), ("last_updated", -1)])
                .limit(limit)
            )
            losers = list(
                db["live_movers"]
                .find({"mover_type": "LOSER"}, {"_id": 0, "stock_id": 1, "rank": 1, "mover_type": 1})
                .sort([("rank", 1), ("last_updated", -1)])
                .limit(limit)
            )
            live_rows = (gainers + losers)[:limit]
        else:
            q = {}
            if mover_type in ("GAINER", "LOSER"):
                q["mover_type"] = mover_type
            live_rows = list(
                db["live_movers"]
                .find(q, {"_id": 0, "stock_id": 1, "rank": 1, "mover_type": 1})
                .sort([("rank", 1), ("last_updated", -1)])
                .limit(limit)
            )

        stock_ids = [r.get("stock_id") for r in (live_rows or []) if r.get("stock_id")]
        if not stock_ids:
            return {"status": "success", "scanned": 0, "results": []}

        # Map stock_id -> stock meta (symbol, instrument_token) for best-effort display.
        stocks = list(
            db["stocks"].find(
                {"stock_id": {"$in": stock_ids}},
                {"_id": 0, "stock_id": 1, "symbol": 1, "instrument_token": 1},
            )
        )
        stock_by_id = {s.get("stock_id"): s for s in stocks if s.get("stock_id")}

        # Load latest snapshot per stock_id in one pass.
        latest_by_id: Dict[str, Dict[str, Any]] = {}
        cur = (
            db["stock_analysis_snapshots"]
            .find({"stock_id": {"$in": stock_ids}})
            .sort([("timestamp", -1), ("_id", -1)])
        )
        for snap in cur:
            sid = snap.get("stock_id")
            if sid and sid not in latest_by_id:
                latest_by_id[sid] = snap

        formatted_results = []
        for row in live_rows:
            sid = row.get("stock_id")
            if not sid:
                continue

            snap = latest_by_id.get(sid)
            if not snap:
                continue

            result = snap.get("analysis") or {}
            if not isinstance(result, dict):
                continue

            stock = stock_by_id.get(sid) or {}
            fallback_symbol = (stock.get("symbol") or "").strip().upper()
            fallback_token = stock.get("instrument_token")

            md = snap.get("market_data") if isinstance(snap, dict) else None
            quote = None
            if isinstance(result.get("market_data"), dict):
                quote = (result.get("market_data") or {}).get("quote")
            if quote is None and isinstance(md, dict):
                quote = md.get("quote")

            md_for_change = None
            if isinstance(result.get("market_data"), dict):
                md_for_change = result.get("market_data")
            elif isinstance(md, dict):
                md_for_change = md
            change_pct = _compute_change_pct(quote, md_for_change)

            # Prefer execution-plan targets when present (new schema)
            raw_exec_targets = result.get("exec_targets")
            exec_targets: List[float] = []
            if isinstance(raw_exec_targets, list):
                exec_targets = [float(x) for x in raw_exec_targets if _safe_float(x) is not None]

            entry_zone = result.get("entry_zone") if isinstance(result.get("entry_zone"), dict) else None
            sl_exec = _safe_float(result.get("exec_sl"))
            sl_fallback = _safe_float(result.get("sl"))
            sl = sl_exec if sl_exec is not None else sl_fallback

            sl = _ensure_sl_side(decision=str(result.get("decision", "HOLD")), entry_zone=entry_zone, sl=sl)

            exec_targets = _ensure_two_targets(
                decision=str(result.get("decision", "HOLD")),
                entry_zone=entry_zone,
                entry_price=_safe_float(result.get("entry_price")) or _safe_float(result.get("current_price")),
                sl=sl or _safe_float(result.get("stop_loss")),
                targets=exec_targets,
            )

            raw_targets = result.get("targets")
            exit_target = None
            if isinstance(raw_targets, (list, tuple)) and raw_targets:
                exit_target = raw_targets[0]

            formatted_results.append({
                "symbol": (result.get("symbol") or fallback_symbol or "").strip().upper(),
                "rank": len(formatted_results) + 1,
                "trend_label": result.get("trend_label") or "",
                "decision": result.get("decision", "HOLD"),
                "confidence": result.get("confidence", "LOW"),
                "instrument_token": result.get("instrument_token") if result.get("instrument_token") is not None else fallback_token,
                "change_pct": change_pct,

                # New execution-plan fields (preferred by newer frontend)
                "entry_zone": entry_zone,
                "entry_trigger_reason": result.get("entry_trigger_reason"),
                "signal_state": result.get("signal_state"),
                "exec_sl": sl,
                "exec_targets": exec_targets,
                "exec_rr_ratio": result.get("exec_rr_ratio"),

                # Core numeric fields from GPT analysis
                "entry_price": result.get("entry_price"),
                "stop_loss": result.get("stop_loss"),
                "price_target": result.get("price_target"),
                "risk_reward_ratio": result.get("risk_reward_ratio"),
                "current_price": result.get("current_price"),
                "metrics": {
                    "decision": result.get("decision", "HOLD"),
                    "confidence": result.get("confidence", "LOW"),
                    "volume_drop": 0.0,
                    "momentum_drop": 0.0,
                    "ema_gap": 0.0,
                    "rsi_divergence": False,
                    "macd_divergence": False,
                    "wedge": False,
                    "pivot_level": "",
                    "near_upper_circuit": False,
                },
                # Full GPT rationale kept only in popup on frontend
                "rationale": result.get("rationale", []),
                # Compact technical + strategy features for popup
                "features": result.get("features", {}),
                "technical_indicators": result.get("technical_indicators", {}),
                "targets": {
                    "entry": result.get("entry_price"),
                    "exit": exit_target
                },
                "timestamp": result.get("timestamp")
            })
        
        return {
            "status": "success",
            "scanned": len(stock_ids),
            "results": formatted_results
        }
        
    except Exception as e:
        logger.exception("Legacy live analysis failed")
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/stream", summary="DB-only: stream (single call, multi-tab)")
async def get_combined_stream(
    freshness_minutes: int = Query(
        int(os.getenv("STREAM_FRESHNESS_MINUTES", "10") or "10"),
        ge=1,
        le=60,
        description="Used only for reporting freshness stats (no compute)",
    ),
    movers_limit: int = Query(
        int(os.getenv("STREAM_MOVERS_LIMIT", "20") or "20"),
        ge=0,
        le=200,
        description="Max ET movers per side (gainers/losers)",
    ),
    early_movers_limit: int = Query(
        int(os.getenv("STREAM_EARLY_MOVERS_LIMIT", "40") or "40"),
        ge=0,
        le=200,
        description="Max early movers (bullish+bearish combined)",
    ),
    watchlist_limit: int = Query(
        int(os.getenv("STREAM_WATCHLIST_LIMIT", "50") or "50"),
        ge=0,
        le=200,
        description="Max personal watchlist symbols",
    ),
    early_movers_date: Optional[str] = Query(
        None,
        description="Early movers snapshot date to show: 'today', 'tomorrow', or YYYY-MM-DD. If omitted, after 20:00 IST prefers next trading day (fallback to today).",
    ),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Single-call DB-only stream endpoint.

    Returns 4 tabs worth of data in ONE response:
    - Early Movers
    - ET Gainers
    - ET Losers
    - Personal Watchlist

    IMPORTANT: Must NOT call Zerodha, NSE, ET scraping, or GPT.
    """

    try:
        user_id = str(current_user.get("_id"))
        ist_date = ist_date_str()
        cutoff = datetime.utcnow() - timedelta(minutes=int(freshness_minutes))

        # ----- Build lists (DB-only) -----

        # Personal watchlist (today)
        manual_items = list(
            db["user_portfolio_items"]
            .find({"user_id": user_id, "ist_date": ist_date, "status": "ACTIVE"}, {"_id": 0, "symbol": 1})
            .sort([("updated_at", -1), ("created_at", -1)])
            .limit(int(watchlist_limit))
        )
        portfolio_symbols = [(it.get("symbol") or "").strip().upper() for it in manual_items if (it.get("symbol") or "").strip()]

        # Early movers (default after 20:00 IST: prefer next trading day; fallback to today; else latest)
        early_coll = db[os.getenv("EARLY_MOVERS_SNAPSHOT_COLLECTION", "early_movers_snapshots")]

        pref = _default_early_movers_date_key(today_ist=ist_date)
        requested_key: Optional[str] = None
        raw_req = (early_movers_date or "").strip()
        if raw_req:
            low = raw_req.lower()
            if low == "today":
                requested_key = pref["today_ist"]
            elif low == "tomorrow":
                requested_key = pref["tomorrow_ist"]
            else:
                try:
                    datetime.strptime(raw_req, "%Y-%m-%d")
                    requested_key = raw_req
                except Exception:
                    raise HTTPException(status_code=400, detail="Invalid early_movers_date; expected today|tomorrow|YYYY-MM-DD")

        preferred_key = requested_key or pref["preferred"]

        em_snap = early_coll.find_one({"date": preferred_key})
        if (not em_snap) and preferred_key != pref["today_ist"]:
            em_snap = early_coll.find_one({"date": pref["today_ist"]})
        if not em_snap:
            em_snap = early_coll.find_one({}, sort=[("date", -1)])
        early_symbols: List[str] = []
        early_meta: Optional[Dict[str, Any]] = None
        early_info_by_symbol: Dict[str, Dict[str, Any]] = {}
        if isinstance(em_snap, dict):
            top = em_snap.get("top") if isinstance(em_snap.get("top"), dict) else {}
            bullish = top.get("bullish") if isinstance(top.get("bullish"), list) else []
            bearish = top.get("bearish") if isinstance(top.get("bearish"), list) else []

            # Prefer segmented output (bullish then bearish). Backward-compat: if a legacy
            # snapshot only has top.combined, fall back to that.
            combined = top.get("combined") if isinstance(top.get("combined"), list) else []
            ordered: List[Dict[str, Any]] = []
            if bullish or bearish:
                for it in bullish:
                    if isinstance(it, dict):
                        ordered.append(it)
                for it in bearish:
                    if isinstance(it, dict):
                        ordered.append(it)
            else:
                for it in combined:
                    if isinstance(it, dict):
                        ordered.append(it)

            rnk = 1
            for it in ordered:
                if not isinstance(it, dict):
                    continue
                sym = (it.get("symbol") or "").strip().upper()
                if not sym:
                    continue
                if sym in early_symbols:
                    continue
                early_symbols.append(sym)
                bias = (it.get("bias") or "").strip().upper() or None
                strategy = (it.get("strategy") or "").strip().upper() or None
                if not strategy and bias:
                    # Backward-compat: older snapshots don't have strategy; infer from bias.
                    if bias == "BULLISH":
                        strategy = "BREAKOUT"
                    elif bias == "BEARISH":
                        strategy = "BREAKDOWN"
                early_info_by_symbol[sym] = {
                    "bias": bias,
                    "strategy": strategy,
                    "overall_score": int(it.get("overall_score") or it.get("final_score") or 0),
                    "overall_score_float": float(it.get("overall_score_float") or it.get("final_score_float") or it.get("overall_score") or it.get("final_score") or 0.0),
                    "early_mover_score": int(it.get("early_mover_score") or it.get("algo_score") or it.get("score") or 0),
                    "rank": rnk,
                }
                rnk += 1

            if early_movers_limit > 0:
                early_symbols = early_symbols[: int(early_movers_limit)]
            used_date = em_snap.get("date")
            # Offer the "other" day as a toggle: if we're showing tomorrow, the
            # alternate is today, and vice versa.
            alt_date = None
            if used_date:
                alt_date = pref["tomorrow_ist"] if used_date == pref["today_ist"] else pref["today_ist"]

            alt_exists = False
            if alt_date:
                try:
                    alt_exists = bool(early_coll.find_one({"date": alt_date}, {"_id": 1}))
                except Exception:
                    alt_exists = False

            early_meta = {
                "date": used_date,
                "requested_date": preferred_key,
                "today_ist": pref["today_ist"],
                "tomorrow_ist": pref["tomorrow_ist"],
                "after_cutoff": bool(pref["after_cutoff"]),
                "cutoff_hhmm": pref["cutoff_hhmm"],
                "alternate_date": alt_date,
                "alternate_exists": bool(alt_exists),
                "generated_at": em_snap.get("generated_at"),
                "market": em_snap.get("market"),
                "universe": em_snap.get("universe"),
                "algo": em_snap.get("algo"),
                "gpt": em_snap.get("gpt"),
            }

        # ET Movers (gainers/losers)
        gainers_rows = []
        losers_rows = []
        if movers_limit > 0:
            gainers_rows = list(
                db["live_movers"]
                .find({"mover_type": "GAINER"}, {"_id": 0, "stock_id": 1, "rank": 1})
                .sort([("rank", 1), ("last_updated", -1)])
                .limit(int(movers_limit))
            )
            losers_rows = list(
                db["live_movers"]
                .find({"mover_type": "LOSER"}, {"_id": 0, "stock_id": 1, "rank": 1})
                .sort([("rank", 1), ("last_updated", -1)])
                .limit(int(movers_limit))
            )

        def _sid_list(rows: List[Dict[str, Any]]) -> List[str]:
            out: List[str] = []
            for r in rows or []:
                sid = r.get("stock_id")
                if sid and sid not in out:
                    out.append(sid)
            return out

        gainer_ids = _sid_list(gainers_rows)
        loser_ids = _sid_list(losers_rows)
        # Gainers first, then any losers not already present (order-preserving dedupe).
        mover_stock_ids = gainer_ids + [x for x in loser_ids if x not in gainer_ids]

        # Stocks master lookup for all symbols we may need
        all_symbols = list(dict.fromkeys([*portfolio_symbols, *early_symbols]))
        stocks_by_symbol: Dict[str, Dict[str, Any]] = {}
        if all_symbols:
            docs = list(
                db["stocks"].find(
                    {"symbol": {"$in": all_symbols}},
                    {"_id": 0, "stock_id": 1, "symbol": 1, "exchange": 1, "instrument_token": 1, "name": 1},
                )
            )
            for s in docs:
                sym = (s.get("symbol") or "").strip().upper()
                if sym:
                    stocks_by_symbol[sym] = s

        mover_stocks_by_id: Dict[str, Dict[str, Any]] = {}
        if mover_stock_ids:
            docs = list(
                db["stocks"].find(
                    {"stock_id": {"$in": mover_stock_ids}},
                    {"_id": 0, "stock_id": 1, "symbol": 1, "exchange": 1, "instrument_token": 1, "name": 1},
                )
            )
            for s in docs:
                sid = s.get("stock_id")
                if sid:
                    mover_stocks_by_id[sid] = s

        # Build a single stock_id set for snapshot fetch
        stock_ids_for_snaps: List[str] = []
        for sym in all_symbols:
            sid = (stocks_by_symbol.get(sym) or {}).get("stock_id")
            if sid and sid not in stock_ids_for_snaps:
                stock_ids_for_snaps.append(sid)
        for sid in mover_stock_ids:
            if sid and sid not in stock_ids_for_snaps:
                stock_ids_for_snaps.append(sid)

        latest_by_id: Dict[str, Dict[str, Any]] = {}
        if stock_ids_for_snaps:
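            # Cursor is sorted newest-first, so the first document seen per
            # stock_id is its latest analysis snapshot (one query for all ids).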
            cur = (
                db["stock_analysis_snapshots"]
                .find({"stock_id": {"$in": stock_ids_for_snaps}})
                .sort([("timestamp", -1), ("_id", -1)])
            )
            for snap in cur:
                sid = snap.get("stock_id")
                if sid and sid not in latest_by_id:
                    latest_by_id[sid] = snap

        def _row_for_symbol(*, sym: str, mover: str, rank: int) -> Optional[Dict[str, Any]]:
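            # Build one stream row from the freshest snapshot for `sym`; when no
            # analysis exists yet, synthesize a HOLD/LOW placeholder so the tab
            # still renders.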
            stock = stocks_by_symbol.get(sym) or {}
            sid = stock.get("stock_id")
            snap = latest_by_id.get(sid) if sid else None
            analysis = snap.get("analysis") if isinstance(snap, dict) and isinstance(snap.get("analysis"), dict) else None
            ts_dt = snap.get("timestamp") if isinstance(snap, dict) and isinstance(snap.get("timestamp"), datetime) else None

            if analysis is None:
                # Placeholder row so the list still renders even if analysis not ready yet.
                analysis = {
                    "symbol": sym,
                    "decision": "HOLD",
                    "confidence": "LOW",
                    "instrument_token": stock.get("instrument_token"),
                }
            else:
                analysis = dict(analysis)
                analysis.setdefault("symbol", sym)
                if analysis.get("instrument_token") is None and stock.get("instrument_token") is not None:
                    analysis["instrument_token"] = stock.get("instrument_token")
                if ts_dt is not None:
                    analysis.setdefault("timestamp", ts_dt.isoformat())

            row = _format_analysis_for_stream_row(analysis, rank=rank, mover=mover)
            if mover == "earlymovers":
                em = early_info_by_symbol.get((sym or "").strip().upper())
                if isinstance(em, dict):
                    row["early_mover"] = em
            return row

        def _row_for_stock_id(*, sid: str, mover: str, rank: int) -> Optional[Dict[str, Any]]:
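            # Same shape as _row_for_symbol but keyed by stock_id, for the ET
            # gainers/losers tabs whose source rows carry stock_id, not symbol.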
            stock = mover_stocks_by_id.get(sid) or {}
            sym = (stock.get("symbol") or "").strip().upper()
            if not sym:
                return None
            snap = latest_by_id.get(sid)
            analysis = snap.get("analysis") if isinstance(snap, dict) and isinstance(snap.get("analysis"), dict) else None
            ts_dt = snap.get("timestamp") if isinstance(snap, dict) and isinstance(snap.get("timestamp"), datetime) else None
            if analysis is None:
                analysis = {
                    "symbol": sym,
                    "decision": "HOLD",
                    "confidence": "LOW",
                    "instrument_token": stock.get("instrument_token"),
                }
            else:
                analysis = dict(analysis)
                analysis.setdefault("symbol", sym)
                if analysis.get("instrument_token") is None and stock.get("instrument_token") is not None:
                    analysis["instrument_token"] = stock.get("instrument_token")
                if ts_dt is not None:
                    analysis.setdefault("timestamp", ts_dt.isoformat())
            return _format_analysis_for_stream_row(analysis, rank=rank, mover=mover)

        results: List[Dict[str, Any]] = []
        fresh = 0
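        # NOTE: the freshness tally below only inspects early-movers rows; the
        # other tabs do not contribute to the `cached` count.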

        # Early movers tab (ordered by % change magnitude)
        early_rows: List[Dict[str, Any]] = []
        r = 1
        for sym in early_symbols:
            row = _row_for_symbol(sym=sym, mover="earlymovers", rank=r)
            if not row:
                continue
            early_rows.append(row)
            try:
                ts = row.get("timestamp")
                if isinstance(ts, str) and ts:
                    dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
                    if dt.tzinfo is not None:
                        # Aware timestamps would raise TypeError when compared to
                        # the naive UTC cutoff; normalize to naive UTC first.
                        dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
                    if dt >= cutoff:
                        fresh += 1
            except Exception:
                pass
            r += 1

        def _abs_change(row: Dict[str, Any]) -> float:
            v = row.get("change_pct")
            try:
                f = float(v)
                # NaN never equals itself (f != f); NaN and +/-inf sort to the
                # bottom via a large negative sentinel.
                if f != f or f in (float("inf"), float("-inf")):
                    return -1e18
                return abs(f)
            except Exception:
                return -1e18

        early_rows.sort(key=_abs_change, reverse=True)
        for i, row in enumerate(early_rows, start=1):
            row["rank"] = i
        results.extend(early_rows)

        # ET gainers tab
        r = 1
        for it in gainers_rows:
            sid = it.get("stock_id")
            if not sid:
                continue
            row = _row_for_stock_id(sid=sid, mover="gainers", rank=r)
            if not row:
                continue
            results.append(row)
            r += 1

        # ET losers tab
        r = 1
        for it in losers_rows:
            sid = it.get("stock_id")
            if not sid:
                continue
            row = _row_for_stock_id(sid=sid, mover="losers", rank=r)
            if not row:
                continue
            results.append(row)
            r += 1

        # Personal watchlist tab
        r = 1
        for sym in portfolio_symbols:
            row = _row_for_symbol(sym=sym, mover="portfolio", rank=r)
            if not row:
                continue
            results.append(row)
            r += 1

        return {
            "status": "success",
            "results": results,
            "tabs": {
                "earlymovers": {"count": len(early_symbols)},
                "gainers": {"count": len(gainers_rows)},
                "losers": {"count": len(losers_rows)},
                "portfolio": {"count": len(portfolio_symbols)},
            },
            "early_movers_analysis": early_meta,
            "cached": int(fresh),
            "computed": 0,
            "freshness_minutes": freshness_minutes,
            "ist_date": ist_date,
            "timestamp": datetime.utcnow(),
        }
    except HTTPException:
        # Let explicit HTTP errors (e.g. the 400 for a bad early_movers_date)
        # propagate instead of being rewrapped as a 500 below.
        raise
    except Exception as e:
        logger.exception("Failed to build combined stream")
        raise HTTPException(status_code=500, detail=str(e))


# ============ FRONTEND COMPATIBILITY ============


@router.get("/profile", summary="Get Zerodha profile status")
async def get_profile(
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get Zerodha profile and authentication status"""
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        
        # Try to get profile to verify connection
        try:
            profile = zerodha_client.profile()
            return {
                "status": "ok",
                "authenticated": True,
                "profile": profile
            }
        except Exception:
            logger.debug("Zerodha profile fetch failed", exc_info=True)
            return {
                "status": "error",
                "authenticated": False,
                "message": "Zerodha authentication required"
            }
    except HTTPException as e:
        if e.status_code == 404:
            return {"status": "error", "authenticated": False, "message": "Zerodha settings not found"}
        elif e.status_code == 403:
            return {"status": "error", "authenticated": False, "message": "Zerodha not authenticated"}
        raise
    except Exception as e:
        logger.exception("Profile check failed")
        return {"status": "error", "authenticated": False, "message": str(e)}


@router.post("/stream/feedback/{analysis_id}", summary="Provide feedback on analysis")
async def stream_feedback(
    analysis_id: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Provide thumbs up/down feedback on analysis.

    Stored in `user_likes` as a per-user preference signal:
    - feedback="up"   -> is_active=True
    - feedback="down" -> is_active=False

    We resolve `stock_id` by looking up the snapshot that contains
    `analysis.analysis_id == analysis_id`.
    """
    try:
        feedback = payload.get("feedback")  # "up" or "down"
        if feedback not in ("up", "down"):
            raise HTTPException(status_code=400, detail="feedback must be 'up' or 'down'")

        snap = db["stock_analysis_snapshots"].find_one({"analysis.analysis_id": analysis_id})
        if not snap:
            # Fallback: some older snapshots stored the id as a raw ObjectId
            # rather than a string, so retry the lookup with an ObjectId key.
            try:
                from bson import ObjectId  # local import; bson ships with pymongo

                snap = db["stock_analysis_snapshots"].find_one(
                    {"analysis.analysis_id": ObjectId(analysis_id)}
                )
            except Exception:
                snap = None
        if not snap:
            raise HTTPException(status_code=404, detail="Analysis snapshot not found")

        stock_id = snap.get("stock_id")
        if not stock_id:
            raise HTTPException(status_code=404, detail="Snapshot missing stock_id")

        user_id = str(current_user.get("_id"))
        now = datetime.utcnow()
        is_active = feedback == "up"
        db["user_likes"].update_one(
            {"user_id": user_id, "stock_id": stock_id},
            {
                "$set": {
                    "is_active": is_active,
                    "last_feedback": feedback,
                    "updated_at": now,
                },
                "$setOnInsert": {
                    "created_at": now,
                },
            },
            upsert=True,
        )

        return {"status": "ok", "feedback": feedback, "stock_id": stock_id, "is_active": is_active}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Feedback update failed")
        raise HTTPException(status_code=500, detail=str(e))
