# app/v1/routers/teGPT.py
"""
Pure ChatGPT-based Trading Engine Router
No custom analysis algorithms; all signal generation is delegated to OpenAI GPT models
"""
from fastapi import APIRouter, Depends, HTTPException, Body, Query, File, UploadFile
from typing import Dict, Any, List, Optional
import logging
from datetime import datetime, timedelta
from bson import ObjectId
import re
import csv
import io
import math

from pymongo import UpdateOne
from collections import deque

import os

from app.db import database
from app.v1.dependencies.auth import get_current_userdetails
from app.v1.services.zerodha.client import ZerodhaClient
from app.v1.services.teGPT import (
    DEFAULT_TIMEFRAMES,
    get_zerodha_client_service,
    get_market_movers_service,
    analyze_symbol_service,
    bulk_analyze_service,
    chat_with_stock_service,
    place_order_service,
    get_user_signals_service,
    refresh_instruments_service,
    validate_openai_setup,
)

router = APIRouter()
logger = logging.getLogger(__name__)


def _env_int(name: str, default: int, *, min_value: int, max_value: int) -> int:
    raw = os.getenv(name)
    if raw is None or str(raw).strip() == "":
        return default
    try:
        v = int(str(raw).strip())
    except Exception:
        return default
    return max(min_value, min(max_value, v))
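# Illustrative behavior (hypothetical env var names/values, not real config):
# missing or garbage input falls back to the default, and out-of-range values
# are clamped to [min_value, max_value].
#
#   _env_int("UNSET_VAR", 10, min_value=1, max_value=60)       # -> 10 (default)
#   os.environ["SOME_LIMIT"] = "999"
#   _env_int("SOME_LIMIT", 10, min_value=1, max_value=60)      # -> 60 (clamped)
#   os.environ["SOME_LIMIT"] = "not-a-number"
#   _env_int("SOME_LIMIT", 10, min_value=1, max_value=60)      # -> 10 (default)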


DEFAULT_PORTFOLIO_LIVE_FRESHNESS_MINUTES = _env_int(
    "PORTFOLIO_LIVE_FRESHNESS_MINUTES",
    10,
    min_value=1,
    max_value=60,
)


DEFAULT_NIFTY50_MAX_ANALYSES = _env_int(
    "NIFTY50_MAX_ANALYSES",
    50,
    min_value=1,
    max_value=200,
)


def _dt_to_iso(v: Any) -> Optional[str]:
    if v is None:
        return None
    if isinstance(v, datetime):
        return v.isoformat()
    # KiteConnect sometimes returns datetime; if it is already a string, keep it.
    if isinstance(v, str):
        return v
    return None


def _parse_ts(v: Any) -> Optional[datetime]:
    if v is None:
        return None
    if isinstance(v, datetime):
        return v
    if isinstance(v, str) and v.strip():
        # Keep parsing simple and safe; handle ISO-ish strings.
        try:
            return datetime.fromisoformat(v.replace("Z", "+00:00"))
        except Exception:
            return None
    return None


def _parse_ts_flexible(v: Any) -> Optional[datetime]:
    """Parse common CSV/export timestamp formats.

    Keeps behavior conservative: returns None if unparseable.
    """
    dt = _parse_ts(v)
    if dt is not None:
        return dt
    if not isinstance(v, str) or not v.strip():
        return None
    s = v.strip()
    for fmt in (
        "%d-%m-%Y %H:%M:%S",
        "%d/%m/%Y %H:%M:%S",
        "%Y-%m-%d %H:%M:%S",
        "%Y/%m/%d %H:%M:%S",
        "%d-%m-%Y %H:%M",
        "%d/%m/%Y %H:%M",
        "%Y-%m-%d",
        "%d-%m-%Y",
        "%d/%m/%Y",
    ):
        try:
            return datetime.strptime(s, fmt)
        except Exception:
            continue
    return None
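# Illustrative inputs (sample values, not real trades): ISO strings go through
# _parse_ts; the dd-mm-yyyy / dd/mm/yyyy styles seen in broker CSV exports are
# tried next; anything unparseable returns None.
#
#   _parse_ts_flexible("2024-05-01T10:15:00Z")  # ISO 8601, via _parse_ts
#   _parse_ts_flexible("01-05-2024 10:15:00")   # %d-%m-%Y %H:%M:%S
#   _parse_ts_flexible("01/05/2024")            # %d/%m/%Y, date only
#   _parse_ts_flexible("tomorrow")              # -> None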


def _norm_csv_key(k: Any) -> str:
    if not isinstance(k, str):
        return ""
    return re.sub(r"[^a-z0-9]+", "", k.strip().lower())


def _pick(row: Dict[str, Any], *keys: str) -> Any:
    for k in keys:
        nk = _norm_csv_key(k)
        if not nk:
            continue
        if nk in row and row.get(nk) not in (None, ""):
            return row.get(nk)
    return None
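# Example (hypothetical row): header variants such as "Trade Date",
# "trade_date" and "TRADE-DATE" all normalize to "tradedate", so _pick can
# match loosely-named CSV columns with one candidate list.
#
#   row = {_norm_csv_key("Trade Date"): "01-05-2024"}  # {"tradedate": ...}
#   _pick(row, "trade_date", "date")                   # -> "01-05-2024"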


def _safe_float(v: Any) -> Optional[float]:
    try:
        n = float(v)
    except Exception:
        return None
    # Reject NaN and +/-inf so downstream arithmetic stays sane.
    return n if math.isfinite(n) else None


def _safe_int(v: Any) -> Optional[int]:
    try:
        return int(v)
    except Exception:
        return None


def _norm_symbol(symbol: str) -> str:
    return (symbol or "").strip().upper()


def _norm_tags(tags: Any) -> List[str]:
    if tags is None:
        return []
    if isinstance(tags, str):
        parts = [p.strip().lower() for p in tags.split(",")]
    elif isinstance(tags, list):
        parts = [t.strip().lower() for t in tags if isinstance(t, str)]
    else:
        return []
    # Drop empties and de-dupe preserving order (same for both input shapes).
    seen = set()
    deduped: List[str] = []
    for t in parts:
        if not t or t in seen:
            continue
        seen.add(t)
        deduped.append(t)
    return deduped
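# Example: both input shapes produce the same lower-cased, de-duplicated,
# order-preserving list.
#
#   _norm_tags("Swing, LONG, swing")       # -> ["swing", "long"]
#   _norm_tags(["Swing", "long", "LONG"])  # -> ["swing", "long"]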


def _norm_account_id(v: Any) -> str:
    if v is None:
        return ""
    return str(v).strip()


def _norm_side(v: Any) -> Optional[str]:
    if v is None:
        return None
    s = str(v).strip().upper()
    # Accept broker and CSV variants.
    if s in ("BUY", "B", "PURCHASE"):
        return "BUY"
    if s in ("SELL", "S", "SALE"):
        return "SELL"
    return None


def _compute_fifo_enrichment(items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Compute gross realized P&L using FIFO matching.

    - Works on execution-level rows (BUY/SELL, qty, price).
    - Returns per-row: realized_pnl, matched_qty, matched_avg_entry_price, position_after, open_avg_price.
    - This is gross P&L (fees/taxes not included).
    """

    def key_of(it: Dict[str, Any]) -> tuple:
        return (
            (it.get("symbol") or "").strip().upper(),
            (it.get("product") or ""),
            (it.get("exchange") or ""),
            (it.get("segment") or it.get("raw", {}).get("segment") or ""),
            (it.get("series") or it.get("raw", {}).get("series") or ""),
        )

    def snapshot(dq: deque) -> tuple:
        """Return (net position, open average price) for a lot queue."""
        pos = sum(lot["qty"] for lot in dq)
        open_avg = None
        if pos:
            same_side = [lot for lot in dq if (lot["qty"] > 0) == (pos > 0)]
            denom = sum(abs(lot["qty"]) for lot in same_side)
            if denom:
                open_avg = sum(abs(lot["qty"]) * lot["price"] for lot in same_side) / denom
        return pos, open_avg

    # process oldest -> newest
    enriched = []
    lots_by_key: Dict[tuple, deque] = {}

    for it in sorted(items, key=lambda x: x.get("ts") or datetime.min):
        side = _norm_side(it.get("transaction_type"))
        qty_i = _safe_int(it.get("quantity"))
        px = _safe_float(it.get("price"))
        if px is None:
            px = _safe_float(it.get("average_price"))

        realized = 0.0
        matched_qty = 0
        matched_entry_val = 0.0

        if not side or not qty_i or qty_i <= 0 or px is None:
            # still compute position snapshot
            k = key_of(it)
            dq = lots_by_key.get(k) or deque()
            pos, open_avg = snapshot(dq)
            out = dict(it)
            out.update(
                {
                    "realized_pnl": None,
                    "matched_qty": 0,
                    "matched_avg_entry_price": None,
                    "position_after": pos,
                    "open_avg_price": open_avg,
                }
            )
            enriched.append(out)
            continue

        signed = qty_i if side == "BUY" else -qty_i
        k = key_of(it)
        dq = lots_by_key.setdefault(k, deque())

        remaining = abs(signed)
        if signed > 0:
            # BUY: first close shorts (negative lots)
            while remaining > 0 and dq and dq[0]["qty"] < 0:
                lot = dq[0]
                cover = min(remaining, -lot["qty"])
                # short pnl: sold at lot.price, bought at px
                realized += (lot["price"] - px) * cover
                matched_qty += cover
                matched_entry_val += cover * lot["price"]
                lot["qty"] += cover  # lot.qty is negative
                remaining -= cover
                if lot["qty"] == 0:
                    dq.popleft()
            if remaining > 0:
                dq.append({"qty": remaining, "price": px})
        else:
            # SELL: first close longs (positive lots)
            while remaining > 0 and dq and dq[0]["qty"] > 0:
                lot = dq[0]
                close = min(remaining, lot["qty"])
                realized += (px - lot["price"]) * close
                matched_qty += close
                matched_entry_val += close * lot["price"]
                lot["qty"] -= close
                remaining -= close
                if lot["qty"] == 0:
                    dq.popleft()
            if remaining > 0:
                # becomes a short position
                dq.append({"qty": -remaining, "price": px})

        pos, open_avg = snapshot(dq)

        out = dict(it)
        out.update(
            {
                "realized_pnl": round(realized, 2),
                "matched_qty": int(matched_qty),
                "matched_avg_entry_price": round(matched_entry_val / matched_qty, 4) if matched_qty else None,
                "position_after": int(pos),
                "open_avg_price": round(open_avg, 4) if open_avg is not None else None,
            }
        )
        enriched.append(out)

    return enriched
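# Worked example (gross P&L; fees/taxes excluded). Buy 10 @ 100, buy 5 @ 110,
# then sell 12 @ 120: FIFO matches the sell against the oldest lots first, so
# realized = (120 - 100) * 10 + (120 - 110) * 2 = 220, leaving 3 shares open
# at an average of 110. Symbol, prices and dates below are illustrative only.
#
#   rows = [
#       {"symbol": "ABC", "transaction_type": "BUY", "quantity": 10, "price": 100.0, "ts": datetime(2024, 1, 1)},
#       {"symbol": "ABC", "transaction_type": "BUY", "quantity": 5, "price": 110.0, "ts": datetime(2024, 1, 2)},
#       {"symbol": "ABC", "transaction_type": "SELL", "quantity": 12, "price": 120.0, "ts": datetime(2024, 1, 3)},
#   ]
#   last = _compute_fifo_enrichment(rows)[-1]
#   # last["realized_pnl"] == 220.0, last["position_after"] == 3,
#   # last["open_avg_price"] == 110.0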


def _portfolio_account_match(account_id: str) -> Dict[str, Any]:
    """Match portfolio docs for the given account.

    Backward compatible: include docs with missing/empty account_id.
    """

    aid = _norm_account_id(account_id)
    if not aid:
        return {"$or": [{"account_id": {"$exists": False}}, {"account_id": ""}, {"account_id": None}]}
    return {
        "$or": [
            {"account_id": aid},
            {"account_id": {"$exists": False}},
            {"account_id": ""},
            {"account_id": None},
        ]
    }
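# Example query shape (hypothetical account id): docs with a matching
# account_id are included alongside legacy docs that never stored one.
#
#   _portfolio_account_match("ACC123")
#   # -> {"$or": [{"account_id": "ACC123"},
#   #             {"account_id": {"$exists": False}},
#   #             {"account_id": ""},
#   #             {"account_id": None}]}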


def _resolve_instrument_token_via_user_zerodha(
    zerodha_client: ZerodhaClient,
    symbol: str,
    exchange: str,
) -> Optional[Dict[str, Any]]:
    sym = _norm_symbol(symbol)
    ex = (exchange or "NSE").strip().upper()

    attempts = []
    if ex:
        attempts.append(ex)
    for fallback in ("NSE", "BSE"):
        if fallback not in attempts:
            attempts.append(fallback)

    for exch in attempts:
        key = f"{exch}:{sym}"

        # Prefer quote (richer) then ltp (lighter).
        try:
            qd = zerodha_client.get_quote([key])
            row = qd.get(key) if isinstance(qd, dict) else None
            if isinstance(row, dict) and row.get("instrument_token"):
                return {
                    "instrument_token": row.get("instrument_token"),
                    "exchange": exch,
                    "tradingsymbol": sym,
                }
        except Exception:
            pass

        try:
            kite = getattr(zerodha_client, "kite", None)
            if kite is None:
                continue
            ld = kite.ltp([key])
            row = ld.get(key) if isinstance(ld, dict) else None
            if isinstance(row, dict) and row.get("instrument_token"):
                return {
                    "instrument_token": row.get("instrument_token"),
                    "exchange": exch,
                    "tradingsymbol": sym,
                }
        except Exception:
            pass

    return None
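# Note: Kite quote/ltp lookups are keyed by "EXCHANGE:TRADINGSYMBOL" strings
# (e.g. "NSE:RELIANCE"); the helper tries the requested exchange first, then
# falls back to NSE/BSE before giving up.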


def _ensure_stock_identity(db, symbol: str, exchange: str = "NSE") -> Dict[str, Any]:
    """Ensure `stocks` has a stable identity row with `stock_id` and instrument metadata."""

    sym = _norm_symbol(symbol)
    if not sym:
        raise HTTPException(status_code=400, detail="symbol is required")

    exchange = (exchange or "NSE").strip().upper()
    now = datetime.utcnow()

    stock = db["stocks"].find_one({"symbol": sym, "exchange": exchange}) or db["stocks"].find_one({"symbol": sym})

    if not stock:
        stock_doc: Dict[str, Any] = {
            "symbol": sym,
            "exchange": exchange,
            "sector": None,
            "is_active": True,
            "created_at": now,
        }
        inserted = db["stocks"].insert_one(stock_doc)
        stock = {**stock_doc, "_id": inserted.inserted_id}

    # Ensure stock_id is present and stable
    if not stock.get("stock_id"):
        sid = str(stock.get("_id"))
        db["stocks"].update_one({"_id": stock["_id"]}, {"$set": {"stock_id": sid}})
        stock["stock_id"] = sid

    # Try to enrich instrument data from cached Zerodha instruments (DB-only)
    inst = db["zerodha_instruments"].find_one(
        {"tradingsymbol": {"$regex": f"^{re.escape(sym)}$", "$options": "i"}, "exchange": exchange},
        {
            "_id": 0,
            "instrument_token": 1,
            "tradingsymbol": 1,
            "name": 1,
            "segment": 1,
            "tick_size": 1,
            "lot_size": 1,
            "exchange": 1,
        },
    )

    if inst and inst.get("instrument_token"):
        patch: Dict[str, Any] = {
            "instrument_token": inst.get("instrument_token"),
            "tradingsymbol": inst.get("tradingsymbol") or sym,
            "name": inst.get("name"),
            "segment": inst.get("segment"),
            "tick_size": inst.get("tick_size"),
            "lot_size": inst.get("lot_size"),
            "exchange": inst.get("exchange") or exchange,
            "instrument_updated_at": now,
        }
        db["stocks"].update_one({"_id": stock["_id"]}, {"$set": patch})
        stock.update({k: v for k, v in patch.items() if v is not None})

    return stock


def _normalize_mover_param(mover: str) -> str:
    v = (mover or "").strip().lower()
    if v in ("gainers", "gainer", "top-gainers"):
        return "GAINER"
    if v in ("losers", "loser", "top-losers"):
        return "LOSER"
    if v in ("both", "all"):
        return "BOTH"
    return "GAINER"


def _get_stock_by_symbol(db, symbol: str) -> Optional[Dict[str, Any]]:
    sym = (symbol or "").strip().upper()
    if not sym:
        return None
    return db["stocks"].find_one({"symbol": sym, "exchange": "NSE"}) or db["stocks"].find_one({"symbol": sym})


def _get_symbols_from_live_movers(db, mover_type: str, limit: int) -> List[str]:
    q: Dict[str, Any] = {}
    if mover_type in ("GAINER", "LOSER"):
        q["mover_type"] = mover_type

    live = list(db["live_movers"].find(q).sort([("rank", 1), ("last_updated", -1)]).limit(limit))
    stock_ids = [d.get("stock_id") for d in live if d.get("stock_id")]
    if not stock_ids:
        return []

    stocks = list(db["stocks"].find({"stock_id": {"$in": stock_ids}}, {"stock_id": 1, "symbol": 1}))
    by_id = {s.get("stock_id"): (s.get("symbol") or "").strip().upper() for s in stocks}
    out: List[str] = []
    for sid in stock_ids:
        sym = by_id.get(sid)
        if sym:
            out.append(sym)
    return out


def _get_latest_snapshot_by_stock_id(db, stock_id: str) -> Optional[Dict[str, Any]]:
    if not stock_id:
        return None
    return db["stock_analysis_snapshots"].find_one({"stock_id": stock_id}, sort=[("timestamp", -1)])


def _get_latest_snapshot_by_symbol(db, symbol: str) -> Optional[Dict[str, Any]]:
    stock = _get_stock_by_symbol(db, symbol)
    if not stock or not stock.get("stock_id"):
        return None
    return _get_latest_snapshot_by_stock_id(db, stock.get("stock_id"))


def _extract_analysis_fields(symbol: str, stock: Optional[Dict[str, Any]], snap: Dict[str, Any]) -> Dict[str, Any]:
    analysis = snap.get("analysis") or {}
    if not isinstance(analysis, dict):
        analysis = {}

    decision = (analysis.get("decision") or analysis.get("action") or "HOLD")
    confidence = analysis.get("confidence") or analysis.get("confidence_level") or "LOW"

    targets = analysis.get("targets")
    if isinstance(targets, dict):
        entry_price = analysis.get("entry_price") or targets.get("entry")
    else:
        entry_price = analysis.get("entry_price")

    out = {
        "analysis_id": analysis.get("analysis_id") or str(snap.get("_id")),
        "symbol": (symbol or "").strip().upper(),
        "decision": str(decision).upper() if decision is not None else "HOLD",
        "confidence": str(confidence).upper() if confidence is not None else "LOW",
        "entry_price": entry_price,
        "stop_loss": analysis.get("stop_loss"),
        "targets": targets,
        "timestamp": snap.get("timestamp").isoformat() if isinstance(snap.get("timestamp"), datetime) else None,
    }

    if stock:
        out["instrument_token"] = stock.get("instrument_token")
        out["stock_id"] = stock.get("stock_id")

    return out


def _what_changed(prev: Optional[Dict[str, Any]], curr: Dict[str, Any]) -> str:
    if not prev:
        return "New"

    parts: List[str] = []
    if prev.get("decision") != curr.get("decision"):
        parts.append(f"decision {prev.get('decision')}→{curr.get('decision')}")
    if prev.get("confidence") != curr.get("confidence"):
        parts.append(f"confidence {prev.get('confidence')}→{curr.get('confidence')}")

    for key in ("entry_price", "stop_loss"):
        if prev.get(key) != curr.get(key) and curr.get(key) is not None:
            parts.append(f"{key} updated")

    return "; ".join(parts) if parts else "-"


def _confidence_score(v: str) -> int:
    s = (v or "").strip().upper()
    return {"HIGH": 3, "MEDIUM": 2, "LOW": 1}.get(s, 0)

# ============ HEALTH & SETUP ============

@router.get("/health", summary="Health check and OpenAI configuration status")
async def health_check():
    """Check system health and OpenAI configuration"""
    openai_status = validate_openai_setup()
    return {
        "status": "ok",
        "service": "ChatGPT Trading Engine",
        "openai_configured": openai_status["configured"],
        "openai_message": openai_status["message"]
    }

@router.get("/config", summary="Get current configuration")
async def get_config(current_user=Depends(get_current_userdetails)):
    """Get current system configuration"""
    return {
        "user_id": str(current_user.get("_id")),
        "model": "gpt-4o-mini",
        "features": {
            "bulk_analysis": True,
            "interactive_chat": True,
            "order_placement": True,
            "real_time_data": True
        }
    }

# ============ MARKET DATA ============

@router.get("/movers", summary="Get market top gainers/losers")
async def get_market_movers(
    type: str = Query("gainers", description="Type: gainers, losers, or both"),
    limit: int = Query(50, description="Maximum number of symbols"),
    db=Depends(database.get_mongo_db)
):
    """DB-only movers endpoint.

    IMPORTANT: This endpoint must NOT trigger ET scraping, Zerodha calls, or GPT.
    Background workers populate `live_movers` and `stocks`.
    """
    try:
        mover_type = _normalize_mover_param(type)
        symbols: List[str] = []
        if mover_type == "BOTH":
            # keep ordering stable: gainers then losers
            symbols = _get_symbols_from_live_movers(db, "GAINER", limit) + _get_symbols_from_live_movers(db, "LOSER", limit)
            symbols = symbols[:limit]
        else:
            symbols = _get_symbols_from_live_movers(db, mover_type, limit)

        return {
            "status": "success",
            "data": {
                "type": type,
                "symbols": symbols,
                "source": "db",
                "timestamp": datetime.utcnow(),
            },
        }
    except Exception as e:
        logger.exception("Failed to get DB movers")
        raise HTTPException(status_code=500, detail=str(e))
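# Example response shape (symbols are illustrative; actual content depends on
# what the background workers wrote to `live_movers`):
#
#   GET /movers?type=gainers&limit=3
#   -> {"status": "success",
#       "data": {"type": "gainers", "symbols": ["AAA", "BBB", "CCC"],
#                "source": "db", "timestamp": "..."}}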


@router.get("/movers/live-model")
def get_live_model_state(db=Depends(database.get_mongo_db)):
    """Return current STOCK + live_movers + latest analysis for debugging (DB-only)."""

    # Exclude Mongo `_id` (ObjectId) so the response is JSON-serializable.
    live = list(db["live_movers"].find({}, {"_id": 0}))
    stock_ids = {doc.get("stock_id") for doc in live if doc.get("stock_id")}

    stocks = []
    if stock_ids:
        stocks = list(db["stocks"].find({"stock_id": {"$in": list(stock_ids)}}, {"_id": 0}))

    latest_analysis = {}
    for sid in stock_ids:
        snap = db["stock_analysis_snapshots"].find_one(
            {"stock_id": sid}, {"_id": 0}, sort=[("timestamp", -1)]
        )
        if snap:
            latest_analysis[sid] = snap

    return {
        "live_movers": live,
        "stocks": stocks,
        "latest_analysis": latest_analysis,
    }
@router.get("/movers/top10", summary="Get top 10 market movers (frontend compatibility)")
async def get_top10_movers(
    mover: str = Query("gainers", description="gainers or losers"),
    db=Depends(database.get_mongo_db)
):
    """DB-only top10 movers endpoint used by Stream UI."""
    try:
        mover_type = _normalize_mover_param(mover)
        if mover_type == "BOTH":
            symbols = (_get_symbols_from_live_movers(db, "GAINER", 10) + _get_symbols_from_live_movers(db, "LOSER", 10))[:10]
        else:
            symbols = _get_symbols_from_live_movers(db, mover_type, 10)

        return {
            "status": "ok",
            "top10": symbols,
            "source": "db",
            "timestamp": datetime.utcnow()
        }
    except Exception as e:
        logger.exception("Failed to get top10 movers")
        raise HTTPException(status_code=500, detail=str(e))

@router.post("/instruments/refresh", summary="Refresh instrument data from Zerodha")
async def refresh_instruments(
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Refresh instrument master data from Zerodha"""
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        result = refresh_instruments_service(db, zerodha_client)
        return {"status": "success", "updated": result["count"], "timestamp": result["timestamp"]}
    except Exception as e:
        logger.exception("Failed to refresh instruments")
        raise HTTPException(status_code=500, detail=str(e))


# ============ LEARNING (Personal Zerodha trades) ============


@router.get("/learning/transactions", summary="Get user's recent Zerodha transactions (trades)")
async def get_learning_transactions(
    limit: int = Query(50, ge=1, le=200, description="Max rows to return"),
    days: int = Query(120, ge=1, le=365, description="How many days of history to return"),
    sync: bool = Query(True, description="Sync today's Zerodha trades/orders into DB before listing"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Personal endpoint: last N transactions across months.

    Zerodha's official API typically exposes only today's tradebook/orders.
    To support multi-month history, we persist daily syncs into Mongo.
    """
    try:
        user_id = str(current_user.get("_id"))
        col = db["learning_transactions"]

        cutoff = datetime.utcnow() - timedelta(days=days)

        sync_report = {
            "attempted": bool(sync),
            "trades_seen": 0,
            "orders_seen": 0,
            "upserts": 0,
            "errors": [],
        }

        if sync:
            zerodha_client = None
            try:
                zerodha_client = get_zerodha_client_service(db, current_user)
            except Exception:
                # Do not fail the endpoint if Zerodha auth is invalid/expired.
                sync_report["errors"].append("zerodha_client_unavailable")

            def upsert_one(kind: str, it: Dict[str, Any]) -> None:
                ts_raw = (
                    it.get("exchange_timestamp")
                    or it.get("order_timestamp")
                    or it.get("fill_timestamp")
                    or it.get("timestamp")
                )
                ts_dt = _parse_ts(ts_raw)
                symbol = (it.get("tradingsymbol") or it.get("symbol") or "").strip().upper()

                # Ensure we always have a usable timestamp for sorting/filtering.
                now = datetime.utcnow()
                effective_ts = ts_dt or now

                doc = {
                    "user_id": user_id,
                    "kind": kind,
                    "symbol": symbol,
                    "exchange": it.get("exchange"),
                    "product": it.get("product"),
                    "transaction_type": (it.get("transaction_type") or "").upper() or None,
                    "quantity": _safe_int(it.get("quantity")),
                    "price": _safe_float(it.get("price")),
                    "average_price": _safe_float(it.get("average_price")),
                    "trade_id": it.get("trade_id"),
                    "order_id": it.get("order_id"),
                    "status": it.get("status"),
                    "ts": effective_ts,
                    "instrument_token": _safe_int(it.get("instrument_token") or it.get("token")),
                    "raw": it,
                    "updated_at": now,
                }
                # Choose a stable upsert key.
                if kind == "trade" and it.get("trade_id"):
                    key = {"user_id": user_id, "kind": "trade", "trade_id": it.get("trade_id")}
                elif it.get("order_id"):
                    key = {"user_id": user_id, "kind": kind, "order_id": it.get("order_id")}
                else:
                    key = {"user_id": user_id, "kind": kind, "symbol": symbol, "ts": effective_ts}

                col.update_one(key, {"$set": doc, "$setOnInsert": {"created_at": datetime.utcnow()}}, upsert=True)
                sync_report["upserts"] += 1

            if zerodha_client is not None:
                # Sync trades (today) and orders (today) so the DB accumulates over time.
                try:
                    trades = zerodha_client.get_trades()
                    if isinstance(trades, list):
                        sync_report["trades_seen"] = len(trades)
                        for it in trades:
                            if isinstance(it, dict):
                                upsert_one("trade", it)
                except Exception:
                    sync_report["errors"].append("trades_sync_failed")

                try:
                    orders = zerodha_client.get_orders()
                    if isinstance(orders, list):
                        sync_report["orders_seen"] = len(orders)
                        for it in orders:
                            if isinstance(it, dict):
                                upsert_one("order", it)
                except Exception:
                    sync_report["errors"].append("orders_sync_failed")

        # Read from DB (months history)
        q = {
            "user_id": user_id,
            "$or": [
                {"ts": {"$gte": cutoff}},
                {"ts": None},
                {"ts": {"$exists": False}},
            ],
        }
        # Pull more rows than we return so FIFO has context; still fast.
        compute_cap = max(limit, min(2000, days * 50))
        cur = col.find(q, {"_id": 0}).sort([("ts", 1), ("updated_at", 1)]).limit(compute_cap)
        docs = list(cur)

        base_items: List[Dict[str, Any]] = []
        for d in docs:
            base_items.append(
                {
                    "kind": d.get("kind") or "—",
                    "symbol": d.get("symbol") or "",
                    "exchange": d.get("exchange"),
                    "product": d.get("product"),
                    "transaction_type": d.get("transaction_type"),
                    "quantity": d.get("quantity"),
                    "price": d.get("price"),
                    "average_price": d.get("average_price"),
                    "trade_id": d.get("trade_id"),
                    "order_id": d.get("order_id"),
                    "status": d.get("status"),
                    "ts": d.get("ts") or d.get("updated_at"),
                    "exchange_timestamp": _dt_to_iso(d.get("ts")) or _dt_to_iso(d.get("updated_at")),
                    "instrument_token": d.get("instrument_token"),
                    "segment": d.get("segment"),
                    "series": d.get("series"),
                    "isin": d.get("isin"),
                    "raw": d.get("raw") if isinstance(d.get("raw"), dict) else {},
                }
            )

        enriched_all = _compute_fifo_enrichment(base_items)

        # Return newest rows only, with enrichment.
        enriched_all_sorted = sorted(enriched_all, key=lambda x: x.get("ts") or datetime.min, reverse=True)
        items = []
        for it in enriched_all_sorted[:limit]:
            qty = _safe_int(it.get("quantity"))
            px = _safe_float(it.get("price"))
            if px is None:
                px = _safe_float(it.get("average_price"))
            value = round((qty or 0) * (px or 0.0), 2) if qty and px is not None else None

            items.append(
                {
                    "kind": it.get("kind") or "—",
                    "symbol": it.get("symbol") or "",
                    "exchange": it.get("exchange"),
                    "product": it.get("product"),
                    "transaction_type": _norm_side(it.get("transaction_type")),
                    "quantity": it.get("quantity"),
                    "price": it.get("price"),
                    "average_price": it.get("average_price"),
                    "trade_id": it.get("trade_id"),
                    "order_id": it.get("order_id"),
                    "status": it.get("status"),
                    "exchange_timestamp": it.get("exchange_timestamp"),
                    "instrument_token": it.get("instrument_token"),
                    "isin": it.get("isin") or (it.get("raw") or {}).get("isin"),
                    "segment": it.get("segment") or (it.get("raw") or {}).get("segment"),
                    "series": it.get("series") or (it.get("raw") or {}).get("series"),
                    "value": value,
                    "realized_pnl": it.get("realized_pnl"),
                    "matched_qty": it.get("matched_qty"),
                    "matched_avg_entry_price": it.get("matched_avg_entry_price"),
                    "position_after": it.get("position_after"),
                    "open_avg_price": it.get("open_avg_price"),
                }
            )

        total = col.count_documents(q)

        return {
            "status": "success",
            "source": "db_learning_transactions",
            "days": days,
            "total": int(total),
            "sync": sync_report,
            "items": items,
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Failed to fetch learning transactions")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/learning/transactions/import", summary="Import (backfill) learning transactions from CSV")
async def import_learning_transactions_csv(
    file: UploadFile = File(..., description="CSV export (tradebook/contract note)"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Backfill multi-month history.

    Zerodha APIs generally don't provide months of tradebook history.
    This endpoint lets the user import exported transactions (tradebook or
    contract-note CSVs) into Mongo.
    """
    try:
        user_id = str(current_user.get("_id"))
        col = db["learning_transactions"]

        raw_bytes = await file.read()
        if not raw_bytes:
            raise HTTPException(status_code=400, detail="Empty file")

        # Decode with BOM-safe utf-8; fall back to latin-1.
        try:
            text = raw_bytes.decode("utf-8-sig")
        except Exception:
            text = raw_bytes.decode("latin-1")

        reader = csv.DictReader(io.StringIO(text))
        if not reader.fieldnames:
            raise HTTPException(status_code=400, detail="CSV has no header")

        processed = 0
        upserts = 0
        skipped = 0
        errors: List[Dict[str, Any]] = []

        ops: List[UpdateOne] = []
        batch_size = 500

        def flush_ops() -> None:
            nonlocal upserts
            if not ops:
                return
            # ordered=False makes large imports much faster.
            col.bulk_write(ops, ordered=False)
            upserts += len(ops)
            ops.clear()

        for row in reader:
            processed += 1
            # Normalize keys once per row.
            nrow: Dict[str, Any] = {}
            for k, v in (row or {}).items():
                nk = _norm_csv_key(k)
                if not nk:
                    continue
                nrow[nk] = v.strip() if isinstance(v, str) else v

            symbol = _norm_symbol(
                str(
                    _pick(
                        nrow,
                        "tradingsymbol",
                        "trading_symbol",
                        "symbol",
                        "instrument",
                    )
                    or ""
                )
            )
            if not symbol:
                skipped += 1
                continue

            side = _pick(
                nrow,
                "transaction_type",
                "transactiontype",
                "trade_type",
                "tradetype",
                "buy_sell",
                "side",
            )
            side_norm = (str(side).strip().upper() if side is not None else "") or None

            qty = _safe_int(_pick(nrow, "quantity", "qty"))
            price = _safe_float(_pick(nrow, "price", "tradeprice", "rate"))
            avg = _safe_float(_pick(nrow, "average_price", "avgprice", "averageprice"))

            exchange = _pick(nrow, "exchange")
            product = _pick(nrow, "product")
            segment = _pick(nrow, "segment")
            series = _pick(nrow, "series")
            isin = _pick(nrow, "isin")
            auction = _pick(nrow, "auction")

            trade_id = _pick(nrow, "trade_id", "tradeid")
            order_id = _pick(nrow, "order_id", "orderid")

            trade_date = _pick(nrow, "trade_date", "tradedate", "date")
            exec_time = _pick(
                nrow,
                "order_execution_time",
                "orderexecutiontime",
                "executiontime",
                "timestamp",
                "time",
            )

            ts_dt = _parse_ts_flexible(exec_time) or _parse_ts_flexible(trade_date)
            if ts_dt is None:
                # Keep a timestamp for sorting; mark as unknown.
                ts_dt = datetime.utcnow()

            now = datetime.utcnow()
            doc = {
                "user_id": user_id,
                "kind": "import",
                "symbol": symbol,
                "exchange": exchange,
                "product": product,
                "segment": segment,
                "series": series,
                "isin": isin,
                "auction": auction,
                "transaction_type": side_norm,
                "quantity": qty,
                "price": price,
                "average_price": avg,
                "trade_id": trade_id,
                "order_id": order_id,
                "ts": ts_dt,
                "raw": nrow,
                "updated_at": now,
            }

            if trade_id:
                key = {"user_id": user_id, "kind": "import", "trade_id": trade_id}
            elif order_id:
                key = {"user_id": user_id, "kind": "import", "order_id": order_id}
            else:
                key = {
                    "user_id": user_id,
                    "kind": "import",
                    "symbol": symbol,
                    "ts": ts_dt,
                    "transaction_type": side_norm,
                    "quantity": qty,
                    "price": price,
                }

            try:
                ops.append(UpdateOne(key, {"$set": doc, "$setOnInsert": {"created_at": now}}, upsert=True))
                if len(ops) >= batch_size:
                    flush_ops()
            except Exception as e:
                if len(errors) < 20:
                    errors.append({"row": processed, "error": str(e)})

        # flush remaining
        try:
            flush_ops()
        except Exception as e:
            if len(errors) < 20:
                errors.append({"row": processed, "error": f"bulk_write_failed: {e}"})

        return {
            "status": "success",
            "processed": processed,
            "upserts": upserts,
            "skipped": skipped,
            "errors": errors,
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Failed to import learning transactions CSV")
        raise HTTPException(status_code=500, detail=str(e))
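# Example of a minimal importable CSV (values are illustrative). Header names
# are matched loosely via _norm_csv_key, so "Trade Date" / "trade_date" /
# "tradedate" are interchangeable:
#
#   symbol,trade_type,quantity,price,trade_date,order_id
#   RELIANCE,buy,10,2900.50,01-05-2024,ORD1
#   RELIANCE,sell,4,2950.00,02-05-2024,ORD2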

# ============ ANALYSIS ============

@router.post("/analyze/{symbol}", summary="Analyze single symbol using ChatGPT")
async def analyze_symbol(
    symbol: str,
    payload: Dict[str, Any] = Body(default={}),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """DB-only analysis endpoint used by Stream UI.

    IMPORTANT: This endpoint must NOT call Zerodha or GPT.
    It returns the latest persisted analysis snapshot from
    `stock_analysis_snapshots` for the requested symbol.
    """
    try:
        snap = _get_latest_snapshot_by_symbol(db, symbol)
        if not snap:
            raise HTTPException(status_code=404, detail="No analysis snapshot available yet")

        analysis = snap.get("analysis") or {}
        if isinstance(analysis, dict):
            analysis.setdefault("symbol", (symbol or "").strip().upper())
            ts = snap.get("timestamp")
            if isinstance(ts, datetime):
                analysis.setdefault("timestamp", ts.isoformat())

            # Include persisted market_data (candles/quote) when available.
            # This remains DB-only and enables charting in the UI.
            if "market_data" not in analysis:
                md = snap.get("market_data")
                if isinstance(md, dict) and md:
                    analysis["market_data"] = md

        return {"status": "success", "symbol": symbol, "analysis": analysis}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Failed to fetch DB analysis snapshot for %s", symbol)
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/alerts-legacy", summary="Get user-specific alerts (legacy, DB-only)")
async def get_user_alerts(
    limit: int = Query(20, description="Max items per section"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """DB-only alerts endpoint.

    Builds user-specific opportunities from:
    - `user_likes` (is_active=True)
    - latest `stock_analysis_snapshots`
    - optional `orders` for Triggered/Closed bucketing

    IMPORTANT: Must NOT call Zerodha or GPT.
    """
    try:
        user_id = str(current_user.get("_id"))

        likes = list(db["user_likes"].find({"user_id": user_id, "is_active": True}, {"stock_id": 1}))
        stock_ids = [d.get("stock_id") for d in likes if d.get("stock_id")]
        if not stock_ids:
            return {
                "status": "ok",
                "data": {"top_signals": [], "monitoring": [], "triggered": [], "closed": []},
                "timestamp": datetime.utcnow(),
            }

        stocks = list(
            db["stocks"].find({"stock_id": {"$in": stock_ids}}, {"stock_id": 1, "symbol": 1, "instrument_token": 1})
        )
        by_stock_id = {s.get("stock_id"): s for s in stocks if s.get("stock_id")}

        live_rows = list(
            db["live_movers"].find({"stock_id": {"$in": stock_ids}}, {"stock_id": 1, "rank": 1, "mover_type": 1})
        )
        live_by_stock_id = {r.get("stock_id"): r for r in live_rows if r.get("stock_id")}

        # Optional: latest order status per symbol
        symbols = [
            (by_stock_id.get(sid) or {}).get("symbol")
            for sid in stock_ids
            if (by_stock_id.get(sid) or {}).get("symbol")
        ]
        latest_order_by_symbol: Dict[str, Dict[str, Any]] = {}
        if symbols and "orders" in db.list_collection_names():
            cur = db["orders"].find({"user_id": user_id, "symbol": {"$in": symbols}}).sort([("created_at", -1)])
            for o in cur:
                sym = (o.get("symbol") or "").strip().upper()
                if sym and sym not in latest_order_by_symbol:
                    latest_order_by_symbol[sym] = o

        top_signals: List[Dict[str, Any]] = []
        monitoring: List[Dict[str, Any]] = []
        triggered: List[Dict[str, Any]] = []
        closed: List[Dict[str, Any]] = []

        for sid in stock_ids:
            stock = by_stock_id.get(sid)
            if not stock:
                continue
            symbol = (stock.get("symbol") or "").strip().upper()
            if not symbol:
                continue

            snaps = list(db["stock_analysis_snapshots"].find({"stock_id": sid}).sort([("timestamp", -1)]).limit(2))
            if not snaps:
                continue

            curr = _extract_analysis_fields(symbol, stock, snaps[0])
            prev = _extract_analysis_fields(symbol, stock, snaps[1]) if len(snaps) > 1 else None
            curr["what_changed"] = _what_changed(prev, curr)

            live = live_by_stock_id.get(sid) or {}
            curr["rank"] = live.get("rank")
            curr["mover_type"] = live.get("mover_type")

            order = latest_order_by_symbol.get(symbol)
            if order:
                status = (order.get("status") or "").strip().upper()
                if status in ("PLACED", "OPEN", "TRIGGERED"):
                    triggered.append(curr)
                    continue
                if status in ("COMPLETE", "CLOSED", "CANCELLED", "REJECTED"):
                    closed.append(curr)
                    continue

            decision = (curr.get("decision") or "HOLD").strip().upper()
            conf = _confidence_score(curr.get("confidence") or "")
            if decision in ("BUY", "SELL") and conf >= 2:
                top_signals.append(curr)
            else:
                monitoring.append(curr)

        def sort_key(row: Dict[str, Any]):
            rank = row.get("rank")
            rank_val = int(rank) if isinstance(rank, int) else 9999
            conf_val = _confidence_score(row.get("confidence") or "")
            ts = row.get("timestamp") or ""
            return (-conf_val, rank_val, ts)

        for arr in (top_signals, monitoring, triggered, closed):
            arr.sort(key=sort_key)

        return {
            "status": "ok",
            "data": {
                "top_signals": top_signals[:limit],
                "monitoring": monitoring[:limit],
                "triggered": triggered[:limit],
                "closed": closed[:limit],
            },
            "timestamp": datetime.utcnow(),
        }
    except Exception as e:
        logger.exception("Failed to build alerts")
        raise HTTPException(status_code=500, detail=str(e))


# ============ PORTFOLIO (USER STOCK LIST) ============


@router.get("/stocks/search", summary="Search stocks (shared catalog)")
async def search_stocks(
    q: str = Query("", description="Search by symbol/name"),
    limit: int = Query(20, ge=1, le=50),
    db=Depends(database.get_mongo_db),
):
    query = (q or "").strip()
    if not query:
        return {"status": "ok", "items": []}

    rx = {"$regex": re.escape(query), "$options": "i"}
    cur = db["stocks"].find(
        {"$or": [{"symbol": rx}, {"name": rx}, {"tradingsymbol": rx}]},
        {"_id": 0, "stock_id": 1, "symbol": 1, "exchange": 1, "instrument_token": 1, "name": 1},
    ).limit(limit)
    return {"status": "ok", "items": list(cur)}


@router.get("/stocks/{symbol}", summary="Get stock info (no analysis)")
async def get_stock_info(
    symbol: str,
    include_quote: int = Query(0, description="1 to fetch live quote via Zerodha"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    stock = _ensure_stock_identity(db, symbol)
    out = {
        "status": "ok",
        "stock": {
            "stock_id": stock.get("stock_id"),
            "symbol": stock.get("symbol"),
            "exchange": stock.get("exchange"),
            "instrument_token": stock.get("instrument_token"),
            "name": stock.get("name"),
            "segment": stock.get("segment"),
            "tick_size": stock.get("tick_size"),
            "lot_size": stock.get("lot_size"),
            "is_active": stock.get("is_active", True),
        },
        "quote": None,
    }

    if include_quote == 1:
        zerodha_client = get_zerodha_client_service(db, current_user)
        sym = _norm_symbol(symbol)
        exch = (stock.get("exchange") or "NSE").strip().upper()
        qd = zerodha_client.get_quote([f"{exch}:{sym}"])
        out["quote"] = qd.get(f"{exch}:{sym}") if isinstance(qd, dict) else None

        # Opportunistically backfill instrument_token using the user's session.
        if not stock.get("instrument_token") and isinstance(out.get("quote"), dict):
            token = out["quote"].get("instrument_token")
            if token:
                db["stocks"].update_one(
                    {"_id": stock.get("_id")},
                    {"$set": {"instrument_token": token, "instrument_updated_at": datetime.utcnow()}},
                )
                out["stock"]["instrument_token"] = token

    return out


@router.post("/portfolio/items", summary="Add/Upsert portfolio item")
async def upsert_portfolio_item(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    user_id = str(current_user.get("_id"))
    user_account_id = _norm_account_id(current_user.get("account_id"))
    account_id = _norm_account_id(payload.get("account_id")) or user_account_id
    if _norm_account_id(payload.get("account_id")) and user_account_id and account_id != user_account_id:
        raise HTTPException(status_code=403, detail="account_id does not match current user")
    symbol = _norm_symbol(payload.get("symbol"))
    exchange = (payload.get("exchange") or "NSE").strip().upper()
    status = (payload.get("status") or "ACTIVE").strip().upper()
    tags = _norm_tags(payload.get("tags"))
    nickname = (payload.get("nickname") or "").strip() or None
    notes = (payload.get("notes") or "").strip() or None

    if status not in ("ACTIVE", "INACTIVE"):
        raise HTTPException(status_code=400, detail="status must be ACTIVE or INACTIVE")

    stock = _ensure_stock_identity(db, symbol, exchange=exchange)
    if not stock.get("instrument_token"):
        # Use the *logged-in user's* Zerodha session to resolve token (no stocks duplication).
        try:
            zerodha_client = get_zerodha_client_service(db, current_user)
            resolved = _resolve_instrument_token_via_user_zerodha(zerodha_client, symbol, exchange)
        except Exception:
            resolved = None

        if resolved and resolved.get("instrument_token"):
            db["stocks"].update_one(
                {"_id": stock.get("_id")},
                {
                    "$set": {
                        "instrument_token": resolved.get("instrument_token"),
                        "exchange": resolved.get("exchange") or stock.get("exchange") or exchange,
                        "tradingsymbol": resolved.get("tradingsymbol") or stock.get("tradingsymbol") or symbol,
                        "instrument_updated_at": datetime.utcnow(),
                    }
                },
            )
            stock["instrument_token"] = resolved.get("instrument_token")
            if resolved.get("exchange"):
                stock["exchange"] = resolved.get("exchange")
            if resolved.get("tradingsymbol"):
                stock["tradingsymbol"] = resolved.get("tradingsymbol")
        else:
            raise HTTPException(
                status_code=400,
                detail="Instrument token not found for symbol in your Zerodha session. Verify the symbol/exchange and that your Zerodha connection is active.",
            )

    now = datetime.utcnow()
    doc = {
        "user_id": user_id,
        "account_id": account_id,
        "stock_id": stock.get("stock_id"),
        "symbol": stock.get("symbol"),
        "exchange": stock.get("exchange") or exchange,
        "status": status,
        "tags": tags,
        "nickname": nickname,
        "notes": notes,
        "updated_at": now,
    }

    existing = db["user_portfolio_items"].find_one(
        {"user_id": user_id, "stock_id": stock.get("stock_id"), **_portfolio_account_match(account_id)}
    )
    if existing:
        # Backfill account_id on older docs.
        if account_id and not _norm_account_id(existing.get("account_id")):
            doc["account_id"] = account_id
        db["user_portfolio_items"].update_one({"_id": existing["_id"]}, {"$set": doc})
        item_id = str(existing["_id"])
        created_at = existing.get("created_at")
    else:
        inserted = db["user_portfolio_items"].insert_one({**doc, "created_at": now})
        item_id = str(inserted.inserted_id)
        created_at = now

    return {
        "status": "ok",
        "item": {
            "portfolio_item_id": item_id,
            "account_id": account_id,
            "stock_id": stock.get("stock_id"),
            "symbol": stock.get("symbol"),
            "exchange": stock.get("exchange") or exchange,
            "instrument_token": stock.get("instrument_token"),
            "name": stock.get("name"),
            "status": status,
            "tags": tags,
            "nickname": nickname,
            "notes": notes,
            "created_at": created_at,
            "updated_at": now,
        },
    }
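# Example request body (illustrative symbol/tags):
#
#   POST /portfolio/items
#   {"symbol": "RELIANCE", "exchange": "NSE", "status": "ACTIVE",
#    "tags": "swing, long-term", "nickname": "RIL core"}
#
# Tags accept either a comma-separated string or a list; both are normalized
# by _norm_tags before storage.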


@router.get("/portfolio/items", summary="List portfolio items")
async def list_portfolio_items(
    q: str = Query("", description="Search symbol/nickname"),
    tag: Optional[str] = Query(None, description="Filter by tag"),
    status: Optional[str] = Query(None, description="ACTIVE or INACTIVE"),
    account_id: Optional[str] = Query(None, description="Account scope (defaults to current user's account_id)"),
    limit: int = Query(50, ge=1, le=200),
    skip: int = Query(0, ge=0),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    user_id = str(current_user.get("_id"))
    user_account_id = _norm_account_id(current_user.get("account_id"))

    effective_account_id = _norm_account_id(account_id) or user_account_id
    if _norm_account_id(account_id) and user_account_id and effective_account_id != user_account_id:
        raise HTTPException(status_code=403, detail="account_id does not match current user")

    query: Dict[str, Any] = {"user_id": user_id}
    and_clauses: List[Dict[str, Any]] = [_portfolio_account_match(effective_account_id)]

    if status:
        st = status.strip().upper()
        if st not in ("ACTIVE", "INACTIVE"):
            raise HTTPException(status_code=400, detail="status must be ACTIVE or INACTIVE")
        query["status"] = st

    if tag:
        query["tags"] = (tag or "").strip().lower()

    text = (q or "").strip()
    if text:
        rx = {"$regex": re.escape(text), "$options": "i"}
        and_clauses.append({"$or": [{"symbol": rx}, {"nickname": rx}]})

    if and_clauses:
        query["$and"] = and_clauses

    cur = (
        db["user_portfolio_items"]
        .find(query)
        .sort([("updated_at", -1), ("created_at", -1)])
        .skip(skip)
        .limit(limit)
    )

    items = list(cur)
    stock_ids = [it.get("stock_id") for it in items if it.get("stock_id")]
    stocks = list(
        db["stocks"].find({"stock_id": {"$in": stock_ids}}, {"_id": 0, "stock_id": 1, "instrument_token": 1, "name": 1})
    )
    by_id = {s.get("stock_id"): s for s in stocks if s.get("stock_id")}

    out: List[Dict[str, Any]] = []
    for it in items:
        sid = it.get("stock_id")
        stock = by_id.get(sid) or {}
        out.append(
            {
                "portfolio_item_id": str(it.get("_id")),
                "account_id": it.get("account_id") or effective_account_id or None,
                "stock_id": sid,
                "symbol": it.get("symbol"),
                "exchange": it.get("exchange"),
                "instrument_token": stock.get("instrument_token"),
                "name": stock.get("name"),
                "status": it.get("status"),
                "tags": it.get("tags") or [],
                "nickname": it.get("nickname"),
                "notes": it.get("notes"),
                "created_at": it.get("created_at"),
                "updated_at": it.get("updated_at"),
            }
        )

    total = db["user_portfolio_items"].count_documents(query)
    return {"status": "ok", "total": total, "items": out}


@router.patch("/portfolio/items/{portfolio_item_id}", summary="Update portfolio item")
async def update_portfolio_item(
    portfolio_item_id: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    user_id = str(current_user.get("_id"))
    account_id = _norm_account_id(current_user.get("account_id"))
    try:
        oid = ObjectId(portfolio_item_id)
    except Exception:
        raise HTTPException(status_code=400, detail="Invalid portfolio_item_id")

    existing = db["user_portfolio_items"].find_one(
        {"_id": oid, "user_id": user_id, **_portfolio_account_match(account_id)}
    )
    if not existing:
        raise HTTPException(status_code=404, detail="Portfolio item not found")

    patch: Dict[str, Any] = {"updated_at": datetime.utcnow()}

    # Backfill account_id on older docs.
    if account_id and not _norm_account_id(existing.get("account_id")):
        patch["account_id"] = account_id

    if "status" in payload:
        st = (payload.get("status") or "").strip().upper()
        if st not in ("ACTIVE", "INACTIVE"):
            raise HTTPException(status_code=400, detail="status must be ACTIVE or INACTIVE")
        patch["status"] = st

    if "tags" in payload:
        patch["tags"] = _norm_tags(payload.get("tags"))

    if "nickname" in payload:
        nick = (payload.get("nickname") or "").strip()
        patch["nickname"] = nick or None

    if "notes" in payload:
        notes = (payload.get("notes") or "").strip()
        patch["notes"] = notes or None

    db["user_portfolio_items"].update_one(
        {"_id": oid, "user_id": user_id, **_portfolio_account_match(account_id)},
        {"$set": patch},
    )
    updated = db["user_portfolio_items"].find_one(
        {"_id": oid, "user_id": user_id, **_portfolio_account_match(account_id)}
    )
    return {
        "status": "ok",
        "item": {
            "portfolio_item_id": str(updated.get("_id")),
            "account_id": updated.get("account_id") or account_id or None,
            "stock_id": updated.get("stock_id"),
            "symbol": updated.get("symbol"),
            "exchange": updated.get("exchange"),
            "status": updated.get("status"),
            "tags": updated.get("tags") or [],
            "nickname": updated.get("nickname"),
            "notes": updated.get("notes"),
            "created_at": updated.get("created_at"),
            "updated_at": updated.get("updated_at"),
        },
    }


@router.delete("/portfolio/items/{portfolio_item_id}", summary="Delete portfolio item")
async def delete_portfolio_item(
    portfolio_item_id: str,
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    user_id = str(current_user.get("_id"))
    account_id = _norm_account_id(current_user.get("account_id"))
    try:
        oid = ObjectId(portfolio_item_id)
    except Exception:
        raise HTTPException(status_code=400, detail="Invalid portfolio_item_id")

    res = db["user_portfolio_items"].delete_one(
        {"_id": oid, "user_id": user_id, **_portfolio_account_match(account_id)}
    )
    if res.deleted_count != 1:
        raise HTTPException(status_code=404, detail="Portfolio item not found")
    return {"status": "ok", "deleted": True}


@router.get("/portfolio/tags", summary="List user's tags")
async def list_portfolio_tags(
    account_id: Optional[str] = Query(None, description="Account scope (defaults to current user's account_id)"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    user_id = str(current_user.get("_id"))
    user_account_id = _norm_account_id(current_user.get("account_id"))
    effective_account_id = _norm_account_id(account_id) or user_account_id
    if _norm_account_id(account_id) and user_account_id and effective_account_id != user_account_id:
        raise HTTPException(status_code=403, detail="account_id does not match current user")
    pipeline = [
        {"$match": {"user_id": user_id, **_portfolio_account_match(effective_account_id)}},
        {"$unwind": "$tags"},
        {"$group": {"_id": "$tags", "count": {"$sum": 1}}},
        {"$sort": {"count": -1, "_id": 1}},
    ]
    rows = list(db["user_portfolio_items"].aggregate(pipeline))
    return {"status": "ok", "tags": [{"tag": r.get("_id"), "count": r.get("count", 0)} for r in rows if r.get("_id")]}

@router.post("/analyze/bulk", summary="Bulk analyze multiple symbols")
async def bulk_analyze(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """
    Bulk analyze multiple symbols
    Payload: {
        "symbols": ["RELIANCE", "TCS", "INFY"],
        "analysis_type": "short_sell" | "long_buy" | "general",
        "timeframes": ["5minute", "15minute"],
        "max_concurrent": 5
    }
    """
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        result = bulk_analyze_service(
            db=db,
            zerodha_client=zerodha_client,
            symbols=payload.get("symbols", []),
            analysis_type=payload.get("analysis_type", "general"),
            timeframes=payload.get("timeframes", DEFAULT_TIMEFRAMES),
            max_concurrent=payload.get("max_concurrent", 5),
            user_id=str(current_user.get("_id"))
        )
        return {"status": "success", "analyzed": len(result), "results": result}
    except Exception as e:
        logger.exception("Bulk analysis failed")
        raise HTTPException(status_code=500, detail=str(e))
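

# Illustrative client call for the bulk endpoint above (hypothetical host and
# token; the route prefix depends on how this router is mounted):
#
#   curl -X POST "https://<host>/.../analyze/bulk" \
#        -H "Authorization: Bearer <jwt>" \
#        -d '{"symbols": ["RELIANCE", "TCS"], "analysis_type": "short_sell"}'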


# ============ PORTFOLIO STREAM (ON-DEMAND) ============


def _format_analysis_for_stream_row(analysis: Dict[str, Any], rank: int, mover: str) -> Dict[str, Any]:
    symbol = (analysis.get("symbol") or "").strip().upper()

    raw_targets = analysis.get("targets")
    exit_target = None
    if isinstance(raw_targets, (list, tuple)) and raw_targets:
        exit_target = raw_targets[0]

    return {
        "symbol": symbol,
        "rank": rank,
        "decision": analysis.get("decision", "HOLD"),
        "confidence": analysis.get("confidence", "LOW"),
        "instrument_token": analysis.get("instrument_token"),
        "entry_price": analysis.get("entry_price"),
        "stop_loss": analysis.get("stop_loss"),
        "price_target": analysis.get("price_target"),
        "risk_reward_ratio": analysis.get("risk_reward_ratio"),
        "current_price": analysis.get("current_price"),
        "metrics": {
            "decision": analysis.get("decision", "HOLD"),
            "confidence": analysis.get("confidence", "LOW"),
        },
        "rationale": analysis.get("rationale", []),
        "features": analysis.get("features", {}),
        "technical_indicators": analysis.get("technical_indicators", {}),
        "targets": {
            "entry": analysis.get("entry_price"),
            "exit": analysis.get("price_target") or exit_target,
        },
        "timestamp": analysis.get("timestamp"),
        "mover": mover,
    }
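

# Illustrative transformation performed by the helper above: an analysis dict
# such as
#
#   {"symbol": "reliance", "decision": "BUY", "entry_price": 2500.0,
#    "price_target": None, "targets": [2550.0, 2600.0]}
#
# becomes a row with "symbol": "RELIANCE", "rank": <rank>, "mover": <mover>,
# and "targets": {"entry": 2500.0, "exit": 2550.0} (price_target wins over the
# first raw target when it is set).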


@router.post("/portfolio/live", summary="On-demand portfolio live analysis")
async def portfolio_live_analysis(
    payload: Dict[str, Any] = Body(default={}),
    freshness_minutes: int = Query(
        DEFAULT_PORTFOLIO_LIVE_FRESHNESS_MINUTES,
        ge=1,
        le=60,
        description="Use cached analysis if within this many minutes",
    ),
    limit: int = Query(20, ge=1, le=200, description="Max ACTIVE portfolio symbols to consider"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Portfolio streaming endpoint.

    - Reads ACTIVE portfolio items for the current user/account.
    - Uses recent `stock_analysis_snapshots` (<= freshness_minutes) when available.
    - Otherwise calls the existing analysis engine (Zerodha + GPT) and persists a new snapshot.

    NOTE: This is intentionally NOT the movers endpoint.
    """

    user_id = str(current_user.get("_id"))
    account_id = _norm_account_id(current_user.get("account_id"))
    cutoff = datetime.utcnow() - timedelta(minutes=int(freshness_minutes))

    items = list(
        db["user_portfolio_items"]
        .find({"user_id": user_id, "status": "ACTIVE", **_portfolio_account_match(account_id)})
        .sort([("updated_at", -1), ("created_at", -1)])
        .limit(limit)
    )

    if not items:
        return {
            "status": "success",
            "scanned": 0,
            "results": [],
            "source": "portfolio",
            "cached": 0,
            "computed": 0,
            "freshness_minutes": freshness_minutes,
            "timestamp": datetime.utcnow(),
        }

    stock_ids = [it.get("stock_id") for it in items if it.get("stock_id")]
    stocks = list(
        db["stocks"].find(
            {"stock_id": {"$in": stock_ids}},
            {"_id": 0, "stock_id": 1, "symbol": 1, "exchange": 1, "instrument_token": 1, "name": 1},
        )
    )
    by_id = {s.get("stock_id"): s for s in stocks if s.get("stock_id")}

    cached_by_symbol: Dict[str, Dict[str, Any]] = {}
    if stock_ids:
        cur = (
            db["stock_analysis_snapshots"]
            .find(
                {"stock_id": {"$in": stock_ids}, "timestamp": {"$gte": cutoff}},
                {"_id": 0, "stock_id": 1, "timestamp": 1, "analysis": 1},
            )
            .sort([("timestamp", -1)])
        )
        for snap in cur:
            sid = snap.get("stock_id")
            stock = by_id.get(sid) or {}
            sym = (stock.get("symbol") or "").strip().upper()
            if not sym or sym in cached_by_symbol:
                continue
            analysis = snap.get("analysis")
            if not isinstance(analysis, dict):
                continue
            analysis.setdefault("symbol", sym)
            ts = snap.get("timestamp")
            if isinstance(ts, datetime):
                analysis.setdefault("timestamp", ts.isoformat())
            if analysis.get("instrument_token") is None and stock.get("instrument_token") is not None:
                analysis["instrument_token"] = stock.get("instrument_token")
            cached_by_symbol[sym] = analysis

    portfolio_symbols: List[str] = []
    to_compute: List[str] = []
    for it in items:
        sid = it.get("stock_id")
        stock = by_id.get(sid) or {}
        sym = (it.get("symbol") or stock.get("symbol") or "").strip().upper()
        if not sym:
            continue
        portfolio_symbols.append(sym)
        if sym not in cached_by_symbol:
            to_compute.append(sym)

    if to_compute:
        zerodha_client = get_zerodha_client_service(db, current_user)
        computed_results = bulk_analyze_service(
            db=db,
            zerodha_client=zerodha_client,
            symbols=to_compute,
            analysis_type=(payload.get("analysis_type") or "general"),
            timeframes=payload.get("timeframes") or DEFAULT_TIMEFRAMES,
            max_concurrent=int(payload.get("max_concurrent") or 5),
            user_id=user_id,
        )

        now = datetime.utcnow()
        for analysis in computed_results or []:
            if not isinstance(analysis, dict):
                continue
            sym = (analysis.get("symbol") or "").strip().upper()
            if not sym:
                continue

            # Resolve stock_id using the same symbol fallback applied when
            # building portfolio_symbols (item symbol first, then stocks doc),
            # so items without a stored symbol still get a snapshot.
            sid = None
            for it in items:
                stock = by_id.get(it.get("stock_id")) or {}
                it_sym = (it.get("symbol") or stock.get("symbol") or "").strip().upper()
                if it_sym == sym:
                    sid = it.get("stock_id")
                    break
            if not sid:
                continue

            targets = analysis.get("targets") if isinstance(analysis.get("targets"), list) else None
            primary_target = targets[0] if targets else None

            snapshot_doc: Dict[str, Any] = {
                "stock_id": sid,
                "timestamp": now,
                "decision": analysis.get("decision"),
                "confidence": analysis.get("confidence"),
                "entry": analysis.get("entry_price"),
                "stop_loss": analysis.get("stop_loss"),
                "target": analysis.get("price_target") or analysis.get("target") or primary_target,
                "reason": analysis.get("rationale"),
                "source": "PORTFOLIO",
                "analysis": analysis,
                "market_data": analysis.get("market_data"),
                "features": analysis.get("features"),
            }
            try:
                db["stock_analysis_snapshots"].insert_one(snapshot_doc)
            except Exception:
                logger.exception("Failed to persist portfolio snapshot for %s", sym)

            cached_by_symbol[sym] = analysis

    formatted_results: List[Dict[str, Any]] = []
    rank = 1
    for sym in portfolio_symbols:
        analysis = cached_by_symbol.get(sym)
        if not analysis or not isinstance(analysis, dict):
            continue
        formatted_results.append(_format_analysis_for_stream_row(analysis, rank=rank, mover="portfolio"))
        rank += 1

    return {
        "status": "success",
        "scanned": len(portfolio_symbols),
        "results": formatted_results,
        "source": "portfolio",
        "cached": len(portfolio_symbols) - len(to_compute),
        "computed": len(to_compute),
        "freshness_minutes": freshness_minutes,
        "timestamp": datetime.utcnow(),
    }
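

# The freshness lookup above filters `stock_analysis_snapshots` by stock_id and
# timestamp, newest first; a compound index sketch that supports it (an
# assumption; it may already be defined elsewhere):
#
#   db["stock_analysis_snapshots"].create_index([("stock_id", 1), ("timestamp", -1)])
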

@router.get("/signals", summary="Get user's latest signals/analysis")
async def get_signals(
    limit: int = Query(20, ge=1, le=200, description="Maximum number of signals"),
    symbol: Optional[str] = Query(None, description="Filter by symbol"),
    decision: Optional[str] = Query(None, description="Filter by decision: BUY, SELL, HOLD"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get user's latest trading signals from ChatGPT analysis"""
    try:
        result = get_user_signals_service(
            db=db,
            user_id=str(current_user.get("_id")),
            limit=limit,
            symbol=symbol,
            decision=decision
        )
        return {"status": "success", "signals": result}
    except Exception as e:
        logger.exception("Failed to get signals")
        raise HTTPException(status_code=500, detail=str(e))

# ============ INTERACTIVE CHAT ============

@router.post("/chat/{symbol}", summary="Interactive chat about a specific symbol")
async def chat_with_symbol(
    symbol: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """
    Interactive chat about a symbol with real-time market data
    Payload: {
        "message": "What's the RSI looking like?",
        "include_fresh_data": true,
        "conversation_id": "optional-uuid"
    }
    """
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        result = chat_with_stock_service(
            db=db,
            zerodha_client=zerodha_client,
            symbol=symbol,
            message=payload.get("message", ""),
            user_id=str(current_user.get("_id")),
            conversation_id=payload.get("conversation_id"),
            include_fresh_data=payload.get("include_fresh_data", True)
        )
        return {"status": "success", "response": result}
    except Exception as e:
        logger.exception(f"Chat failed for symbol {symbol}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/learning/chat/{symbol}", summary="Learning chat about a trade/symbol (no fresh Zerodha data)")
async def learning_chat_with_symbol(
    symbol: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """Learning chat that stays within /learning.

    This endpoint does NOT require Zerodha access_token because it forces
    include_fresh_data=False and relies on DB snapshots + user-provided context.
    """
    try:
        result = chat_with_stock_service(
            db=db,
            zerodha_client=None,  # safe when include_fresh_data=False
            symbol=symbol,
            message=payload.get("message", ""),
            user_id=str(current_user.get("_id")),
            conversation_id=payload.get("conversation_id"),
            include_fresh_data=False,
        )
        return {"status": "success", "response": result}
    except Exception as e:
        logger.exception(f"Learning chat failed for symbol {symbol}")
        raise HTTPException(status_code=500, detail=str(e))
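

# Example learning-chat payload (illustrative; same shape as /chat/{symbol},
# except fresh market data is always disabled server-side):
#
#   {"message": "Why did this setup fail?", "conversation_id": "optional-uuid"}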

@router.get("/chat/{symbol}/history", summary="Get chat history for a symbol")
async def get_chat_history(
    symbol: str,
    limit: int = Query(50, ge=1, le=200, description="Maximum messages"),
    conversation_id: Optional[str] = Query(None, description="Optional conversation id"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get chat history for a specific symbol"""
    try:
        user_id = str(current_user.get("_id"))
        q = {"user_id": user_id, "symbol": symbol}
        if conversation_id:
            q["conversation_id"] = conversation_id

        chats = list(db["chats"].find(q).sort("created_at", -1).limit(limit))
        
        for chat in chats:
            chat["_id"] = str(chat["_id"])
        
        return {"status": "success", "history": chats}
    except Exception as e:
        logger.exception(f"Failed to get chat history for {symbol}")
        raise HTTPException(status_code=500, detail=str(e))

# ============ ORDER MANAGEMENT ============

@router.post("/order/place", summary="Place order based on ChatGPT analysis")
async def place_trading_order(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """
    Place trading order with ChatGPT risk validation
    Payload: {
        "symbol": "RELIANCE",
        "action": "BUY" | "SELL",
        "quantity": 10,
        "order_type": "MARKET" | "LIMIT",
        "price": 2500.0,  // Required for LIMIT orders
        "analysis_id": "optional-analysis-reference",
        "confirm": true  // Required for execution
    }
    """
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        result = place_order_service(
            db=db,
            zerodha_client=zerodha_client,
            user_id=str(current_user.get("_id")),
            symbol=payload.get("symbol"),
            action=payload.get("action"),
            quantity=payload.get("quantity"),
            order_type=payload.get("order_type", "MARKET"),
            price=payload.get("price"),
            analysis_id=payload.get("analysis_id"),
            confirmed=payload.get("confirm", False)
        )
        return {"status": "success", "order": result}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Order placement failed")
        raise HTTPException(status_code=500, detail=str(e))

@router.get("/orders", summary="Get user's order history")
async def get_orders(
    limit: int = Query(50, ge=1, le=200, description="Maximum orders"),
    status: Optional[str] = Query(None, description="Filter by status"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get user's order history"""
    try:
        user_id = str(current_user.get("_id"))
        query = {"user_id": user_id}
        if status:
            query["status"] = status
            
        orders = list(db["orders"].find(query).sort("created_at", -1).limit(limit))
        
        for order in orders:
            order["_id"] = str(order["_id"])
        
        return {"status": "success", "orders": orders}
    except Exception as e:
        logger.exception("Failed to get orders")
        raise HTTPException(status_code=500, detail=str(e))

# ============ LEGACY COMPATIBILITY ============

@router.get("/live", summary="Legacy compatibility - get live analysis")
async def get_live_analysis(
    mover: str = Query("gainers", description="gainers or losers"),
    limit: int = Query(10, ge=1, le=50, description="Max symbols to return (DB-only)"),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """DB-only legacy endpoint.

    IMPORTANT: Must NOT call Zerodha or GPT. Uses latest
    `stock_analysis_snapshots` for symbols in `live_movers`.
    """
    try:
        mover_type = _normalize_mover_param(mover)
        if mover_type == "BOTH":
            gainers = _get_symbols_from_live_movers(db, "GAINER", limit)
            losers = _get_symbols_from_live_movers(db, "LOSER", limit)
            symbols = (gainers + losers)[:limit]
        else:
            symbols = _get_symbols_from_live_movers(db, mover_type, limit)

        if not symbols:
            return {"status": "success", "scanned": 0, "results": []}

        formatted_results = []
        for sym in symbols:
            snap = _get_latest_snapshot_by_symbol(db, sym)
            if not snap:
                continue

            result = snap.get("analysis") or {}
            if not isinstance(result, dict):
                continue

            raw_targets = result.get("targets")
            exit_target = None
            if isinstance(raw_targets, (list, tuple)) and raw_targets:
                exit_target = raw_targets[0]

            formatted_results.append({
                "symbol": (result.get("symbol") or sym).strip().upper(),
                "rank": len(formatted_results) + 1,
                "decision": result.get("decision", "HOLD"),
                "confidence": result.get("confidence", "LOW"),
                "instrument_token": result.get("instrument_token"),
                # Core numeric fields from GPT analysis
                "entry_price": result.get("entry_price"),
                "stop_loss": result.get("stop_loss"),
                "price_target": result.get("price_target"),
                "risk_reward_ratio": result.get("risk_reward_ratio"),
                "current_price": result.get("current_price"),
                "metrics": {
                    "decision": result.get("decision", "HOLD"),
                    "confidence": result.get("confidence", "LOW"),
                    "volume_drop": 0.0,
                    "momentum_drop": 0.0,
                    "ema_gap": 0.0,
                    "rsi_divergence": False,
                    "macd_divergence": False,
                    "wedge": False,
                    "pivot_level": "",
                    "near_upper_circuit": False,
                },
                # Full GPT rationale (rendered only in the frontend popup)
                "rationale": result.get("rationale", []),
                # Compact technical + strategy features for popup
                "features": result.get("features", {}),
                "technical_indicators": result.get("technical_indicators", {}),
                "targets": {
                    "entry": result.get("entry_price"),
                    "exit": exit_target
                },
                "timestamp": result.get("timestamp")
            })
        
        return {
            "status": "success",
            "scanned": len(symbols),
            "results": formatted_results
        }
        
    except Exception as e:
        logger.exception("Legacy live analysis failed")
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/nifty50/live", summary="DB-only: latest NIFTY50 analyses")
async def get_nifty50_live(
    limit: int = Query(
        DEFAULT_NIFTY50_MAX_ANALYSES,
        ge=1,
        le=DEFAULT_NIFTY50_MAX_ANALYSES,
        description="Max NIFTY50 symbols to return (capped by NIFTY50_MAX_ANALYSES)",
    ),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails),
):
    """DB-only endpoint for the NIFTY50 tab.

    IMPORTANT:
    - Must NOT call NSE, Zerodha, or GPT.
    - Background loop populates `index_constituents` and writes snapshots.
    """

    try:
        doc = db["index_constituents"].find_one({"index": "NIFTY 50"})
        symbols = doc.get("symbols") if isinstance(doc, dict) else None
        if not isinstance(symbols, list) or not symbols:
            return {
                "status": "success",
                "scanned": 0,
                "results": [],
                "source": "nifty50",
                "timestamp": datetime.utcnow(),
            }

        ordered = [str(s).strip().upper() for s in symbols if str(s).strip()]
        ordered = ordered[: int(limit)]
        if not ordered:
            return {
                "status": "success",
                "scanned": 0,
                "results": [],
                "source": "nifty50",
                "timestamp": datetime.utcnow(),
            }

        stocks = list(
            db["stocks"].find(
                {"symbol": {"$in": ordered}},
                {"_id": 0, "stock_id": 1, "symbol": 1, "exchange": 1, "instrument_token": 1, "name": 1},
            )
        )
        by_id = {s.get("stock_id"): s for s in stocks if s.get("stock_id")}
        by_symbol = {(s.get("symbol") or "").strip().upper(): s for s in stocks if (s.get("symbol") or "").strip()}
        stock_ids = [by_symbol[s].get("stock_id") for s in ordered if by_symbol.get(s) and by_symbol[s].get("stock_id")]

        cached_by_symbol: Dict[str, Dict[str, Any]] = {}
        if stock_ids:
            cur = (
                db["stock_analysis_snapshots"]
                .find(
                    {"stock_id": {"$in": stock_ids}},
                    {"_id": 0, "stock_id": 1, "timestamp": 1, "analysis": 1},
                )
                .sort([("timestamp", -1)])
            )
            for snap in cur:
                sid = snap.get("stock_id")
                stock = by_id.get(sid) or {}
                sym = (stock.get("symbol") or "").strip().upper()
                if not sym or sym in cached_by_symbol:
                    continue
                analysis = snap.get("analysis")
                if not isinstance(analysis, dict):
                    continue
                analysis.setdefault("symbol", sym)
                ts = snap.get("timestamp")
                if isinstance(ts, datetime):
                    analysis.setdefault("timestamp", ts.isoformat())
                if analysis.get("instrument_token") is None and stock.get("instrument_token") is not None:
                    analysis["instrument_token"] = stock.get("instrument_token")
                cached_by_symbol[sym] = analysis

        formatted_results: List[Dict[str, Any]] = []
        rank = 1
        for sym in ordered:
            analysis = cached_by_symbol.get(sym)
            if not analysis or not isinstance(analysis, dict):
                continue
            formatted_results.append(_format_analysis_for_stream_row(analysis, rank=rank, mover="nifty50"))
            rank += 1

        return {
            "status": "success",
            "scanned": len(ordered),
            "results": formatted_results,
            "source": "nifty50",
            "timestamp": datetime.utcnow(),
        }
    except Exception as e:
        logger.exception("Failed to get NIFTY50 live")
        raise HTTPException(status_code=500, detail=str(e))

# ============ FRONTEND COMPATIBILITY ============

@router.get("/profile", summary="Get Zerodha profile status")
async def get_profile(
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Get Zerodha profile and authentication status"""
    try:
        zerodha_client = get_zerodha_client_service(db, current_user)
        
        # Try to get profile to verify connection
        try:
            profile = zerodha_client.profile()
            return {
                "status": "ok",
                "authenticated": True,
                "profile": profile
            }
        except Exception:
            return {
                "status": "error",
                "authenticated": False,
                "message": "Zerodha authentication required"
            }
    except HTTPException as e:
        if e.status_code == 404:
            return {"status": "error", "authenticated": False, "message": "Zerodha settings not found"}
        elif e.status_code == 403:
            return {"status": "error", "authenticated": False, "message": "Zerodha not authenticated"}
        raise
    except Exception as e:
        logger.exception("Profile check failed")
        return {"status": "error", "authenticated": False, "message": str(e)}

@router.post("/stream/feedback/{analysis_id}", summary="Provide feedback on analysis")
async def stream_feedback(
    analysis_id: str,
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Provide thumbs up/down feedback on analysis.

    Stored in `user_likes` as a per-user preference signal:
    - feedback="up"   -> is_active=True
    - feedback="down" -> is_active=False

    We resolve `stock_id` by looking up the snapshot that contains
    `analysis.analysis_id == analysis_id`.
    """
    try:
        feedback = payload.get("feedback")  # "up" or "down"
        if feedback not in ("up", "down"):
            raise HTTPException(status_code=400, detail="feedback must be 'up' or 'down'")

        snap = db["stock_analysis_snapshots"].find_one({"analysis.analysis_id": analysis_id})
        if not snap:
            # fallback for cases where older code wrote ObjectId as string
            snap = db["stock_analysis_snapshots"].find_one({"analysis.analysis_id": str(analysis_id)})
        if not snap:
            raise HTTPException(status_code=404, detail="Analysis snapshot not found")

        stock_id = snap.get("stock_id")
        if not stock_id:
            raise HTTPException(status_code=404, detail="Snapshot missing stock_id")

        user_id = str(current_user.get("_id"))
        now = datetime.utcnow()
        is_active = feedback == "up"
        db["user_likes"].update_one(
            {"user_id": user_id, "stock_id": stock_id},
            {
                "$set": {
                    "is_active": is_active,
                    "last_feedback": feedback,
                    "updated_at": now,
                },
                "$setOnInsert": {
                    "created_at": now,
                },
            },
            upsert=True,
        )

        return {"status": "ok", "feedback": feedback, "stock_id": stock_id, "is_active": is_active}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Feedback update failed")
        raise HTTPException(status_code=500, detail=str(e))
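

# Resulting `user_likes` document after a thumbs-up (illustrative values):
#
#   {"user_id": "<user oid as str>", "stock_id": "<stock_id>", "is_active": True,
#    "last_feedback": "up", "created_at": <first vote>, "updated_at": <latest vote>}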

@router.post("/callback", summary="Zerodha OAuth callback")
async def oauth_callback(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db),
    current_user=Depends(get_current_userdetails)
):
    """Handle Zerodha OAuth callback"""
    try:
        request_token = payload.get("request_token")
        user_id = str(current_user.get("_id"))

        if not request_token:
            raise HTTPException(status_code=400, detail="Missing request token")

        settings = db["zerodha_settings"].find_one({"user_id": user_id})
        if not settings:
            raise HTTPException(status_code=404, detail="Zerodha settings not found")

        client = ZerodhaClient(settings["api_key"], settings["api_secret"])
        session = client.generate_session(request_token)

        # Update settings with access token
        db["zerodha_settings"].update_one(
            {"user_id": user_id},
            {"$set": {
                "access_token": session["access_token"],
                "login_time": datetime.utcnow(),
                "status": "active"
            }}
        )

        return {"status": "connected", "message": "Zerodha authentication successful"}
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("OAuth callback failed")
        raise HTTPException(status_code=500, detail=str(e))
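

# Flow sketch for the callback above: the user completes Zerodha login in the
# browser, Zerodha redirects back with a short-lived `request_token`, and the
# frontend forwards it here (illustrative):
#
#   POST .../callback   {"request_token": "<token from the redirect URL>"}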

@router.post("/postback", summary="Zerodha postback handler")
async def zerodha_postback(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db)
):
    """Handle Zerodha postback notifications"""
    try:
        # Store postback
        postback_doc = {
            "source": "zerodha",
            "payload": payload,
            "received_at": datetime.utcnow()
        }
        db["zerodha_postbacks"].insert_one(postback_doc)
        
        # Update order status if applicable
        order_id = payload.get("order_id")
        if order_id:
            db["orders"].update_one(
                {"zerodha_order_id": order_id},
                {"$set": {
                    "status": payload.get("status"),
                    "filled_quantity": payload.get("filled_quantity", 0),
                    "average_price": payload.get("average_price"),
                    "updated_at": datetime.utcnow()
                }}
            )
        
        return {"status": "received"}
    except Exception as e:
        logger.exception("Postback processing failed")
        raise HTTPException(status_code=500, detail=str(e))
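

# Illustrative subset of Zerodha postback fields consumed above (real payloads
# carry more keys; only these are used by the update logic):
#
#   {"order_id": "151220000000000", "status": "COMPLETE",
#    "filled_quantity": 10, "average_price": 2500.0}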

# ============ WEBHOOKS ============

@router.post("/webhook/zerodha", summary="Zerodha order update webhook")
async def zerodha_webhook(
    payload: Dict[str, Any] = Body(...),
    db=Depends(database.get_mongo_db)
):
    """Handle Zerodha order update webhooks"""
    try:
        # Store webhook payload
        webhook_doc = {
            "source": "zerodha",
            "payload": payload,
            "received_at": datetime.utcnow()
        }
        db["webhooks"].insert_one(webhook_doc)
        
        # Update order status if applicable
        order_id = payload.get("order_id")
        if order_id:
            db["orders"].update_one(
                {"zerodha_order_id": order_id},
                {"$set": {
                    "status": payload.get("status"),
                    "filled_quantity": payload.get("filled_quantity", 0),
                    "average_price": payload.get("average_price"),
                    "updated_at": datetime.utcnow()
                }}
            )
        
        return {"status": "received"}
    except Exception as e:
        logger.exception("Webhook processing failed")
        raise HTTPException(status_code=500, detail=str(e))
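

# The postback and webhook handlers above duplicate the order-update step; a
# minimal factored sketch (an assumption, not wired into the handlers; behavior
# mirrors the inline versions):

def _apply_order_update(db, payload: Dict[str, Any]) -> None:
    """Mirror a broker order update onto the local `orders` collection."""
    order_id = payload.get("order_id")
    if not order_id:
        return
    db["orders"].update_one(
        {"zerodha_order_id": order_id},
        {"$set": {
            "status": payload.get("status"),
            "filled_quantity": payload.get("filled_quantity", 0),
            "average_price": payload.get("average_price"),
            "updated_at": datetime.utcnow(),
        }},
    )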