"""Legacy service: removed.

The platform now uses teGPT services under `app.v1.services.tegpt.*`.

This file is intentionally kept as a stub to prevent accidental imports
from breaking at runtime; any legacy callers will get a clear 410.
"""

from typing import Any, Dict, List, Optional

from fastapi import HTTPException


def _gone() -> None:
    raise HTTPException(status_code=410, detail="tradeGPT legacy service has been removed; use teGPT")


# --- Legacy function names (kept only to fail loudly if called) ---
def refresh_movers_service(db) -> Dict[str, Any]:
    _gone()


def get_top10_service(db) -> List[str]:
    _gone()


def refresh_instruments_service(db, zerodha, force: bool = False) -> Dict[str, Any]:
    _gone()


def create_snapshot_service(db, zerodha, symbol: str) -> Dict[str, Any]:
    _gone()


def get_snapshot_service(db, snapshot_id: str) -> Optional[Dict[str, Any]]:
    _gone()


def chat_symbol_service(db, zerodha, user: Dict[str, Any], symbol: str, payload: Dict[str, Any]):
    _gone()


def call_chatgpt_analyze_service(snapshot: Dict[str, Any], question: Optional[str] = None) -> Dict[str, Any]:
    _gone()


def validate_openai_config() -> Dict[str, Any]:
    _gone()


def get_live_signals_service(db, zerodha, mover: str = "gainers", limit: int = 10, user_id: Optional[str] = None, question: str = ""):
    _gone()


def place_short_order_service(*args, **kwargs):
    _gone()


def kite_postback_service(*args, **kwargs):
    _gone()
        from app.v1.services.zerolive.list import TopMoversFetcher  # type: ignore
        fetcher = TopMoversFetcher(db)
        gainers, losers = fetcher.fetch_top_movers()
        def _norm(arr):
            """Normalize mover entries (dicts or plain strings) to symbols."""
            if not arr:
                return []
            out = []
            for item in arr:
                if isinstance(item, dict):
                    for k in ("symbol", "nse_symbol", "tradingsymbol", "ticker", "name"):
                        if item.get(k):
                            out.append(item[k])
                            break
                elif isinstance(item, str):
                    out.append(item)
            return out

        gainers = _norm(gainers)
        losers = _norm(losers)
    except Exception:
        logger.info("TopMoversFetcher not available or failed; using empty movers")
        gainers = []
        losers = []

    # Build canonical record with current UTC timestamp.
    record: Dict[str, Any] = {
        "type": "latest",
        "fetched_at": datetime.utcnow(),
        "gainers": gainers,
        "losers": losers,
        "merged": list(dict.fromkeys(gainers + losers)),
    }

    # Keep a single up-to-date view for live usage.
    db["movers"].update_one({"type": "latest"}, {"$set": record}, upsert=True)

    # Also append a snapshot into history for analytics/backtesting.
    history_doc = dict(record)
    history_doc["type"] = "snapshot"
    db["movers_history"].insert_one(history_doc)

    return record

def get_top10_service(db) -> List[str]:
    """Return up to 10 symbols from the latest merged movers view."""
    doc = db["movers"].find_one({"type": "latest"}) or {}
    merged = doc.get("merged", [])
    return merged[:10]
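
# Usage sketch (illustrative; `db` is assumed to be a PyMongo database handle,
# as implied by the collection-style access above):
#
#     record = refresh_movers_service(db)
#     record["merged"]          # de-duplicated gainers + losers, order preserved
#     get_top10_service(db)     # first 10 of the merged list
#
# `merged` uses dict.fromkeys() for ordered de-duplication, so a symbol that
# appears in both lists is kept once, at its first position.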

# ---------------- INSTRUMENT CACHE (minimal helpers) ----------------
def _load_instruments_cache(db) -> List[Dict[str, Any]]:
    """Load cached Zerodha instruments, supporting both storage layouts:
    a single document holding an `instruments` array, or one document per row."""
    try:
        cached = db["zerodha_instruments"].find_one({"type": "nse_equity"})
        if cached and isinstance(cached.get("instruments"), list):
            return cached["instruments"]
        cursor = db["zerodha_instruments"].find({}, {"_id": 0})
        instruments = list(cursor)
        if instruments and isinstance(instruments[0], dict) and "tradingsymbol" in instruments[0]:
            return instruments
    except Exception:
        logger.exception("Error reading instrument cache")
    return []
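
# The two cache layouts this helper accepts (illustrative shapes, inferred
# from the reads above; field values are placeholders):
#
#     # Layout A: one document holding the full list
#     {"type": "nse_equity", "instruments": [{"tradingsymbol": "INFY", ...}, ...]}
#
#     # Layout B: one document per instrument row
#     {"tradingsymbol": "INFY", "instrument_token": 123456, ...}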

def _get_instrument_token_from_db(db, symbol: str) -> Optional[int]:
    """Back-compat helper: resolve token from the `stocks` master list."""
    sym = (symbol or "").strip().upper()
    if not sym:
        return None
    try:
        stock = db["stocks"].find_one({"symbol": sym, "exchange": "NSE"}, {"instrument_token": 1})
        tok = stock.get("instrument_token") if isinstance(stock, dict) else None
        return int(tok) if tok is not None else None
    except Exception:
        logger.exception("stocks master lookup error for %s", sym)
        return None

def refresh_instruments_service(db, zerodha: ZerodhaClient, force: bool = False) -> Dict[str, Any]:
    """Back-compat endpoint: refresh the `stocks` master list.

    NOTE: Stocks are created only from Zerodha instruments; this is the only allowed creation path.
    """
    try:
        res = refresh_stocks_master(db=db, zerodha_client=zerodha, exchanges=["NSE"], allow_cache_write=True)
        return {"ok": True, "count": int(res.get("inserted", 0)) + int(res.get("updated", 0)) + int(res.get("kept", 0))}
    except Exception as e:
        logger.exception("Failed to refresh stocks master: %s", e)
        raise HTTPException(status_code=502, detail="Failed to refresh stocks master from Zerodha")

# ---------------- SNAPSHOT (candles + quote) ----------------
def _compact_candle_records_to_ohlcv(records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    out: List[Dict[str, Any]] = []
    for r in records or []:
        if not isinstance(r, dict):
            continue
        t = r.get("date") or r.get("t")
        if hasattr(t, "isoformat"):
            t = t.isoformat()
        out.append(
            {
                "t": t,
                "o": r.get("open") if r.get("open") is not None else r.get("o"),
                "h": r.get("high") if r.get("high") is not None else r.get("h"),
                "l": r.get("low") if r.get("low") is not None else r.get("l"),
                "c": r.get("close") if r.get("close") is not None else r.get("c"),
                "v": r.get("volume") if r.get("volume") is not None else r.get("v"),
            }
        )
    return out
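
# Example (illustrative): a Zerodha-style candle record compacts to the short
# OHLCV form, with datetimes serialized via isoformat():
#
#     >>> from datetime import datetime
#     >>> _compact_candle_records_to_ohlcv(
#     ...     [{"date": datetime(2024, 1, 2, 9, 15), "open": 100.0, "high": 101.5,
#     ...       "low": 99.8, "close": 101.0, "volume": 12345}]
#     ... )
#     [{'t': '2024-01-02T09:15:00', 'o': 100.0, 'h': 101.5, 'l': 99.8, 'c': 101.0, 'v': 12345}]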


def build_runtime_snapshot_service(db, zerodha: ZerodhaClient, symbol: str) -> Dict[str, Any]:
    """Build a runtime-only snapshot.

    Rules:
    - Stock identity comes from `stocks` master list.
    - Daily/Weekly/Monthly candles are loaded from DB history ONLY.
    - Intraday (5/15/30) is fetched live for today and kept in memory only.
    - Snapshot is NOT persisted.
    """

    sym = (symbol or "").strip().upper()
    if not sym:
        raise HTTPException(status_code=400, detail="symbol is required")

    stock = db["stocks"].find_one({"symbol": sym, "exchange": "NSE"}) or db["stocks"].find_one({"symbol": sym})
    if not isinstance(stock, dict):
        raise HTTPException(status_code=404, detail=f"Stock not found in stocks master list: {sym}")

    token = stock.get("instrument_token")
    stock_id = stock.get("stock_id")
    if token is None:
        raise HTTPException(status_code=404, detail=f"instrument_token missing for {sym} in stocks master")

    # quote
    quote: Dict[str, Any] = {}
    try:
        q_raw = _call_zerodha(zerodha, "quote", [f"NSE:{sym}"])
        if isinstance(q_raw, dict):
            quote = q_raw.get(f"NSE:{sym}", q_raw.get(sym, {})) or {}
    except Exception as e:
        logger.warning("quote fetch failed for %s: %s", sym, e)
        quote = {}

    # history from DB
    day = get_stock_history(db, str(stock_id), "day")
    week = get_stock_history(db, str(stock_id), "week")
    month = get_stock_history(db, str(stock_id), "month")

    # intraday live-only
    c5 = GLOBAL_INTRADAY_STORE.get_intraday_candles(zerodha_client=zerodha, instrument_token=int(token), timeframe="5minute", max_candles=100)
    c15 = GLOBAL_INTRADAY_STORE.get_intraday_candles(zerodha_client=zerodha, instrument_token=int(token), timeframe="15minute", max_candles=100)
    c30 = GLOBAL_INTRADAY_STORE.get_intraday_candles(zerodha_client=zerodha, instrument_token=int(token), timeframe="30minute", max_candles=100)

    snapshot = {
        "symbol": sym,
        "stock_id": str(stock_id) if stock_id is not None else None,
        "instrument_token": token,
        "timestamp": datetime.utcnow().isoformat(),
        "quote": quote,
        "candles": {
            "day": _compact_candle_records_to_ohlcv(day),
            "week": _compact_candle_records_to_ohlcv(week),
            "month": _compact_candle_records_to_ohlcv(month),
            "5minute": _compact_candle_records_to_ohlcv(c5),
            "15minute": _compact_candle_records_to_ohlcv(c15),
            "30minute": _compact_candle_records_to_ohlcv(c30),
        },
        "meta": {"source": "runtime", "note": "Snapshot is not persisted"},
    }
    return snapshot
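
# Usage sketch (illustrative; the Zerodha client wiring is app-specific):
#
#     snap = build_runtime_snapshot_service(db, zerodha, "INFY")
#     snap["candles"]["day"]       # compact OHLCV rows from DB history
#     snap["candles"]["5minute"]   # live intraday, in-memory only
#     snap["quote"].get("last_price")  # may be absent if the quote fetch failed
#
# Nothing here is written back to the database; callers that need persistence
# must store the returned dict themselves.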

def create_snapshot_service(db, zerodha: ZerodhaClient, symbol: str) -> Dict[str, Any]:
    """Back-compat: build a runtime snapshot and DO NOT persist it."""
    snapshot = build_runtime_snapshot_service(db, zerodha, symbol)
    return {"_id": None, "symbol": snapshot.get("symbol"), "snapshot": snapshot, "created_at": datetime.utcnow()}

def get_snapshot_service(db, snapshot_id: str) -> Optional[Dict[str, Any]]:
    # Snapshots are not persisted by design.
    return None

# ---------------- LLM call & simple JSON parsing ----------------
def _extract_json_from_text(text: str) -> Optional[Dict[str, Any]]:
    """
    Extract the first JSON object from text. Returns a dict or None.
    """
    if not text:
        return None
    try:
        return json.loads(text)
    except Exception:
        pass
    # Fall back to a brace scan for JSON embedded in surrounding prose.
    start = None
    depth = 0
    for i, ch in enumerate(text):
        if ch == "{":
            if start is None:
                start = i
            depth += 1
        elif ch == "}" and start is not None:  # ignore stray '}' before any '{'
            depth -= 1
            if depth == 0:
                candidate = text[start : i + 1]
                try:
                    return json.loads(candidate)
                except Exception:
                    start = None
    return None
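
# Example (illustrative): plain JSON parses directly, and JSON embedded in
# surrounding prose is recovered by the brace scan:
#
#     >>> _extract_json_from_text('{"decision": "HOLD"}')
#     {'decision': 'HOLD'}
#     >>> _extract_json_from_text('Sure, here it is: {"decision": "BUY"} Hope that helps!')
#     {'decision': 'BUY'}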

def _validate_llm_response(resp: Dict[str, Any]) -> Dict[str, Any]:
    """
    Minimal validation: ensure decision exists and is one of BUY/SELL/HOLD.
    Keep other fields as-is.
    """
    if not isinstance(resp, dict):
        return {"decision": "HOLD", "confidence": "LOW", "rationale": ["Invalid LLM response format"]}
    decision = str(resp.get("decision", "")).upper()
    if decision not in ("BUY", "SELL", "HOLD"):
        return {"decision": "HOLD", "confidence": "LOW", "rationale": ["Invalid decision value from LLM"]}
    # Normalize confidence to HIGH/MEDIUM/LOW, defaulting to LOW.
    conf = resp.get("confidence", "LOW")
    if isinstance(conf, str):
        conf = conf.upper()
        if conf not in ("HIGH", "MEDIUM", "LOW"):
            conf = "LOW"
    else:
        conf = "LOW"
    resp["confidence"] = conf
    resp["decision"] = decision
    # Ensure rationale exists.
    if "rationale" not in resp:
        resp["rationale"] = ["No rationale provided by model"]
    return resp
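
# Example (illustrative): casing is normalized and a rationale is guaranteed:
#
#     >>> _validate_llm_response({"decision": "buy", "confidence": "high"})
#     {'decision': 'BUY', 'confidence': 'HIGH', 'rationale': ['No rationale provided by model']}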

def call_chatgpt_analyze_service(snapshot: Dict[str, Any], question: Optional[str] = None) -> Dict[str, Any]:
    """
    Send the snapshot JSON to ChatGPT and expect a single JSON object back
    (the model must return JSON). Returns a validated dict. No local
    algorithms are applied.
    """
    if not OPENAI_API_KEY:
        logger.warning("OPENAI_API_KEY not set — returning fallback HOLD")
        return {"decision": "HOLD", "confidence": "LOW", "rationale": ["LLM not configured"]}

    system_prompt = (
        "You are an intraday trading assistant. You will be given a JSON payload containing a 'snapshot' key "
        "with raw OHLCV arrays and a 'quote' key. Analyze only the provided data and return EXACTLY one JSON object "
        "with keys: decision (BUY|SELL|HOLD), confidence (HIGH|MEDIUM|LOW), optional entry:{low,high}, optional stop_loss, optional targets[], rationale[]. "
        "Return only JSON — no extra commentary. If uncertain, return decision=HOLD and explain why in rationale."
    )
    user_payload = {"snapshot": snapshot, "question": question or ""}

    try:
        client = _get_openai_client()
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": json.dumps(user_payload, default=str)},
        ]
        token_kwargs = _max_tokens_kwarg(OPENAI_MODEL, 800)
        try:
            resp = client.chat.completions.create(
                model=OPENAI_MODEL,
                messages=messages,
                temperature=0.0,
                timeout=LLM_TIMEOUT_SEC,
                **token_kwargs,
            )
        except Exception as e:
            msg = str(e)
            if "max_tokens" in msg and "max_completion_tokens" in msg:
                logger.warning(
                    "Retrying OpenAI call with max_completion_tokens due to parameter mismatch (model=%s)",
                    OPENAI_MODEL,
                )
                resp = client.chat.completions.create(
                    model=OPENAI_MODEL,
                    messages=messages,
                    temperature=0.0,
                    timeout=LLM_TIMEOUT_SEC,
                    max_completion_tokens=800,
                )
            else:
                raise
        # Defensive extraction of the text: support both dict-style messages
        # (older SDKs) and attribute-style messages (openai>=1.0).
        text = ""
        try:
            text = resp.choices[0].message["content"]
        except Exception:
            try:
                text = getattr(resp.choices[0].message, "content", "") or str(resp)
            except Exception:
                text = str(resp)

        parsed = _extract_json_from_text(text)
        if not parsed:
            logger.warning("LLM did not return parseable JSON. Raw excerpt: %s", (text or "")[:500])
            return {"decision": "HOLD", "confidence": "LOW", "rationale": ["LLM did not return JSON"]}
        return _validate_llm_response(parsed)
    except Exception as e:
        logger.exception("LLM call failed: %s", e)
        return {"decision": "HOLD", "confidence": "LOW", "rationale": [f"LLM error: {str(e)}"]}

# ---------------- CHAT SERVICE (ChatGPT-only) ----------------
def chat_symbol_service(db, zerodha: ZerodhaClient, user: Dict[str, Any], symbol: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """
    Build a runtime snapshot, send it to ChatGPT, persist the chat record, and
    return the LLM response. No local heuristics or fallback algorithms applied.
    """
    snapshot_id = payload.get("snapshot_id")
    question = (payload.get("question") or "").strip()

    # Snapshot IDs are no longer supported because snapshots are not persisted.
    if snapshot_id:
        logger.info("Ignoring snapshot_id=%s because snapshots are runtime-only", snapshot_id)

    try:
        snapshot = build_runtime_snapshot_service(db, zerodha, symbol)
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("snapshot creation failed for %s: %s", symbol, e)
        raise HTTPException(status_code=500, detail="Snapshot creation failed")
    candles = snapshot.get("candles") or {}
    # `candles` is always a dict of timeframe keys, so check the lists inside
    # it (and the quote) rather than the dict itself, which is always truthy.
    has_data = bool(snapshot.get("quote")) or any(candles.values())
    if not has_data:
        logger.warning("Snapshot for %s has no data; skipping LLM.", symbol)
        llm_response = {"decision": "HOLD", "confidence": "LOW", "rationale": ["Snapshot empty"]}
    else:
        llm_response = call_chatgpt_analyze_service(snapshot, question)

    if not isinstance(llm_response, dict) or "decision" not in llm_response:
        llm_response = {"decision": "HOLD", "confidence": "LOW", "rationale": ["Invalid LLM response"]}

    chat_doc = {
        "symbol": snapshot.get("symbol", symbol),
        "snapshot_id": None,
        "user_id": str(user.get("_id")),
        "question": question,
        "llm_response": llm_response,
        "created_at": datetime.utcnow(),
    }
    db["chats"].insert_one(chat_doc)
    return llm_response
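
# Usage sketch (illustrative; `user` comes from the app's auth layer and only
# its `_id` is read here):
#
#     chat_symbol_service(db, zerodha, user={"_id": "u123"},
#                         symbol="INFY", payload={"question": "Short-term view?"})
#
# Any `snapshot_id` in the payload is accepted but ignored, since snapshots
# are runtime-only; the chat record lands in db["chats"].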

# ---------------- LIVE SIGNALS (ChatGPT-only) ----------------
def get_live_signals_service(db, zerodha: ZerodhaClient, mover: str = "gainers", limit: int = 50, user_id: Optional[str] = None, question: Optional[str] = None) -> Dict[str, Any]:
    """
    For each mover symbol, build a runtime snapshot, call ChatGPT, and persist
    the LLM response. Returns a list of LLM decisions for the frontend.
    """
    results = []
    try:
        movers_doc = db["movers"].find_one({"type":"latest"}) or {}
        candidates = (movers_doc.get("gainers") if mover=="gainers" else movers_doc.get("losers")) or movers_doc.get("merged") or []
        candidates = [c for c in (candidates or []) if isinstance(c,str)]
        if not candidates:
            logger.info("No movers found for mover=%s", mover)
            return {"results": []}

        for raw_symbol in candidates[:limit]:
            try:
                snapshot = build_runtime_snapshot_service(db, zerodha, raw_symbol)
                llm_decision = call_chatgpt_analyze_service(snapshot, question=question)
            except HTTPException as he:
                logger.warning("snapshot build failed for %s: %s", raw_symbol, he.detail)
                llm_decision = {"decision": "HOLD", "confidence": "LOW", "rationale": [f"Snapshot failed: {str(he)}"]}
                snapshot = None
            except Exception as e:
                logger.exception("Error processing %s: %s", raw_symbol, e)
                llm_decision = {"decision": "HOLD", "confidence": "LOW", "rationale": [f"Error: {str(e)}"]}
                snapshot = None

            # persist lightweight stream doc
            stream_doc = {
                "symbol": raw_symbol,
                "user_id": user_id or None,
                "mover_type": mover,
                "snapshot_id": None,
                "llm_response": llm_decision,
                "created_at": datetime.utcnow()
            }
            try:
                inserted = db["stream"].insert_one(stream_doc)
                stream_id = str(inserted.inserted_id)
            except Exception:
                logger.exception("Failed to persist stream doc for %s", raw_symbol)
                stream_id = None

            # shape minimal row for frontend
            quote = (snapshot.get("quote", {}) if isinstance(snapshot, dict) else {}) or {}
            ltp = quote.get("last_price", None)
            row = {
                "id": stream_id,
                "symbol": raw_symbol,
                "decision": llm_decision.get("decision"),
                "confidence": llm_decision.get("confidence"),
                "ltp": ltp,
                "llm": llm_decision,
                "snapshot_id": None,
            }
            results.append(row)

        return {"results": results}
    except Exception as e:
        logger.exception("get_live_signals_service failed: %s", e)
        return {"results": [], "error": str(e)}

# ---------------- ORDER PLACEMENT (delegated, minimal) ----------------
def place_short_order_service(db, zerodha: ZerodhaClient, user: Dict[str, Any], payload: Dict[str, Any]) -> Dict[str, Any]:
    if not payload.get("user_confirmation"):
        raise HTTPException(status_code=400, detail="user_confirmation required")
    user_id = str(user.get("_id"))
    user_settings = db["user_settings"].find_one({"user_id": user_id}) or {}
    if not user_settings.get("allow_shorts", False):
        raise HTTPException(status_code=403, detail="User not permitted to short-sell")

    symbol = payload.get("symbol")
    qty = int(payload.get("quantity",0) or 0)
    price = payload.get("price")
    order_type = payload.get("order_type","MARKET")
    MAX_NOTIONAL = float(os.getenv("MAX_NOTIONAL_PER_ORDER", "200000"))
    notional = (float(price) if price else 0.0) * qty
    if price and notional > MAX_NOTIONAL:
        raise HTTPException(status_code=400, detail="Order exceeds notional limit")
    token = _get_instrument_token_from_db(db, symbol)
    if not token:
        raise HTTPException(status_code=404, detail="Instrument token not found")
    order_payload = {
        "tradingsymbol": symbol,
        "exchange": "NSE",
        "transaction_type": "SELL",
        "quantity": qty,
        "product": "MIS",
        "order_type": order_type.upper(),
    }
    if order_type.upper()=="LIMIT" and price:
        order_payload["price"] = float(price)
    try:
        placed = _call_zerodha(zerodha, "place_order", order_payload)
    except Exception as e:
        logger.exception("Order placement failed: %s", e)
        raise HTTPException(status_code=502, detail=str(e))
    order_doc = {"user_id": user_id, "symbol": symbol, "order_payload": order_payload, "zerodha_response": placed, "created_at": datetime.utcnow()}
    db["orders"].insert_one(order_doc)
    return {"order": order_doc, "zerodha_response": placed}

# ---------------- KITE POSTBACK ----------------
def kite_postback_service(payload: Dict[str, Any]) -> Dict[str, Any]:
    """Persist a Kite postback and raise an alert for order statuses of interest."""
    db = database.get_mongo_db()
    db["zerodha_postbacks"].insert_one({"payload": payload, "received_at": datetime.utcnow()})
    status = payload.get("status")
    if status in ("REJECTED", "CANCELLED", "COMPLETE", "TRIGGER_PENDING"):
        alert = {"type": "order_postback", "payload": payload, "created_at": datetime.utcnow()}
        db["alerts"].insert_one(alert)
    return {"ok": True}
