import asyncio
import logging
import os
import uuid
from datetime import datetime
from typing import Any, Dict, List, Optional

from zoneinfo import ZoneInfo

from app.db.database import get_mongo_db
from app.v1.background.global_intraday import _get_global_zerodha_client
from app.v1.services.stock_history import refresh_stock_history_for_stock

logger = logging.getLogger(__name__)

_INDEXES_ENSURED = False


def _ensure_indexes(db) -> None:
    """Create the unique indexes this module depends on (best effort, once per process).

    Guards against duplicate stock_history documents for one stock_id and
    duplicate system_meta keys, even if multiple processes accidentally run
    background loops at the same time.
    """
    global _INDEXES_ENSURED
    if _INDEXES_ENSURED:
        # Already attempted in this process; no point re-issuing every session.
        return

    try:
        # One history document per stock_id.
        db["stock_history"].create_index(
            [("stock_id", 1)], name="ux_stock_history_stock_id", unique=True, background=True
        )
    except Exception as exc:
        # Mongo refuses to build a unique index over pre-existing duplicates;
        # log and continue rather than break the refresh loop.
        logger.warning("[StockHistory] Could not create unique index on stock_history.stock_id: %s", str(exc))

    try:
        # system_meta stores {key, value} pairs; unique keys keep cursor/marker
        # documents from duplicating.
        db["system_meta"].create_index(
            [("key", 1)], name="ux_system_meta_key", unique=True, background=True
        )
    except Exception as exc:
        logger.warning("[StockHistory] Could not create unique index on system_meta.key: %s", str(exc))

    _INDEXES_ENSURED = True

# Seconds between refresh sessions (the loop clamps this to a minimum of 60).
LOOP_INTERVAL_SECONDS = int(os.getenv("STOCK_HISTORY_REFRESH_LOOP_SECONDS", "1800"))
# Number of stocks fetched from Mongo per batch within a session.
BATCH_SIZE = int(os.getenv("STOCK_HISTORY_REFRESH_BATCH_SIZE", "10"))
# Pause between per-stock refresh calls (gentle pacing for the upstream API).
SLEEP_BETWEEN_CALLS_SEC = float(os.getenv("STOCK_HISTORY_REFRESH_SLEEP_SEC", "0.05"))

# Optional: write failures to a local append-only file for easy debugging.
# Defaults to API/stock_history_error_logs.txt when run from the API folder.
# Set STOCK_HISTORY_ERROR_LOG_PATH to an empty string to disable file logging.
ERROR_LOG_PATH = (os.getenv("STOCK_HISTORY_ERROR_LOG_PATH", "stock_history_error_logs.txt") or "").strip()


def _append_error_log(*, run_id: str, symbol: str, stock_id: str, errors: List[Any]) -> None:
    """Append per-symbol refresh errors to the local error-log file.

    Writes one line per error in the form
    "<utc-ts>Z | <run_id> | <SYMBOL> | <stock_id> | <message>". Does nothing
    when file logging is disabled, the symbol is blank, or the write fails —
    file logging must never disturb the refresh loop.
    """
    if not ERROR_LOG_PATH:
        return
    try:
        stamp = datetime.utcnow().isoformat(timespec="seconds") + "Z"
        clean_sid = (stock_id or "").strip()
        clean_sym = (symbol or "").strip().upper()
        if not clean_sym:
            return

        rows: List[str] = []
        for err in (errors or []):
            # Collapse newlines so each error stays on one log line.
            text = str(err).replace("\n", " ").replace("\r", " ").strip()
            rows.append(f"{stamp} | {run_id} | {clean_sym} | {clean_sid} | {text}\n")

        if rows:
            with open(ERROR_LOG_PATH, "a", encoding="utf-8") as fh:
                fh.writelines(rows)
    except Exception:
        # Best-effort by design.
        pass

# How many stocks to process per run/session. In `eod` / `bootstrap_then_eod`,
# a "session" is the once-per-day run inside the EOD window.
MAX_STOCKS_PER_RUN = int(os.getenv("STOCK_HISTORY_MAX_STOCKS_PER_RUN", "500"))

# Indian Standard Time; all scheduling decisions below are made in IST.
IST = ZoneInfo("Asia/Kolkata")

# Modes (code default below is `bootstrap_then_eod`):
# - continuous: keep refreshing in batches forever
# - eod: run once per IST day in the configured EOD window
# - bootstrap_then_eod: run continuously until history exists for most symbols, then switch to EOD
MODE = (os.getenv("STOCK_HISTORY_REFRESH_MODE", "bootstrap_then_eod") or "bootstrap_then_eod").strip().lower()

# EOD window bounds as "HH:MM" strings in IST (parsed via _parse_hhmm).
EOD_START_HHMM = os.getenv("STOCK_HISTORY_EOD_START_HHMM", "17:00")
EOD_END_HHMM = os.getenv("STOCK_HISTORY_EOD_END_HHMM", "18:30")

# Trading days: by default, only run the scheduled EOD job on weekdays.
WEEKDAYS_ONLY = (os.getenv("STOCK_HISTORY_EOD_WEEKDAYS_ONLY", "1") or "1").strip().lower() not in ("0", "false", "no")
# Allow weekend backfill when data is missing (optional override).
ALLOW_WEEKEND_BACKFILL = (os.getenv("STOCK_HISTORY_ALLOW_WEEKEND_BACKFILL", "0") or "0").strip().lower() in ("1", "true", "yes")

# Freshness threshold for considering bootstrap "complete". If most sampled symbols have a
# day candle within this many IST days of the latest trading day, we consider it up-to-date.
BOOTSTRAP_FRESHNESS_DAYS = int(os.getenv("STOCK_HISTORY_BOOTSTRAP_FRESHNESS_DAYS", "3"))

# Append-only run logs for auditing/monitoring.
RUN_LOG_COLLECTION = os.getenv("STOCK_HISTORY_RUN_LOG_COLLECTION", "stock_history_run_logs")

# Universe filter for history refresh. Defaults to equities + indices.
# This intentionally excludes G-Sec / bond-like instruments that may exist in the instruments master.
INSTRUMENT_TYPES = [t.strip().upper() for t in (os.getenv("STOCK_HISTORY_INSTRUMENT_TYPES", "EQ,INDEX") or "EQ,INDEX").split(",") if t.strip()]

# In this codebase, many debt instruments (GOI loans / G-Secs / TBills) may still appear as
# instrument_type=EQ in Zerodha's instruments master. The most robust discriminator we have
# without fetching market quotes is the Zerodha-provided `name`.
#
# We therefore default to:
# - include the full instrument_type universe (typically EQ)
# - exclude debt-like instruments by `name` (and optionally by a tight symbol suffix blacklist)

# Optional: include only hyphenated symbols that end in one of these suffixes.
# Disabled by default because it can exclude valid symbols in some datasets.
ALLOWED_SYMBOL_SUFFIXES = [
    t.strip().upper()
    for t in (os.getenv("STOCK_HISTORY_ALLOWED_SYMBOL_SUFFIXES", "") or "").split(",")
    if t.strip()
]

# Default: exclude GOI loans and similar debt instruments by Zerodha `name`.
# This is the most reliable discriminator available in your current dataset.
# You can extend/override it via env.
# NOTE: applied case-insensitively in _universe_query ($options: "i").
EXCLUDE_NAME_REGEX = (
    os.getenv(
        "STOCK_HISTORY_EXCLUDE_NAME_REGEX",
        "GOI LOAN|TREASURY|T-BILL|TBILL|G-SEC|GSEC|GOVT|BHARATBOND|GOLDBOND|GOLD BOND",
    )
    or ""
).strip()

# Optional: exclude by symbol/tradingsymbol suffix.
# Default includes common debt / G-Sec / SGB suffixes observed in your dataset.
# Note: Many debt instruments in your `stocks` have an empty `name`, so suffix-based exclusion
# is required as a backstop.
# NOTE: unlike EXCLUDE_NAME_REGEX, this is matched case-sensitively in _universe_query.
EXCLUDE_SYMBOL_REGEX = (
    os.getenv(
        "STOCK_HISTORY_EXCLUDE_SYMBOL_REGEX",
        # Common non-equity / non-candle tickers observed in your dataset:
        # - `*INAV` (ETF indicative NAV tickers)
        # - `-IT` (index/indicative tickers)
        # - `-W1`, `-W2` ... (warrants/rights series in some feeds)
        r"(?:-(SG|GS|TB|GB|N[0-9A-Z]{1,2}|IV|IT|W\d+)|INAV)$",
    )
    or ""
).strip()


def _universe_query(*, after_symbol: str = "") -> Dict[str, Any]:
    """Build the Mongo filter describing the refreshable stock universe.

    Selects active NSE instruments that carry a stock_id, an instrument_token
    and a non-empty Zerodha `name`, restricted to INSTRUMENT_TYPES, minus the
    configured debt-like name/symbol patterns. `after_symbol` adds a
    `symbol > X` condition used for cursor pagination.
    """
    query: Dict[str, Any] = {
        "is_active": {"$ne": False},
        "exchange": "NSE",
        "instrument_token": {"$ne": None},
        "stock_id": {"$ne": None},
        # Many non-equity instruments in this dataset carry an empty/blank
        # `name`; requiring it acts as a safety backstop so the universe stays
        # tradable.
        "name": {"$exists": True, "$ne": ""},
        "instrument_type": {"$in": INSTRUMENT_TYPES},
    }

    # Exclude debt-like instruments via Zerodha-provided metadata patterns —
    # more stable than guessing from symbol format alone.
    excluded: List[Dict[str, Any]] = []
    if EXCLUDE_NAME_REGEX:
        for field in ("name", "tradingsymbol", "symbol"):
            excluded.append({field: {"$regex": EXCLUDE_NAME_REGEX, "$options": "i"}})
    if EXCLUDE_SYMBOL_REGEX:
        for field in ("symbol", "tradingsymbol"):
            excluded.append({field: {"$regex": EXCLUDE_SYMBOL_REGEX}})
    if excluded:
        query["$nor"] = excluded

    # Optional strict allowlist by hyphen suffix (off by default).
    if ALLOWED_SYMBOL_SUFFIXES:
        joined = "|".join(s.replace("|", "") for s in ALLOWED_SYMBOL_SUFFIXES)
        query["$or"] = [
            {"symbol": {"$not": {"$regex": "-"}}},
            {"symbol": {"$regex": rf"-({joined})$"}},
        ]

    if after_symbol:
        query["symbol"] = {"$gt": after_symbol}
    return query


def _parse_hhmm(raw: str, default_h: int, default_m: int) -> tuple[int, int]:
    try:
        parts = (raw or "").strip().split(":")
        if len(parts) != 2:
            return default_h, default_m
        h = int(parts[0])
        m = int(parts[1])
        if 0 <= h <= 23 and 0 <= m <= 59:
            return h, m
    except Exception:
        pass
    return default_h, default_m


def _is_in_eod_window(now_utc: Optional[datetime] = None) -> bool:
    """Return True when the current (or given) time falls inside the EOD window, IST.

    Window bounds come from EOD_START_HHMM / EOD_END_HHMM. If those strings are
    malformed, we now fall back to 17:00-18:30 — the same values as the env-var
    defaults — instead of the stale hard-coded 15:40-23:30 window.

    Args:
        now_utc: naive UTC timestamp to evaluate; defaults to ``datetime.utcnow()``.
    """
    now_utc = now_utc or datetime.utcnow()
    now_ist = now_utc.replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
    # Keep fallbacks in sync with the STOCK_HISTORY_EOD_*_HHMM defaults above.
    sh, sm = _parse_hhmm(EOD_START_HHMM, 17, 0)
    eh, em = _parse_hhmm(EOD_END_HHMM, 18, 30)
    start = now_ist.replace(hour=sh, minute=sm, second=0, microsecond=0)
    end = now_ist.replace(hour=eh, minute=em, second=0, microsecond=0)
    # Inclusive on both ends.
    return start <= now_ist <= end


def _is_weekday_ist(now_utc: Optional[datetime] = None) -> bool:
    """True when "now" (or the given naive UTC time) falls on Mon-Fri in IST."""
    if now_utc is None:
        now_utc = datetime.utcnow()
    ist_now = now_utc.replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
    # weekday(): Mon=0 .. Sun=6, so anything <= 4 is a weekday.
    return ist_now.weekday() <= 4


def _latest_trading_day_ist(now_utc: Optional[datetime] = None) -> str:
    """Best-effort latest trading day (weekdays only).

    We don't have exchange holiday calendars here; this simply maps Sat/Sun -> Fri.
    """
    now_utc = now_utc or datetime.utcnow()
    now_ist = now_utc.replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
    d = now_ist.date()
    if now_ist.weekday() == 5:  # Sat
        d = d.replace(day=d.day)  # no-op; kept for clarity
        from datetime import timedelta as _td

        d = d - _td(days=1)
    elif now_ist.weekday() == 6:  # Sun
        from datetime import timedelta as _td

        d = d - _td(days=2)
    return d.isoformat()


def _last_eod_run_key(now_utc: Optional[datetime] = None) -> str:
    """Trading-day key to stamp when an EOD refresh completes.

    Weekend runs are attributed to the preceding Friday (the latest trading
    day) rather than the calendar date the job happened to execute on.
    """
    return _latest_trading_day_ist(now_utc)


def _load_last_eod_run(db) -> str:
    """Read the last-completed EOD run marker from system_meta ("" when absent/unreadable)."""
    try:
        row = db["system_meta"].find_one({"key": "stock_history_refresh_last_eod_run"})
        return str((row or {}).get("value") or "")
    except Exception:
        # Best-effort read; a failed lookup simply means "never ran".
        return ""


def _save_last_eod_run(db, value: str) -> None:
    """Upsert the last-completed EOD run marker in system_meta (best effort)."""
    payload = {
        "key": "stock_history_refresh_last_eod_run",
        "value": str(value or ""),
        "updated_at": datetime.utcnow(),
    }
    try:
        db["system_meta"].update_one(
            {"key": "stock_history_refresh_last_eod_run"},
            {"$set": payload},
            upsert=True,
        )
    except Exception:
        # Never let marker persistence break the loop.
        pass


def _save_run_log(db, doc: Dict[str, Any]) -> None:
    """Append one summary document to the run-log collection; candle data never goes here."""
    if not isinstance(doc, dict):
        # Defensive: only dict payloads are insertable.
        return
    try:
        db[RUN_LOG_COLLECTION].insert_one(doc)
    except Exception:
        logger.exception("[StockHistory] Failed to write run log")


def _history_coverage_ok(db, *, sample_limit: int = 200) -> bool:
    """Readiness heuristic: do >=80% of sampled universe stocks have DAILY candles?

    Weekly/monthly series are derived from daily candles at read time, so daily
    coverage is the only requirement checked. Any lookup failure yields False.
    """
    try:
        sample = list(
            db["stocks"].find(
                _universe_query(),
                {"_id": 0, "stock_id": 1},
            ).limit(max(20, sample_limit))
        )
        sample_ids = [str(row.get("stock_id")) for row in sample if row.get("stock_id")]
        if not sample_ids:
            return False

        # Accept both the new schema (top-level `day` array, one doc per
        # stock_id) and the legacy nested `candles.day` layout.
        docs = db["stock_history"].find(
            {"stock_id": {"$in": sample_ids}},
            {"_id": 0, "stock_id": 1, "day": 1, "candles": 1},
        )

        covered: set[str] = set()
        for doc in docs:
            sid = str(doc.get("stock_id") or "")
            if not sid:
                continue
            day_arr = doc.get("day")
            if not (isinstance(day_arr, list) and day_arr):
                nested = doc.get("candles")
                day_arr = nested.get("day") if isinstance(nested, dict) else None
            if isinstance(day_arr, list) and day_arr:
                covered.add(sid)

        hits = sum(1 for sid in sample_ids if sid in covered)
        # >=80% of the sample having daily candles counts as "ready".
        return (hits / max(1, len(sample_ids))) >= 0.8
    except Exception:
        return False


def _history_fresh_enough(db, *, sample_limit: int = 200, max_age_days: int = 3) -> bool:
    """Heuristic: history is "fresh" if most sampled symbols have a recent day candle.

    We consider "recent" relative to the latest trading day (weekdays-only approximation).

    Args:
        db: Mongo database handle.
        sample_limit: number of universe stocks to sample (a floor of 20 is applied).
        max_age_days: maximum age, in days relative to the latest trading day,
            for a day candle to still count as fresh.

    Returns:
        True when >=80% of sampled stocks have a day candle aged between 0 and
        ``max_age_days``; False otherwise — including on any database or
        parsing failure (best effort by design).
    """
    try:
        stocks = list(
            db["stocks"].find(
                _universe_query(),
                {"_id": 0, "stock_id": 1},
            ).limit(max(20, sample_limit))
        )
        if not stocks:
            return False

        ids = [str(s.get("stock_id")) for s in stocks if s.get("stock_id")]
        if not ids:
            return False

        # Pull the last stored day candle date for each sampled stock.
        docs = list(
            db["stock_history"].find(
                {"stock_id": {"$in": ids}},
                {"_id": 0, "stock_id": 1, "day_last_date": 1, "day": 1, "candles": 1},
            )
        )
        # Map: stock_id -> ISO date (YYYY-MM-DD) of its most recent day candle.
        last_by_id: Dict[str, str] = {}
        for d in docs:
            sid = str(d.get("stock_id") or "")
            if not sid:
                continue

            # Fast path (new schema): explicit metadata.
            m = d.get("day_last_date")
            if isinstance(m, str) and m.strip():
                try:
                    last_by_id[sid] = datetime.fromisoformat(m.strip()).date().isoformat()
                    continue
                except Exception:
                    # Unparseable metadata: fall through to scanning the arrays.
                    pass

            # Slow path: trailing candle from top-level `day` (new schema) or
            # the legacy nested `candles.day` array.
            day_candles = d.get("day")
            if not isinstance(day_candles, list) or not day_candles:
                candles_obj = d.get("candles")
                if isinstance(candles_obj, dict):
                    day_candles = candles_obj.get("day")
            if not isinstance(day_candles, list) or not day_candles:
                continue
            last = day_candles[-1] or {}
            dt = last.get("date")
            # Normalize to ISO date string
            if isinstance(dt, str):
                # accept both date-only and datetime strings
                try:
                    last_by_id[sid] = datetime.fromisoformat(dt.replace("Z", "+00:00")).date().isoformat()
                except Exception:
                    continue
            elif isinstance(dt, datetime):
                last_by_id[sid] = dt.date().isoformat()

        if not last_by_id:
            return False

        latest_trade_day = _latest_trading_day_ist()
        latest_dt = datetime.fromisoformat(latest_trade_day)

        ok = 0
        for sid in ids:
            s = last_by_id.get(sid)
            if not s:
                continue
            try:
                d = datetime.fromisoformat(s)
                # Negative age means a future-dated candle; it is not counted as fresh.
                age = (latest_dt - d).days
                if age >= 0 and age <= int(max_age_days):
                    ok += 1
            except Exception:
                continue

        # If >=80% have a fresh day candle, treat as "up-to-date".
        return (ok / max(1, len(ids))) >= 0.8
    except Exception:
        return False


def _load_cursor(db) -> str:
    """Return the pagination cursor (last processed symbol) from system_meta, or ""."""
    try:
        row = db["system_meta"].find_one({"key": "stock_history_refresh_cursor"})
        return str((row or {}).get("value") or "")
    except Exception:
        # A failed read just restarts pagination from the beginning.
        return ""


def _save_cursor(db, value: str) -> None:
    """Upsert the pagination cursor in system_meta (best effort; failures ignored)."""
    update = {
        "key": "stock_history_refresh_cursor",
        "value": str(value or ""),
        "updated_at": datetime.utcnow(),
    }
    try:
        db["system_meta"].update_one(
            {"key": "stock_history_refresh_cursor"},
            {"$set": update},
            upsert=True,
        )
    except Exception:
        # Cursor persistence must never break the loop.
        pass


def _next_batch(db, after_symbol: str, batch_size: int) -> List[Dict[str, Any]]:
    """Fetch the next alphabetical batch of universe stocks after `after_symbol`."""
    projection = {"_id": 0, "symbol": 1, "stock_id": 1, "exchange": 1, "instrument_token": 1}
    cursor = (
        db["stocks"]
        .find(_universe_query(after_symbol=after_symbol), projection)
        .sort([("symbol", 1)])
        .limit(max(1, batch_size))
    )
    return list(cursor)


async def stock_history_refresh_loop(interval_seconds: Optional[int] = None) -> None:
    """Forever-running background task that refreshes daily stock history.

    Each outer iteration obtains a Mongo handle from get_mongo_db(), runs at
    most one refresh "session" (bounded by MAX_STOCKS_PER_RUN and gated by
    MODE / EOD window / weekday checks), writes a compact run-log document,
    then sleeps for ``interval_seconds``.

    Args:
        interval_seconds: seconds between sessions; defaults to
            LOOP_INTERVAL_SECONDS and is clamped to a minimum of 60.
    """
    interval_seconds = int(interval_seconds or LOOP_INTERVAL_SECONDS)
    interval_seconds = max(60, interval_seconds)

    # If true, `bootstrap_then_eod` behaves like the old "continuous until ready" mode.
    # Default is false to respect the configured EOD window (prevents immediate refresh on server start).
    BOOTSTRAP_CONTINUOUS = (os.getenv("STOCK_HISTORY_BOOTSTRAP_CONTINUOUS", "0") or "0").strip().lower() in ("1", "true", "yes")

    while True:
        try:
            # get_mongo_db() is iterated for a single handle; the `break` at the
            # bottom ensures exactly one session per outer-loop iteration.
            for db in get_mongo_db():
                _ensure_indexes(db)
                run_id = uuid.uuid4().hex[:8]
                session_started = datetime.utcnow()
                session_mode = MODE
                session_bootstrap_ready = None
                session_processed = 0
                session_updated_day = 0
                session_errors = 0
                session_last_symbol = None
                session_stop_reason: Optional[str] = None
                session_enforce_eod_schedule = False

                # Universe stats (helps explain why totals don't match expectations).
                try:
                    base_q = _universe_query()
                    universe_with_stock_id = db["stocks"].count_documents(base_q)
                    # For clarity, also report how many "instrument_type" matches without suffix filtering.
                    raw_q: Dict[str, Any] = {
                        "is_active": {"$ne": False},
                        "exchange": "NSE",
                        "instrument_token": {"$ne": None},
                        "stock_id": {"$ne": None},
                        "instrument_type": {"$in": INSTRUMENT_TYPES},
                    }
                    universe_total = db["stocks"].count_documents(raw_q)
                except Exception:
                    universe_total = None
                    universe_with_stock_id = None

                # Mode gate: EOD-only schedules.
                if MODE in ("eod", "bootstrap_then_eod"):
                    # "Ready" requires BOTH coverage and freshness (sampled heuristics).
                    bootstrap_ready = _history_coverage_ok(db) and _history_fresh_enough(
                        db, sample_limit=200, max_age_days=max(1, BOOTSTRAP_FRESHNESS_DAYS)
                    )
                    session_bootstrap_ready = bool(bootstrap_ready)

                    # Default behavior: ALWAYS respect the configured EOD window in both modes.
                    # This prevents immediate refresh when the server boots.
                    enforce_eod_schedule = (MODE == "eod") or (MODE == "bootstrap_then_eod" and not BOOTSTRAP_CONTINUOUS) or (
                        MODE == "bootstrap_then_eod" and bootstrap_ready
                    )

                    session_enforce_eod_schedule = bool(enforce_eod_schedule)

                    if enforce_eod_schedule:
                        # Weekday-only scheduled run (trading days are weekdays). Allow override.
                        # NOTE: these `break`s exit the db loop BEFORE the run log
                        # is written, so gated-out sessions are not logged.
                        if WEEKDAYS_ONLY and not _is_weekday_ist() and not ALLOW_WEEKEND_BACKFILL:
                            session_stop_reason = "not_weekday"
                            break
                        if not _is_in_eod_window():
                            session_stop_reason = "outside_eod_window"
                            break
                        today_key = _last_eod_run_key()
                        if _load_last_eod_run(db) == today_key:
                            session_stop_reason = "already_ran_today"
                            break

                zerodha = _get_global_zerodha_client(db)
                if not zerodha:
                    logger.warning("[StockHistory] Global Zerodha client unavailable; skipping")
                    break

                processed = 0
                latest_trade_day = _latest_trading_day_ist()
                touched_eod_window = _is_in_eod_window()
                logger.info(
                    "[StockHistory] Run start id=%s mode=%s instrument_types=%s excluded_name=%s excluded_symbol=%s allowed_suffixes=%s universe_raw=%s universe_filtered=%s",
                    run_id,
                    session_mode,
                    INSTRUMENT_TYPES,
                    EXCLUDE_NAME_REGEX,
                    EXCLUDE_SYMBOL_REGEX,
                    ALLOWED_SYMBOL_SUFFIXES,
                    universe_total,
                    universe_with_stock_id,
                )
                # Session loop: process many batches in one run (bounded by MAX_STOCKS_PER_RUN).
                while processed < max(1, MAX_STOCKS_PER_RUN):
                    # Track whether this run overlapped the EOD window.
                    # IMPORTANT: In bootstrap/continuous runs, the session may start outside the
                    # window but continue into it; we still want to stamp the EOD marker once
                    # we have actually processed data during the EOD window.
                    if not touched_eod_window and _is_in_eod_window():
                        touched_eod_window = True

                    # In EOD modes, stop if we fall out of window.
                    if session_enforce_eod_schedule and not _is_in_eod_window():
                        session_stop_reason = "fell_outside_eod_window"
                        break

                    cursor_before = _load_cursor(db)
                    batch = _next_batch(db, cursor_before, BATCH_SIZE)
                    if not batch:
                        # Cursor is past the end of the (filtered) universe.
                        # Reset cursor so the next session starts from the beginning.
                        if cursor_before:
                            _save_cursor(db, "")
                            logger.info("[StockHistory] Reached end of universe; resetting cursor")
                            session_stop_reason = "completed"
                        else:
                            logger.info("[StockHistory] No stocks found to refresh")
                            session_stop_reason = "no_stocks"
                        break

                    last_symbol = cursor_before
                    for stock in batch:
                        if processed >= max(1, MAX_STOCKS_PER_RUN):
                            break
                        sym = (stock.get("symbol") or "").strip().upper()
                        if not sym:
                            continue

                        sid = str(stock.get("stock_id") or "")

                        # Skip API calls if we already have the latest trading day.
                        try:
                            if sid:
                                h = db["stock_history"].find_one({"stock_id": sid}, {"_id": 0, "day_last_date": 1}) or {}
                                d = h.get("day_last_date")
                                if isinstance(d, str) and d.strip() and d.strip() >= latest_trade_day:
                                    # Already fresh: advance cursor/counters without a Zerodha call.
                                    last_symbol = sym
                                    processed += 1
                                    session_processed += 1
                                    session_last_symbol = last_symbol
                                    continue
                        except Exception:
                            pass

                        res = refresh_stock_history_for_stock(db=db, zerodha_client=zerodha, stock=stock, timeframes=["day"])
                        last_symbol = sym
                        processed += 1
                        session_processed += 1
                        session_last_symbol = last_symbol

                        if isinstance(res, dict):
                            updated = res.get("updated")
                            if isinstance(updated, list) and updated:
                                if "day" in updated:
                                    session_updated_day += 1
                            errs = res.get("errors")
                            if isinstance(errs, list) and errs:
                                session_errors += len(errs)
                                _append_error_log(run_id=run_id, symbol=sym, stock_id=sid, errors=errs)
                        # Gentle pacing between per-stock refresh calls.
                        if SLEEP_BETWEEN_CALLS_SEC > 0:
                            await asyncio.sleep(SLEEP_BETWEEN_CALLS_SEC)

                    _save_cursor(db, last_symbol)
                    logger.info(
                        "[StockHistory] id=%s Refreshed batch size=%d cursor=%s->%s processed=%d",
                        run_id,
                        len(batch),
                        cursor_before,
                        last_symbol,
                        processed,
                    )

                # Mark the once-per-day EOD session as done.
                # In bootstrap_then_eod, we still mark it so the job won't rerun repeatedly during the same day.
                if MODE in ("eod", "bootstrap_then_eod") and touched_eod_window and processed > 0:
                    _save_last_eod_run(db, _last_eod_run_key())

                if session_stop_reason is None:
                    if processed >= max(1, MAX_STOCKS_PER_RUN):
                        session_stop_reason = "max_stocks_per_run"
                    else:
                        session_stop_reason = "completed"

                # Always write a compact run log so you can track progress over time.
                try:
                    now_ist = datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
                    _save_run_log(
                        db,
                        {
                            "timestamp": datetime.utcnow(),
                            "ist_date": now_ist.date().isoformat(),
                            "mode": session_mode,
                            "bootstrap_ready": session_bootstrap_ready,
                            "stop_reason": session_stop_reason,
                            "instrument_types": INSTRUMENT_TYPES,
                            "allowed_symbol_suffixes": ALLOWED_SYMBOL_SUFFIXES,
                            "exclude_name_regex": EXCLUDE_NAME_REGEX,
                            "exclude_symbol_regex": EXCLUDE_SYMBOL_REGEX,
                            "universe_total": universe_total,
                            "universe_with_stock_id": universe_with_stock_id,
                            "eod_window": {"start": EOD_START_HHMM, "end": EOD_END_HHMM},
                            "weekdays_only": WEEKDAYS_ONLY,
                            "allow_weekend_backfill": ALLOW_WEEKEND_BACKFILL,
                            "batch_size": BATCH_SIZE,
                            "sleep_between_calls_sec": SLEEP_BETWEEN_CALLS_SEC,
                            "max_stocks_per_run": MAX_STOCKS_PER_RUN,
                            "processed": session_processed,
                            "updated": {"day": session_updated_day},
                            "errors": session_errors,
                            "cursor_last_symbol": session_last_symbol,
                            "run_id": run_id,
                            "duration_seconds": round((datetime.utcnow() - session_started).total_seconds(), 2),
                        },
                    )
                except Exception:
                    logger.exception("[StockHistory] Failed to assemble run log")
                # One session per iteration: exit the db generator loop.
                break
        except Exception:
            logger.exception("[StockHistory] Refresh loop error")

        await asyncio.sleep(interval_seconds)
