import asyncio
import logging
import os
from logging.handlers import RotatingFileHandler
from datetime import datetime
from datetime import timedelta
from zoneinfo import ZoneInfo
from typing import Dict, Any, List, Optional, Tuple

from app.db.database import get_mongo_db
from app.v1.services.zerodha.client import ZerodhaClient
from app.v1.services.teGPT import analyze_symbol_service
from app.v1.services.paper_trading import process_signal_and_market_update
from app.v1.services.alerts import upsert_intraday_alert_from_snapshot
from app.v1.utils.confidence import normalize_confidence
from app.v1.services.intraday_watchlist import ist_date_str


logger = logging.getLogger(__name__)


def _setup_global_intraday_file_logger() -> None:
    """Attach a rotating file handler for GlobalIntraday logs.

    Uvicorn reload can re-import modules; avoid adding duplicate handlers.
    """

    try:
        # File is: API/app/v1/background/global_intraday.py
        # Go up 3 levels from background/ -> v1/ -> app/ -> API/
        api_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.."))
        default_path = os.path.join(api_root, "logs", "global_intraday.log")
        log_path = os.getenv("GLOBAL_INTRADAY_LOG_PATH", default_path)
        if not log_path:
            return

        log_dir = os.path.dirname(os.path.abspath(log_path))
        if log_dir:
            os.makedirs(log_dir, exist_ok=True)

        for h in list(logger.handlers):
            if isinstance(h, RotatingFileHandler) and getattr(h, "baseFilename", None) == os.path.abspath(log_path):
                return

        handler = RotatingFileHandler(
            log_path,
            maxBytes=int(os.getenv("GLOBAL_INTRADAY_LOG_MAX_BYTES") or str(5 * 1024 * 1024)),
            backupCount=int(os.getenv("GLOBAL_INTRADAY_LOG_BACKUP_COUNT") or "5"),
        )
        handler.setLevel(logging.INFO)
        handler.setFormatter(
            logging.Formatter(
                fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            )
        )

        logger.addHandler(handler)
        # Prevent double-write if the root logger is configured with its own file handler.
        logger.propagate = False
    except Exception:
        # Never break the background loop due to logging setup.
        logger.debug("[GlobalIntraday] Failed to attach file logger", exc_info=True)


_setup_global_intraday_file_logger()

GLOBAL_ZERODHA_EMAIL = os.getenv("GLOBAL_ZERODHA_EMAIL", "kumar@movex.ai").lower()

# Keep the background cycle bounded so it doesn't monopolize the machine.
# NOTE: if <= 0, treat as UNLIMITED (analyze all eligible symbols in the cycle).
MAX_ANALYSES_PER_CYCLE = int(os.getenv("GLOBAL_INTRADAY_MAX_ANALYSES", "120"))

# Scheduling
FULL_INTERVAL_SECONDS = int(os.getenv("GLOBAL_INTRADAY_FULL_INTERVAL_SECONDS", "900"))
EARLY_FAST_INTERVAL_SECONDS = int(os.getenv("GLOBAL_INTRADAY_EARLY_FAST_INTERVAL_SECONDS", "300"))
EARLY_FAST_WINDOW_START = os.getenv("GLOBAL_INTRADAY_EARLY_FAST_WINDOW_START", "09:15")
EARLY_FAST_WINDOW_END = os.getenv("GLOBAL_INTRADAY_EARLY_FAST_WINDOW_END", "10:00")

# List sizes
ET_LIMIT_PER_SIDE = int(os.getenv("GLOBAL_INTRADAY_ET_LIMIT_PER_SIDE", "20"))
EARLY_MOVERS_LIMIT = int(os.getenv("GLOBAL_INTRADAY_EARLY_MOVERS_LIMIT", "40"))
MANUAL_LIMIT = int(os.getenv("GLOBAL_INTRADAY_MANUAL_LIMIT", "50"))

IST = ZoneInfo("Asia/Kolkata")
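# Guards against overlapping cycles when a run outlasts its scheduling interval.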
_GLOBAL_INTRADAY_LOCK = asyncio.Lock()

# Market-hours enforcement (default ON): prevents GPT calls after-hours.
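# e.g. set GLOBAL_INTRADAY_ENFORCE_MARKET_HOURS=0 (or "false"/"no"/"off") to disable the gate.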
GLOBAL_INTRADAY_ENFORCE_MARKET_HOURS = (os.getenv("GLOBAL_INTRADAY_ENFORCE_MARKET_HOURS", "1") or "1").strip().lower() not in (
    "0",
    "false",
    "no",
    "off",
)


def _is_market_hours_ist(now_utc: Optional[datetime] = None) -> bool:
    """Backward-compatible wrapper for configurable backend market hours."""
    from app.v1.utils.market_time import is_backend_market_hours

    return is_backend_market_hours(now_utc)


def _next_backend_market_open_ist(now_utc: Optional[datetime] = None) -> Optional[datetime]:
    """Return the next backend market open time in IST."""
    from app.v1.utils.market_time import backend_market_window

    now = now_utc or datetime.utcnow()
    try:
        ist_now = now.replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
    except Exception:
        ist_now = datetime.now(IST)

    w = backend_market_window()
    candidate = ist_now.replace(hour=w.start_h, minute=w.start_m, second=0, microsecond=0)

    # If we've passed today's open, move to the next day.
    if ist_now >= candidate:
        candidate = candidate + timedelta(days=1)

    # Skip weekends.
    while candidate.weekday() >= 5:
        candidate = candidate + timedelta(days=1)

    return candidate


def _seconds_until(dt_from: datetime, dt_to: datetime) -> int:
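    """Whole seconds from dt_from to dt_to, clamped to a 5-second minimum."""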
    try:
        sec = int((dt_to - dt_from).total_seconds())
    except Exception:
        sec = 0
    return max(5, sec)


def _parse_hhmm(s: str, *, default_h: int, default_m: int) -> Tuple[int, int]:
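    """Parse an "HH:MM" string into (hour, minute), falling back to the defaults.

    e.g. _parse_hhmm("09:15", default_h=9, default_m=15) -> (9, 15).
    """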
    try:
        raw = (s or "").strip()
        if ":" not in raw:
            return default_h, default_m
        hh, mm = raw.split(":", 1)
        h = int(hh)
        m = int(mm)
        if 0 <= h <= 23 and 0 <= m <= 59:
            return h, m
    except Exception:
        pass
    return default_h, default_m


def _in_early_fast_window(now_utc: Optional[datetime] = None) -> bool:
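    """Return True while IST now falls inside [EARLY_FAST_WINDOW_START, EARLY_FAST_WINDOW_END)."""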
    now = now_utc or datetime.utcnow()
    try:
        ist_now = now.replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
    except Exception:
        ist_now = datetime.now(IST)

    sh, sm = _parse_hhmm(EARLY_FAST_WINDOW_START, default_h=9, default_m=15)
    eh, em = _parse_hhmm(EARLY_FAST_WINDOW_END, default_h=10, default_m=0)

    start = ist_now.replace(hour=sh, minute=sm, second=0, microsecond=0)
    end = ist_now.replace(hour=eh, minute=em, second=0, microsecond=0)
    return start <= ist_now < end


def _load_early_movers_symbols(db) -> List[str]:
    """Load today's (or latest) early movers snapshot symbols (DB-only)."""

    ist_date = ist_date_str()
    early_coll = db[os.getenv("EARLY_MOVERS_SNAPSHOT_COLLECTION", "early_movers_snapshots")]
    em_snap = early_coll.find_one({"date": ist_date})
    if not em_snap:
        em_snap = early_coll.find_one({}, sort=[("date", -1)])

    out: List[str] = []
    if not isinstance(em_snap, dict):
        return out

    top = em_snap.get("top") if isinstance(em_snap.get("top"), dict) else {}
    bullish = top.get("bullish") if isinstance(top.get("bullish"), list) else []
    bearish = top.get("bearish") if isinstance(top.get("bearish"), list) else []

    for it in [*bullish, *bearish]:
        if not isinstance(it, dict):
            continue
        sym = (it.get("symbol") or "").strip().upper()
        if sym and sym not in out:
            out.append(sym)

    if EARLY_MOVERS_LIMIT > 0:
        out = out[: int(EARLY_MOVERS_LIMIT)]
    return out


def _load_et_symbols_and_source(db) -> Tuple[List[str], Dict[str, str]]:
    """Load ET movers symbols from `live_movers` (DB-only)."""

    symbols: List[str] = []
    source_by_symbol: Dict[str, str] = {}
    if ET_LIMIT_PER_SIDE <= 0:
        return symbols, source_by_symbol

    gainers = list(
        db["live_movers"]
        .find({"mover_type": "GAINER"}, {"_id": 0, "stock_id": 1, "rank": 1})
        .sort([("rank", 1), ("last_updated", -1)])
        .limit(int(ET_LIMIT_PER_SIDE))
    )
    losers = list(
        db["live_movers"]
        .find({"mover_type": "LOSER"}, {"_id": 0, "stock_id": 1, "rank": 1})
        .sort([("rank", 1), ("last_updated", -1)])
        .limit(int(ET_LIMIT_PER_SIDE))
    )

    stock_ids: List[str] = []
    for r in gainers + losers:
        sid = r.get("stock_id")
        if isinstance(sid, str) and sid and sid not in stock_ids:
            stock_ids.append(sid)

    if not stock_ids:
        return symbols, source_by_symbol

    stocks = list(db["stocks"].find({"stock_id": {"$in": stock_ids}}, {"_id": 0, "stock_id": 1, "symbol": 1}))
    sym_by_id: Dict[str, str] = {}
    for s in stocks:
        sid = s.get("stock_id")
        sym = (s.get("symbol") or "").strip().upper()
        if sid and sym:
            sym_by_id[str(sid)] = sym

    for rows, mover_type in ((gainers, "GAINER"), (losers, "LOSER")):
        for r in rows:
            sid = r.get("stock_id")
            sym = sym_by_id.get(str(sid)) if sid else None
            if sym and sym not in symbols:
                symbols.append(sym)
                source_by_symbol[sym] = mover_type

    return symbols, source_by_symbol


def _load_manual_symbols(db) -> List[str]:
    """Load distinct manual/watchlist symbols for today's IST date (DB-only)."""

    if MANUAL_LIMIT <= 0:
        return []

    ist_date = ist_date_str()
    out: List[str] = []

    cur = (
        db["user_portfolio_items"]
        .find({"ist_date": ist_date, "status": "ACTIVE"}, {"_id": 0, "symbol": 1})
        .sort([("updated_at", -1), ("created_at", -1)])
    )
    for it in cur:
        if len(out) >= int(MANUAL_LIMIT):
            break
        sym = (it.get("symbol") or "").strip().upper()
        if sym and sym not in out:
            out.append(sym)
    return out


def _get_global_user_id(db) -> Optional[str]:
    """Resolve the global Zerodha user from email.

    We treat this user (kumar@movex.ai by default) as the single owner of the
    Zerodha connection and the logical "owner" of background GPT analyses;
    nothing is computed per frontend user.
    """

    user_doc = db["users"].find_one({"email": GLOBAL_ZERODHA_EMAIL})
    if not user_doc:
        logger.error("[GlobalIntraday] Global Zerodha user %s not found in users collection", GLOBAL_ZERODHA_EMAIL)
        return None
    return str(user_doc.get("_id"))


def _get_global_account_id(db) -> Optional[str]:
    """Resolve the global Zerodha user's account_id (string)."""

    user_doc = db["users"].find_one({"email": GLOBAL_ZERODHA_EMAIL})
    if not user_doc:
        return None
    acct = user_doc.get("account_id")
    return str(acct) if acct is not None else None


def _get_global_zerodha_client(db) -> Optional[ZerodhaClient]:
    """Build ZerodhaClient from the global user's stored settings.

    This reuses the existing `zerodha_settings` collection: we look up
    the record where `user_id` matches the global user's Mongo _id.
    """

    user_id = _get_global_user_id(db)
    if not user_id:
        return None

    settings = db["zerodha_settings"].find_one({"user_id": user_id})
    if not settings:
        logger.error("[GlobalIntraday] Zerodha settings not found for user_id=%s", user_id)
        return None

    access_token = settings.get("access_token")
    if not access_token:
        logger.error("[GlobalIntraday] Zerodha access_token missing for global user_id=%s", user_id)
        return None

    client = ZerodhaClient(
        api_key=settings.get("api_key"),
        api_secret=settings.get("api_secret"),
        access_token=access_token,
    )

    # Lightweight health check: if this fails (e.g. TokenException),
    # we treat Zerodha as unauthenticated and skip the entire cycle.
    try:
        client.get_profile()
    except Exception as e:  # pragma: no cover - defensive logging
        logger.error(
            "[GlobalIntraday] Zerodha profile check failed for global user_id=%s: %s",
            user_id,
            e,
        )
        return None

    return client


def _run_intraday_cycle(db, *, mode: str) -> None:
    """Run one intraday analysis cycle.

    IMPORTANT:
    - DB-only list reads (early movers snapshot, live_movers, manual watchlist).
    - Must NOT scrape ET or rebuild live_movers.
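
    Modes:
    - "full": union of early movers + ET movers + manual watchlist.
    - "early_only": early movers only (used during the early-fast window).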
    """

    logger.info("[GlobalIntraday] Cycle start | mode=%s", mode)

    zerodha_client = _get_global_zerodha_client(db)
    if not zerodha_client:
        logger.error("[GlobalIntraday] Global Zerodha client unavailable; skipping analysis")
        return

    early_syms = _load_early_movers_symbols(db)
    et_syms, et_source_by_sym = _load_et_symbols_and_source(db)
    manual_syms = _load_manual_symbols(db)

    mode_norm = (mode or "").strip().lower()
    if mode_norm == "early_only":
        # During the early-fast window, only refresh early movers.
        symbols = early_syms
        source_by_symbol: Dict[str, str] = {s: "EARLY_MOVERS" for s in early_syms}
        context = "global_intraday_early_fast"
    else:
        # Full cycle uses the union: early movers + ET movers (top N per side) + manual watchlist.
        symbols = list(dict.fromkeys([*early_syms, *et_syms, *manual_syms]))
        source_by_symbol = {**{s: "EARLY_MOVERS" for s in early_syms}, **et_source_by_sym}
        for s in manual_syms:
            source_by_symbol.setdefault(s, "MANUAL")
        context = "global_intraday"

    cap: Optional[int] = None
    if MAX_ANALYSES_PER_CYCLE > 0:
        cap = int(MAX_ANALYSES_PER_CYCLE)

    logger.info(
        "[GlobalIntraday] Universe | mode=%s early=%d et=%d manual=%d unique=%d cap=%s",
        mode_norm,
        len(early_syms),
        len(et_syms),
        len(manual_syms),
        len(symbols) if isinstance(symbols, list) else 0,
        str(cap) if cap is not None else "UNLIMITED",
    )

    if not symbols:
        logger.info("[GlobalIntraday] No symbols to analyze")
        return

    # Resolve symbols to stock_ids using stocks master.
    stocks = list(
        db["stocks"].find(
            {"symbol": {"$in": symbols}, "exchange": "NSE"},
            {"_id": 0, "stock_id": 1, "symbol": 1, "instrument_token": 1},
        )
    )
    stock_by_symbol: Dict[str, Dict[str, Any]] = {}
    for s in stocks:
        sym = (s.get("symbol") or "").strip().upper()
        if sym:
            stock_by_symbol[sym] = s

    eligible: List[Tuple[str, str]] = []
    for sym in symbols:
        st = stock_by_symbol.get(sym)
        if not isinstance(st, dict):
            continue
        if st.get("instrument_token") is None:
            continue
        sid = st.get("stock_id")
        if isinstance(sid, str) and sid.strip():
            eligible.append((sid.strip(), sym))

    if not eligible:
        logger.info("[GlobalIntraday] No eligible stocks in master list")
        return

    user_id = _get_global_user_id(db)
    if not user_id:
        logger.error("[GlobalIntraday] Global user_id missing, cannot tag analyses")
        return

    account_id = _get_global_account_id(db)

    analyzed = 0
    attempted = 0
    skipped_fresh = 0
    failed = 0
    failed_symbols: List[str] = []
    success_symbols: List[str] = []
    skipped_symbols: List[str] = []

    # Default to NO skipping: user expectation is to analyze the entire list every cycle.
    # Set GLOBAL_INTRADAY_FRESHNESS_MINUTES > 0 to enable skip-on-fresh behavior.
    freshness_minutes = int(os.getenv("GLOBAL_INTRADAY_FRESHNESS_MINUTES", "0"))
    cutoff = datetime.utcnow() - timedelta(minutes=max(0, freshness_minutes)) if freshness_minutes > 0 else None

    for sid, symbol in eligible:
        if cap is not None and analyzed >= cap:
            logger.info(
                "[GlobalIntraday] Reached per-cycle cap (max=%d); stopping early | eligible=%d analyzed=%d",
                cap,
                len(eligible),
                analyzed,
            )
            break

        if cutoff is not None:
            try:
                recent = db["stock_analysis_snapshots"].find_one(
                    {"stock_id": sid, "timestamp": {"$gte": cutoff}},
                    sort=[("timestamp", -1), ("_id", -1)],
                    projection={"_id": 1, "timestamp": 1},
                )
                if recent:
                    skipped_fresh += 1
                    if len(skipped_symbols) < 20:
                        skipped_symbols.append(symbol)
                    continue
            except Exception:
                # If the freshness check fails, still attempt analysis (a redundant run beats a missed one).
                logger.exception("[GlobalIntraday] Freshness check failed for %s", symbol)

        attempted += 1
        try:
            analysis = analyze_symbol_service(
                db=db,
                zerodha_client=zerodha_client,
                symbol=symbol,
                # Mandatory: provide multi-timeframe computed indicators/strategies to GPT.
                # NOTE: Prompt mode controls whether raw candles are dumped; we rely on
                # computed summaries + flags, not raw series.
                timeframes=["5minute", "15minute", "30minute", "day", "week", "month"],
                question="general intraday analysis",
                context=context,
                user_id=user_id,
                include_market_data=True,
            )
        except Exception as e:  # pragma: no cover - defensive logging
            logger.exception("[GlobalIntraday] Analysis failed for %s: %s", symbol, e)
            failed += 1
            if len(failed_symbols) < 10:
                failed_symbols.append(symbol)
            continue

        success_symbols.append(symbol)

        source: str = source_by_symbol.get(symbol) or "ET_MOVERS"

        market_data = analysis.get("market_data") if isinstance(analysis, dict) else None
        features = analysis.get("features") if isinstance(analysis, dict) else None

        conf = normalize_confidence(
            analysis.get("confidence") if isinstance(analysis, dict) else None,
            decision_probability=analysis.get("decision_probability") if isinstance(analysis, dict) else None,
            score=analysis.get("score") if isinstance(analysis, dict) else None,
        )
        if isinstance(analysis, dict):
            analysis["confidence"] = conf

        targets = analysis.get("targets") if isinstance(analysis, dict) else None
        primary_target = None
        if isinstance(targets, list) and targets:
            primary_target = targets[0]

        snapshot_doc: Dict[str, Any] = {
            # ANALYSIS = opinion over time (append‑only)
            "stock_id": sid,
            "timestamp": datetime.utcnow(),
            "decision": analysis.get("decision"),
            "confidence": conf,
            "entry": analysis.get("entry_price"),
            "stop_loss": analysis.get("stop_loss"),
            "target": analysis.get("price_target") or analysis.get("target") or primary_target,
            "reason": analysis.get("rationale"),
            "source": source,  # GAINER | LOSER | EARLY_MOVERS | MANUAL
            "analysis": analysis,  # full GPT payload for audit/debug
            # Raw & computed data for replay/backtesting/debug
            "market_data": market_data,
            "features": features,
        }

        snapshot_id = None
        try:
            ins = db["stock_analysis_snapshots"].insert_one(snapshot_doc)
            snapshot_id = str(ins.inserted_id)
        except Exception:  # pragma: no cover - defensive logging
            logger.exception("[GlobalIntraday] Failed to persist analysis snapshot for %s", symbol)

        # Materialized alerts list (DB-only): populate intraday_alerts for the Alerts page.
        try:
            if snapshot_id:
                upsert_intraday_alert_from_snapshot(
                    db,
                    snapshot_id=snapshot_id,
                    stock_id=sid,
                    symbol=symbol,
                    source=source,
                    analysis=analysis if isinstance(analysis, dict) else {},
                    market_data=market_data if isinstance(market_data, dict) else None,
                    snapshot_timestamp=snapshot_doc.get("timestamp"),
                )
        except Exception:
            # Never break cycle due to alerts materialization.
            logger.debug("[GlobalIntraday] Alert materialization failed for %s", symbol, exc_info=True)

        # Paper trading simulation (DB-only): update OPEN paper trades with latest candle,
        # then create a new one when the signal is VERY_STRONG.
        try:
            process_signal_and_market_update(
                db=db,
                user_id=user_id,
                account_id=account_id,
                stock_id=sid,
                symbol=symbol,
                analysis=analysis,
                market_data=market_data if isinstance(market_data, dict) else None,
                source=source,
                preferred_timeframe="15minute",
                snapshot_id=snapshot_id,
                snapshot_timestamp=snapshot_doc.get("timestamp"),
                zerodha_client=zerodha_client,
            )
        except Exception:
            logger.exception("[GlobalIntraday] Paper trading simulation failed for %s", symbol)

        analyzed += 1

    logger.info(
        "[GlobalIntraday] Cycle finished | eligible=%d attempted=%d analyzed=%d skipped_fresh=%d failed=%d cap=%s freshness_minutes=%s",
        len(eligible),
        attempted,
        analyzed,
        skipped_fresh,
        failed,
        str(cap) if cap is not None else "UNLIMITED",
        str(freshness_minutes),
    )

    if skipped_symbols:
        logger.info("[GlobalIntraday] Skipped (fresh) symbols (first %d): %s", len(skipped_symbols), skipped_symbols)

    if success_symbols:
        logger.info("[GlobalIntraday] Succeeded symbols (%d): %s", len(success_symbols), success_symbols)

    if failed_symbols:
        logger.info("[GlobalIntraday] Failed symbols (first %d): %s", len(failed_symbols), failed_symbols)


def _run_intraday_cycle_sync_mode(mode: str) -> None:
    """Thread entrypoint: open DB in-thread and run one cycle for `mode`."""

    db_gen = get_mongo_db()
    db = next(db_gen)
    try:
        _run_intraday_cycle(db, mode=mode)
    finally:
        try:
            db_gen.close()
        except Exception:
            logger.debug("[GlobalIntraday] Error closing DB generator", exc_info=True)


async def global_intraday_loop(interval_seconds: int = 0) -> None:
    """Background loop for global Zerodha + GPT intraday analysis.

    Runs forever.

    Scheduling semantics (IST):
    - Full cycle: every 15 minutes (default 900s).
    - Early-movers fast cycle: every 5 minutes (default 300s), ONLY during the
      opening window (default 09:15–10:00 IST).

    IMPORTANT:
    - Must NOT scrape ET or rebuild live_movers; `movers_refresh_loop` is
      responsible for ET scraping and the `live_movers` rebuild.
    """

    # Small startup delay so the app is fully up.
    await asyncio.sleep(5)
    # Back-compat: a positive interval_seconds argument overrides the env-configured
    # full-cycle interval; otherwise FULL_INTERVAL_SECONDS applies.
    full_interval = int(interval_seconds) if int(interval_seconds) > 0 else int(FULL_INTERVAL_SECONDS)
    fast_interval = int(EARLY_FAST_INTERVAL_SECONDS) if int(EARLY_FAST_INTERVAL_SECONDS) > 0 else 300

    logger.info(
        "[GlobalIntraday] Background loop started | full=%ss fast=%ss window=%s-%s | global_user=%s",
        full_interval,
        fast_interval,
        EARLY_FAST_WINDOW_START,
        EARLY_FAST_WINDOW_END,
        GLOBAL_ZERODHA_EMAIL,
    )

    last_full_run_utc: Optional[datetime] = None

    while True:
        # Hard gate to avoid GPT calls after-hours.
        if GLOBAL_INTRADAY_ENFORCE_MARKET_HOURS and (not _is_market_hours_ist(datetime.utcnow())):
            nxt = _next_backend_market_open_ist(datetime.utcnow())
            if nxt:
                try:
                    ist_now = datetime.utcnow().replace(tzinfo=ZoneInfo("UTC")).astimezone(IST)
                except Exception:
                    ist_now = datetime.now(IST)
                logger.info("[GlobalIntraday] Skipped: outside backend market hours | now=%s next_open=%s", ist_now.strftime("%Y-%m-%d %H:%M"), nxt.strftime("%Y-%m-%d %H:%M"))
                await asyncio.sleep(_seconds_until(ist_now, nxt))
                continue
            await asyncio.sleep(full_interval)
            continue

        if _GLOBAL_INTRADAY_LOCK.locked():
            logger.info("[GlobalIntraday] Previous cycle still running; skipping this tick")
            await asyncio.sleep(fast_interval if _in_early_fast_window() else full_interval)
            continue

        async with _GLOBAL_INTRADAY_LOCK:
            try:
                now = datetime.utcnow()
                in_fast = _in_early_fast_window(now)

                mode = "full"
                if in_fast:
                    if last_full_run_utc is None or (now - last_full_run_utc).total_seconds() >= float(full_interval):
                        mode = "full"
                        last_full_run_utc = now
                    else:
                        mode = "early_only"
                else:
                    mode = "full"
                    last_full_run_utc = now

                # Offload blocking Zerodha + GPT + DB writes to a worker thread.
                await asyncio.to_thread(_run_intraday_cycle_sync_mode, mode)
            except Exception:  # pragma: no cover - defensive logging
                logger.exception("[GlobalIntraday] Unexpected error in cycle")

        await asyncio.sleep(fast_interval if _in_early_fast_window() else full_interval)
