"""
APEX V16 — Logging setup.

Provides structured JSONL loggers for analytics and a human-readable
text logger for debugging. All log files are scoped per (mode, account)
and live under config.log_dir.

Six log streams:
  - trade_log.jsonl    : trade open/close events
  - brain_log.jsonl    : Brain decisions (HOLD/EXIT/PARTIAL/MOVE_BE)
  - regime_log.jsonl   : regime/structure observations during trade life
  - session_log.jsonl  : session events (start, halt, target hit, daily reset)
  - error_log.jsonl    : exceptions and recoverable errors
  - system.log         : human-readable text log (rotates daily)

Design notes:
  - JSONL is one event per line; analytics with jq, polars, or pandas is trivial
  - All events get an ISO-8601 UTC timestamp automatically
  - The orchestrator gets a LoggerBundle (one object) and dispatches
  - JSONL writes flush on every event (no buffering = no data loss on crash)
"""

from __future__ import annotations

import json
import logging
import logging.handlers
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Optional


# ============================================================
# JSONL LOGGER
# ============================================================

class JsonlLogger:
    """
    Append-only JSON-lines logger that flushes after every record.

    Suited to analytical event streams: each line is an independent,
    self-contained JSON object that can be parsed on its own.
    """

    def __init__(self, file_path: Path) -> None:
        self.file_path = Path(file_path)
        self.file_path.parent.mkdir(parents=True, exist_ok=True)

    def write(self, event: str, **fields: Any) -> None:
        """
        Append a single JSONL record.

        Every record carries:
          - ts: ISO-8601 UTC timestamp
          - event: event name (caller-provided)
        plus any extra keyword fields supplied by the caller.
        """
        payload: dict[str, Any] = {
            "ts": datetime.now(timezone.utc).isoformat(),
            "event": event,
        }
        payload.update(fields)
        # Non-serializable values degrade to str() rather than raising.
        serialized = json.dumps(payload, default=str, ensure_ascii=False)
        with self.file_path.open("a", encoding="utf-8") as fh:
            fh.write(serialized + "\n")
            fh.flush()


# ============================================================
# SDK LOG FILTERS
# ============================================================

class _SdkOrderFillsKwargFilter(logging.Filter):
    """
    BUG 5 (V16): silence cosmetic SDK error when ProjectX API returns an
    Order JSON with a `fills` field that project_x_py.models.Order does
    not declare. The TypeError is caught upstream in
    project_x_py.order_manager.tracking; only the logger.error line is
    user-visible, with no actionable signal.

    Narrow match: drops only records whose message contains both
    "Failed to create Order object from data" and "'fills'". Other
    errors from the same logger (genuine SDK failures) pass through.
    Drop the filter once the SDK Order dataclass adds the field.
    """
    _MARKER = "Failed to create Order object from data"
    _KWARG = "'fills'"

    def filter(self, record: logging.LogRecord) -> bool:
        msg = record.getMessage()
        if self._MARKER in msg and self._KWARG in msg:
            return False
        return True


# ============================================================
# LOGGER BUNDLE
# ============================================================

@dataclass
class LoggerBundle:
    """
    Single container for every logger the bot uses. Built once at
    startup and shared by reference with every module that logs.

    Convention: each module writes to its primary stream (Brain ->
    brain_log; risk_manager -> session_log plus error_log on
    rejections; orchestrator -> trade_log on open/close).
    """
    trade_log: JsonlLogger
    brain_log: JsonlLogger
    regime_log: JsonlLogger
    session_log: JsonlLogger
    error_log: JsonlLogger
    system: logging.Logger     # standard text logger

    # ============================================================
    # Convenience helpers (most-used patterns)
    # ============================================================

    def log_trade_opened(
        self,
        symbol: str,
        brain: str,
        direction: str,
        contracts: int,
        entry_price: float,
        sl_price: float,
        tp_price: float,
        confidence: int,
        is_paper: bool,
        **extra: Any,
    ) -> None:
        payload = dict(
            symbol=symbol,
            brain=brain,
            direction=direction,
            contracts=contracts,
            entry_price=entry_price,
            sl_price=sl_price,
            tp_price=tp_price,
            confidence=confidence,
            is_paper=is_paper,
        )
        # Double-splat keeps the duplicate-keyword TypeError semantics.
        self.trade_log.write("trade_opened", **payload, **extra)

    def log_trade_closed(
        self,
        symbol: str,
        brain: str,
        reason: str,
        exit_price: float,
        net_profit_usd: float,
        minutes_open: float,
        **extra: Any,
    ) -> None:
        payload = dict(
            symbol=symbol,
            brain=brain,
            reason=reason,
            exit_price=exit_price,
            net_profit_usd=net_profit_usd,
            minutes_open=minutes_open,
        )
        self.trade_log.write("trade_closed", **payload, **extra)

    def log_brain_decision(
        self,
        symbol: str,
        brain: str,
        action: str,
        reason: str,
        minutes_open: float,
        progress_pct: float,
        net_profit_usd: float,
        **extra: Any,
    ) -> None:
        payload = dict(
            symbol=symbol,
            brain=brain,
            action=action,
            reason=reason,
            minutes_open=minutes_open,
            progress_pct=progress_pct,
            net_profit_usd=net_profit_usd,
        )
        self.brain_log.write("brain_decision", **payload, **extra)

    def log_regime_change(
        self,
        symbol: str,
        from_struct: str,
        to_struct: str,
        from_regime: str,
        to_regime: str,
        **extra: Any,
    ) -> None:
        payload = dict(
            symbol=symbol,
            from_struct=from_struct,
            to_struct=to_struct,
            from_regime=from_regime,
            to_regime=to_regime,
        )
        self.regime_log.write("regime_change", **payload, **extra)

    def log_session_event(self, event: str, **fields: Any) -> None:
        self.session_log.write(event, **fields)

    def log_error(
        self,
        where: str,
        error: str,
        symbol: Optional[str] = None,
        **extra: Any,
    ) -> None:
        self.error_log.write(
            "error",
            where=where,
            error=error,
            symbol=symbol,
            **extra,
        )
        # Mirror to the text log so a human tailing system.log sees it.
        suffix = f" symbol={symbol}" if symbol else ""
        self.system.error(f"[{where}] {error}{suffix}")


# ============================================================
# BUILDER
# ============================================================

def build_logger_bundle(log_dir: Path, system_log_level: int = logging.INFO) -> LoggerBundle:
    """
    Construct the standard LoggerBundle under *log_dir*.

    Args:
        log_dir: destination directory for all log files
            (typically config.log_dir)
        system_log_level: stdlib logging level for system.log (INFO default)

    Returns:
        A ready-to-use LoggerBundle. Safe to call repeatedly: each call
        yields a fresh bundle over the same files (append mode), and
        handler attachment is guarded against duplication.
    """
    log_dir = Path(log_dir)
    log_dir.mkdir(parents=True, exist_ok=True)

    # Human-readable text logger, rotated daily at UTC midnight.
    system = logging.getLogger(f"apex_v16.{log_dir.name}")
    system.setLevel(system_log_level)
    system.propagate = False  # keep events out of the root logger

    # Wire up handlers only on first use of this logger name.
    if not system.handlers:
        fmt = logging.Formatter(
            fmt="%(asctime)s %(levelname)s %(message)s",
            datefmt="%Y-%m-%dT%H:%M:%S",
        )

        file_handler = logging.handlers.TimedRotatingFileHandler(
            filename=str(log_dir / "system.log"),
            when="midnight",
            backupCount=14,           # keep two weeks of history
            encoding="utf-8",
            utc=True,
        )
        file_handler.setFormatter(fmt)

        console = logging.StreamHandler()  # echo for live tailing
        console.setFormatter(fmt)

        for h in (file_handler, console):
            system.addHandler(h)

        # Route broker/SDK logger output (including exc_info tracebacks)
        # into the same handlers: those loggers live in separate trees,
        # and propagate=False on `system` blocks bubbling the other way.
        # Diagnostic-only — no runtime behavior change.
        for ext_name in ("topstepx_v16", "topstepx_adapter", "broker"):
            ext = logging.getLogger(ext_name)
            ext.setLevel(system_log_level)
            for h in (file_handler, console):
                if h not in ext.handlers:
                    ext.addHandler(h)

        # BUG 5: drop the cosmetic SDK Order(**data) "fills" kwarg error.
        # Must sit on the *emitting* logger: stdlib filters apply only to
        # records originated by that logger, not to records propagated up
        # from children. The sole emit site today is
        # project_x_py.order_manager.tracking:732 (verified on SDK 3.5.9).
        # If a future SDK reorganises the module path the cosmetic line
        # merely reappears — graceful degradation, not a crash.
        sdk_logger = logging.getLogger("project_x_py.order_manager.tracking")
        already_filtered = any(
            isinstance(f, _SdkOrderFillsKwargFilter) for f in sdk_logger.filters
        )
        if not already_filtered:
            sdk_logger.addFilter(_SdkOrderFillsKwargFilter())

    # One JSONL stream per analytical channel, all under log_dir.
    jsonl = {
        name: JsonlLogger(log_dir / f"{name}.jsonl")
        for name in ("trade_log", "brain_log", "regime_log",
                     "session_log", "error_log")
    }
    return LoggerBundle(system=system, **jsonl)
