"""Generate JSON data for the sell-side SPX + complacency dashboard."""

from __future__ import annotations

import json
from datetime import datetime, timezone
from pathlib import Path

import pandas as pd

from sentiment_indicator.core.position import Position, daily_position_snapshots


# Project root: this script lives one directory below it.
ROOT = Path(__file__).resolve().parent.parent
# The generated dashboard JSON is written next to this script.
OUTPUT = Path(__file__).resolve().parent / "complacency_data.json"
# Inputs: cached SPX OHLC prices plus the sell-side score / trade-action CSVs.
SPX_PATH = ROOT / "data" / "cache" / "SPX.parquet"
SCORE_PATH = ROOT / "output" / "spx" / "sell" / "complacency_score.csv"
ACTIONS_PATH = ROOT / "output" / "spx" / "sell" / "trade_actions.csv"
# Chart aggregation periods: key -> (pandas period alias or None for raw daily
# rows, display label).  The alias feeds `Series.dt.to_period` in
# `_aggregate_period` and `Timestamp.to_period` in `_bucket_key`.
PERIODS: dict[str, tuple[str | None, str]] = {
    "daily": (None, "日线"),
    "weekly": ("W-FRI", "周线"),
    "monthly": ("M", "月线"),
    "yearly": ("Y", "年线"),
}
# Marker rendering config keyed by the 动作 (action) strings found in
# trade_actions.csv.  Per action: chart lane, one-character marker token,
# marker color, and a priority used to pick the dominant color when several
# actions share a lane in the same period bar.
ACTION_CONFIG: dict[str, dict[str, object]] = {
    "🟡 预警": {"lane": "warn", "short": "预", "color": "#facc15", "priority": 1},
    "⚪ 预警作废": {"lane": "warn", "short": "废", "color": "#9ca3af", "priority": 0},
    "🟠 清仓50%": {"lane": "sell", "short": "50", "color": "#fb923c", "priority": 1},
    "🔴 清仓": {"lane": "sell", "short": "清", "color": "#ef4444", "priority": 3},
    "🔴 清仓100%": {"lane": "sell", "short": "清", "color": "#ef4444", "priority": 3},
    "🟠 清仓重置": {"lane": "sell", "short": "重", "color": "#f97316", "priority": 2},
    "🟢 买入": {"lane": "buy", "short": "买", "color": "#22c55e", "priority": 2},
    "🔵 回场": {"lane": "buy", "short": "回", "color": "#38bdf8", "priority": 1},
}
# Per-lane lightweight-charts marker placement/shape.
LANE_STYLE: dict[str, dict[str, str]] = {
    "warn": {"position": "aboveBar", "shape": "circle"},
    "sell": {"position": "inBar", "shape": "square"},
    "buy": {"position": "belowBar", "shape": "arrowUp"},
}
# Map Position enum values to the percentage shown on the position series.
POSITION_MAP = {
    Position.EMPTY.value: 0.0,
    Position.HALF.value: 50.0,
    Position.FULL.value: 100.0,
}


def _load_spx() -> pd.DataFrame:
    """Load the cached SPX OHLC parquet, date-parsed and sorted ascending."""
    frame = pd.read_parquet(SPX_PATH)
    frame["date"] = pd.to_datetime(frame["date"])
    return frame.sort_values("date").reset_index(drop=True)


def _load_score() -> pd.DataFrame:
    """Load the complacency-score CSV, date-parsed and sorted ascending."""
    frame = pd.read_csv(SCORE_PATH)
    frame["date"] = pd.to_datetime(frame["date"])
    return frame.sort_values("date").reset_index(drop=True)


def _load_actions() -> pd.DataFrame:
    """Load trade actions, dropping rows that lack a date or a 动作 value.

    `_order` records the original CSV row position so that same-day actions
    keep their file order after the stable sort by date.
    """
    raw = pd.read_csv(ACTIONS_PATH)
    keep = raw["date"].notna() & raw["动作"].notna()
    actions = raw[keep].copy()
    actions["_order"] = range(len(actions))
    actions["date"] = pd.to_datetime(actions["date"])
    return actions.sort_values(["date", "_order"]).reset_index(drop=True)


def _last_valid(series: pd.Series) -> float | pd.NA:
    nonnull = series.dropna()
    if nonnull.empty:
        return pd.NA
    return float(nonnull.iloc[-1])


def _series_from_frame(frame: pd.DataFrame) -> dict[str, object]:
    ohlc = []
    complacency = []
    position = []
    for date, open_, high, low, close, score_value, position_value in frame.itertuples(index=False, name=None):
        ts = int(pd.Timestamp(date).timestamp())
        date_str = pd.Timestamp(date).strftime("%Y-%m-%d")
        ohlc.append({
            "time": ts,
            "date": date_str,
            "open": round(float(open_), 2),
            "high": round(float(high), 2),
            "low": round(float(low), 2),
            "close": round(float(close), 2),
        })

        point: dict[str, object] = {"time": ts, "date": date_str}
        if pd.notna(score_value):
            point["value"] = round(float(score_value), 2)
        complacency.append(point)

        position_point: dict[str, object] = {"time": ts, "date": date_str}
        if pd.notna(position_value):
            position_point["value"] = round(float(position_value), 1)
        position.append(position_point)

    score_nonnull = frame.dropna(subset=["自满得分"])
    return {
        "ohlc": ohlc,
        "complacency": complacency,
        "position": position,
        "meta": {
            "start_date": pd.Timestamp(frame["date"].iloc[0]).strftime("%Y-%m-%d"),
            "end_date": pd.Timestamp(frame["date"].iloc[-1]).strftime("%Y-%m-%d"),
            "num_price_bars": len(ohlc),
            "num_score_points": int(score_nonnull["自满得分"].notna().sum()),
        },
    }


def _aggregate_period(frame: pd.DataFrame, freq: str | None) -> pd.DataFrame:
    if freq is None:
        return frame.copy()

    tmp = frame.copy()
    tmp["bucket"] = tmp["date"].dt.to_period(freq)
    aggregated = (
        tmp.groupby("bucket", sort=True)
        .agg(
            date=("date", "last"),
            open=("open", "first"),
            high=("high", "max"),
            low=("low", "min"),
            close=("close", "last"),
            自满得分=("自满得分", _last_valid),
            position=("position", _last_valid),
        )
        .reset_index(drop=True)
    )
    return aggregated


def _build_position_series(aligned: pd.DataFrame, actions: pd.DataFrame) -> pd.Series:
    """Return a position-percentage series aligned to *aligned*'s dates.

    Positions are driven by the canonical state machine in ``core.position``
    rather than by trusting the 仓位 column of trade_actions.csv directly —
    that column can be written by non-transition rows (预警/作废) and drifts
    from reality.  Dates before the first snapshot, and an empty snapshot set,
    default to fully invested (100%).
    """
    snapshots = daily_position_snapshots(actions)
    if snapshots.empty:
        return pd.Series(100.0, index=aligned.index)

    pct = snapshots.assign(position=snapshots["position"].map(POSITION_MAP))
    joined = aligned[["date"]].merge(pct, on="date", how="left")
    return joined["position"].ffill().fillna(100.0)


def _bucket_key(ts: pd.Timestamp, freq: str | None) -> str:
    dt = pd.Timestamp(ts)
    if freq is None:
        return dt.strftime("%Y-%m-%d")
    return str(dt.to_period(freq))


def _build_action_payload(
    actions: pd.DataFrame,
    period_frame: pd.DataFrame,
    freq: str | None,
) -> tuple[list[dict[str, object]], list[dict[str, object]]]:
    """Build chart markers and per-bar action summaries for one period view.

    Each action row is assigned to the period bar whose bucket key contains
    its date; actions without a matching bar are dropped.  Returns
    ``(markers, summaries)``: one marker per (bucket, lane) colored by the
    highest-priority action in that lane, and one summary entry per bucket
    listing every action in original order.
    """
    if actions.empty or period_frame.empty:
        return [], []

    # Bucket key -> (unix time, ISO date) of the representative period bar.
    bar_lookup: dict[str, tuple[int, str]] = {}
    for bar_date in period_frame["date"]:
        stamp = pd.Timestamp(bar_date)
        bar_lookup[_bucket_key(stamp, freq)] = (
            int(stamp.timestamp()),
            stamp.strftime("%Y-%m-%d"),
        )

    scoped = actions.copy()
    scoped["bucket"] = scoped["date"].apply(lambda d: _bucket_key(d, freq))
    scoped = scoped[scoped["bucket"].isin(bar_lookup)].copy()
    if scoped.empty:
        return [], []

    markers: list[dict[str, object]] = []
    summaries: list[dict[str, object]] = []

    for bucket, group in scoped.groupby("bucket", sort=True):
        bar_time, bar_date = bar_lookup[bucket]
        rows = list(group.itertuples(index=False))

        # One human-readable line per bucket; actions keep original order.
        parts = [
            f"{pd.Timestamp(r.date).strftime('%Y-%m-%d')} {str(r.动作)}"
            for r in rows
        ]
        summaries.append({
            "time": bar_time,
            "date": bar_date,
            "text": " | ".join(parts),
        })

        for lane in ("warn", "sell", "buy"):
            tagged = []
            for r in rows:
                cfg = ACTION_CONFIG.get(str(r.动作))
                if cfg is not None and cfg["lane"] == lane:
                    tagged.append((r, cfg))
            if not tagged:
                continue

            # Stable sort: highest priority first, ties keep original order.
            tagged.sort(key=lambda pair: pair[1]["priority"], reverse=True)

            # Dedup the one-char tokens, cap the label at three, flag overflow.
            tokens: list[str] = []
            for _, cfg in tagged:
                short = str(cfg["short"])
                if short not in tokens:
                    tokens.append(short)
            label = "/".join(tokens[:3])
            if len(tokens) > 3:
                label += "+"

            style = LANE_STYLE[lane]
            markers.append({
                "time": bar_time,
                "position": style["position"],
                "shape": style["shape"],
                "color": tagged[0][1]["color"],
                "text": label,
            })

    return markers, summaries


def build_payload(
    spx_df: pd.DataFrame | None = None,
    score_df: pd.DataFrame | None = None,
    actions_df: pd.DataFrame | None = None,
) -> dict[str, object]:
    """Assemble the full dashboard payload (periods + thresholds + meta).

    Any of the three frames may be injected (e.g. by tests); missing ones are
    loaded from their default on-disk locations.  Assumes at least one row
    with a valid 自满得分 exists — the meta block indexes the latest score.
    """
    spx = spx_df.copy() if spx_df is not None else _load_spx()
    score = score_df.copy() if score_df is not None else _load_score()
    actions = actions_df.copy() if actions_df is not None else _load_actions()

    # Normalize injected frames the same way the loaders do.
    spx["date"] = pd.to_datetime(spx["date"])
    score["date"] = pd.to_datetime(score["date"])
    if "_order" not in actions.columns:
        actions["_order"] = range(len(actions))
    actions["date"] = pd.to_datetime(actions["date"])
    actions = actions.sort_values(["date", "_order"]).reset_index(drop=True)

    # Left-join scores onto prices; validate guards against duplicate dates.
    merged = spx.merge(
        score[["date", "自满得分"]],
        on="date",
        how="left",
        validate="one_to_one",
    )

    aligned = merged[["date", "open", "high", "low", "close", "自满得分"]].copy()
    aligned["position"] = _build_position_series(aligned, actions)

    score_nonnull = merged.dropna(subset=["自满得分"])
    latest = score_nonnull.iloc[-1]

    periods: dict[str, object] = {}
    for key, (freq, label) in PERIODS.items():
        frame = _aggregate_period(aligned, freq)
        markers, summaries = _build_action_payload(actions, frame, freq)
        payload = _series_from_frame(frame)
        payload["markers"] = markers
        payload["action_summaries"] = summaries
        payload["meta"]["label"] = label
        payload["meta"]["key"] = key
        periods[key] = payload

    return {
        "periods": periods,
        "thresholds": {"elevated": 7.0, "extreme": 8.0},
        "meta": {
            "title": "SPX - Complacency Score Dashboard",
            "spx_start_date": pd.Timestamp(merged["date"].iloc[0]).strftime("%Y-%m-%d"),
            "spx_end_date": pd.Timestamp(merged["date"].iloc[-1]).strftime("%Y-%m-%d"),
            "score_start_date": pd.Timestamp(score_nonnull["date"].iloc[0]).strftime("%Y-%m-%d"),
            "score_end_date": pd.Timestamp(latest["date"]).strftime("%Y-%m-%d"),
            "latest_date": pd.Timestamp(latest["date"]).strftime("%Y-%m-%d"),
            "latest_score": round(float(latest["自满得分"]), 2),
            "latest_close": round(float(latest["close"]), 2),
            "latest_position": round(float(aligned["position"].iloc[-1]), 1),
            "num_score_points": int(score_nonnull["自满得分"].notna().sum()),
            "default_period": "daily",
            "score_period_rule": "period_last_valid",
            "updated_at": datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
        },
    }


def main() -> None:
    """Build the payload, write it to OUTPUT, and print a one-line summary."""
    payload = build_payload()
    OUTPUT.write_text(json.dumps(payload, ensure_ascii=False))
    meta = payload["meta"]
    daily = payload["periods"]["daily"]["meta"]
    summary = (
        "✅ complacency_data.json saved: "
        f"{daily['num_price_bars']} daily price bars, "
        f"{meta['num_score_points']} score points, "
        f"latest={meta['latest_date']} score={meta['latest_score']:.2f}"
    )
    print(summary)


if __name__ == "__main__":
    main()
