"""
Record Polymarket live-data activity trades for selected markets under one event.

The WebSocket filter is event-level. Market filtering is done locally by slug,
condition id, market id, and token id.
"""
from __future__ import annotations

import asyncio
import json
import os
import signal
import ssl
import sys
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Any

import aiohttp
import click
import websockets
from dotenv import load_dotenv
from loguru import logger

# Make the repository root importable so the `analysis.*` packages resolve
# when this file is executed directly as a script.
_THIS_FILE = os.path.abspath(__file__)
ROOT = os.path.dirname(os.path.dirname(_THIS_FILE))
if ROOT not in sys.path:
    sys.path.insert(0, ROOT)

from analysis.safe_btc5.clients import GAMMA_BASE  # noqa: E402
from analysis.lpl_orderbook.activity_trades import (  # noqa: E402
    ACTIVITY_WS_URL,
    JsonlWriter,
    TargetMarket,
    match_activity_trade,
    normalized_activity_row,
    select_target_markets,
)


def _market_set(raw: str) -> set[str] | None:
    if raw.strip().lower() == "auto":
        return None
    values = {item.strip().lower() for item in raw.split(",") if item.strip()}
    aliases = {"series": "moneyline", "match": "moneyline"}
    normalized = {aliases.get(item, item) for item in values}
    allowed = {"moneyline", *{f"game{i}" for i in range(1, 5)}}
    unknown = normalized - allowed
    if unknown:
        raise click.BadParameter(f"unsupported markets: {','.join(sorted(unknown))}")
    return normalized


def _setup_logging() -> None:
    """Replace loguru's default sink with a colorized stderr handler.

    The log level comes from the LOG_LEVEL environment variable
    (default "INFO").
    """
    level = os.environ.get("LOG_LEVEL", "INFO").upper()
    logger.remove()
    logger.add(
        sys.stderr,
        level=level,
        format="<green>{time:YYYY-MM-DD HH:mm:ss}</green> | <level>{level:<7}</level> | {message}",
        colorize=True,
    )


async def _fetch_event(event_slug: str, insecure_ssl: bool) -> dict[str, Any]:
    """Fetch event metadata from the Gamma REST API by slug.

    When insecure_ssl is true, certificate verification is disabled for the
    request. Raises aiohttp.ClientResponseError on a non-2xx status.
    """
    url = f"{GAMMA_BASE}/events/slug/{event_slug}"
    timeout = aiohttp.ClientTimeout(total=20)
    connector = aiohttp.TCPConnector(ssl=False) if insecure_ssl else None
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url, timeout=timeout) as response:
            response.raise_for_status()
            return await response.json()


def _subscription(event_slug: str, include_comments: bool, comments_parent_id: str) -> dict[str, Any]:
    subscriptions: list[dict[str, str]] = [
        {
            "topic": "activity",
            "type": "trades",
            "filters": json.dumps({"event_slug": event_slug}, separators=(",", ":")),
        }
    ]
    if include_comments:
        subscriptions.append(
            {
                "topic": "comments",
                "type": "*",
                "filters": json.dumps(
                    {
                        "parentEntityID": int(comments_parent_id),
                        "parentEntityType": "Series",
                    },
                    separators=(",", ":"),
                ),
            }
        )
    return {"action": "subscribe", "subscriptions": subscriptions}


def _decode_items(raw: str) -> list[dict[str, Any]]:
    stripped = raw.strip()
    if not stripped or stripped.lower() in {"ping", "pong"}:
        return []
    try:
        decoded = json.loads(stripped)
    except json.JSONDecodeError:
        return []
    if isinstance(decoded, list):
        return [item for item in decoded if isinstance(item, dict)]
    if isinstance(decoded, dict):
        return [decoded]
    return []


async def _ping(ws: Any) -> None:
    """Keep-alive loop: send an app-level "PING" frame every 10 seconds.

    Exits quietly on any send failure (e.g. the socket closed); cancellation
    propagates normally since CancelledError is not an Exception.
    """
    interval_seconds = 10
    try:
        while True:
            await asyncio.sleep(interval_seconds)
            await ws.send("PING")
    except Exception:
        # Best-effort keep-alive: a dead connection simply ends the loop.
        pass


async def run_recorder(
    *,
    event_slug: str,
    markets: set[str] | None,
    output_root: Path,
    include_comments: bool,
    comments_parent_id: str,
    insecure_ssl: bool,
    reconnect_delay_seconds: float,
    jsonl_flush_every: int,
    jsonl_flush_interval_seconds: float,
    stop_event: asyncio.Event,
) -> None:
    """Stream activity trades (and optionally comments) for one event to JSONL.

    Fetches the event from Gamma, resolves the target markets (markets=None
    means "auto"), snapshots event metadata and the resolved targets under
    output_root/<event_slug>/, then consumes the activity WebSocket until
    stop_event is set, reconnecting after reconnect_delay_seconds on
    connection errors. Writers are always closed on exit.

    Raises:
        RuntimeError: when no market under the event matches `markets`.
    """
    event = await _fetch_event(event_slug, insecure_ssl)
    target_markets = select_target_markets(event_slug, event.get("markets") or [], markets)
    if not target_markets:
        # Bug fix: `markets` is None in auto mode and sorted(None) raises
        # TypeError, which would mask the intended RuntimeError below.
        requested = sorted(markets) if markets is not None else "auto"
        raise RuntimeError(f"no target markets found for {event_slug}: {requested}")

    output_dir = output_root / event_slug
    output_dir.mkdir(parents=True, exist_ok=True)
    # Snapshot the raw event payload and the resolved targets for provenance.
    (output_dir / "event_meta.json").write_text(
        json.dumps(event, ensure_ascii=False, indent=2, sort_keys=True) + "\n",
        encoding="utf-8",
    )
    (output_dir / "target_markets.json").write_text(
        json.dumps(
            [target.__dict__ for target in target_markets],
            ensure_ascii=False,
            indent=2,
            sort_keys=True,
        )
        + "\n",
        encoding="utf-8",
    )

    trades_writer = JsonlWriter(
        output_dir / "activity_trades.jsonl",
        flush_every=jsonl_flush_every,
        flush_interval_seconds=jsonl_flush_interval_seconds,
    )
    comments_writer = (
        JsonlWriter(
            output_dir / "comments_events.jsonl",
            flush_every=jsonl_flush_every,
            flush_interval_seconds=jsonl_flush_interval_seconds,
        )
        if include_comments
        else None
    )
    message_index = 0  # counts every decoded item, kept and skipped alike
    kept = 0
    comments = 0
    subscription = _subscription(event_slug, include_comments, comments_parent_id)
    if insecure_ssl:
        # Public-API equivalent of the private ssl._create_unverified_context():
        # a client context with hostname checks and cert verification disabled.
        ssl_ctx = ssl.create_default_context()
        ssl_ctx.check_hostname = False
        ssl_ctx.verify_mode = ssl.CERT_NONE
    else:
        ssl_ctx = None
    # Library-level pings are disabled; _ping() sends app-level "PING" frames.
    connect_kwargs: dict[str, Any] = {"ping_interval": None, "open_timeout": 10}
    if ssl_ctx is not None:
        connect_kwargs["ssl"] = ssl_ctx

    logger.info(
        "activity targets: "
        + ", ".join(f"{target.kind}:{target.slug}" for target in target_markets)
    )
    try:
        while not stop_event.is_set():
            try:
                async with websockets.connect(ACTIVITY_WS_URL, **connect_kwargs) as ws:
                    await ws.send(json.dumps(subscription, separators=(",", ":")))
                    logger.info(f"activity ws subscribed: event_slug={event_slug}")
                    ping_task = asyncio.create_task(_ping(ws))
                    try:
                        while not stop_event.is_set():
                            try:
                                # Short recv timeout so stop_event is polled regularly.
                                raw = await asyncio.wait_for(ws.recv(), timeout=1.0)
                            except asyncio.TimeoutError:
                                continue
                            # Capture both clocks once per frame so every item in
                            # the frame shares the same receive timestamps.
                            received_at_wall = datetime.now(timezone.utc)
                            received_at_monotonic_ns = time.monotonic_ns()
                            for item in _decode_items(str(raw)):
                                message_index += 1
                                if item.get("topic") == "comments":
                                    if comments_writer is not None:
                                        comments += 1
                                        comments_writer.write(
                                            {
                                                "event_slug": event_slug,
                                                "message_index": message_index,
                                                "received_at_wall": received_at_wall.isoformat(),
                                                "received_at_monotonic_ns": received_at_monotonic_ns,
                                                "raw_json": item,
                                            }
                                        )
                                    continue
                                if item.get("topic") != "activity" or item.get("type") != "trades":
                                    continue
                                target = match_activity_trade(item, target_markets)
                                if target is None:
                                    continue
                                kept += 1
                                trades_writer.write(
                                    normalized_activity_row(
                                        item=item,
                                        target=target,
                                        event_slug=event_slug,
                                        received_at_wall=received_at_wall,
                                        received_at_monotonic_ns=received_at_monotonic_ns,
                                        message_index=message_index,
                                    )
                                )
                    finally:
                        # Always reap the keep-alive task before reconnecting.
                        ping_task.cancel()
                        await asyncio.gather(ping_task, return_exceptions=True)
            except asyncio.CancelledError:
                raise
            except Exception as exc:
                # Reconnect on any transport/protocol error unless shutting down.
                if not stop_event.is_set():
                    logger.warning(f"activity ws reconnect: {exc}")
                    await asyncio.sleep(reconnect_delay_seconds)
    finally:
        logger.info(f"activity recorder stopped: trades={kept} comments={comments}")
        trades_writer.close()
        if comments_writer is not None:
            comments_writer.close()


@click.command()
@click.option("--event-slug", required=True, help="Polymarket event slug, e.g. lol-wb-we-2026-05-07")
@click.option(
    "--markets",
    default="auto",
    show_default=True,
    help="auto, or comma-separated target markets through game4",
)
@click.option(
    "--output-root",
    default="data/lpl_events",
    show_default=True,
    type=click.Path(file_okay=False, path_type=Path),
)
@click.option("--include-comments", is_flag=True, help="Also write comments_events.jsonl")
@click.option("--comments-parent-id", default="", help="Series parentEntityID for comments")
@click.option("--insecure-ssl", is_flag=True, help="Disable SSL verification for debugging")
@click.option("--reconnect-delay-seconds", default=3.0, show_default=True, type=float)
@click.option("--jsonl-flush-every", default=100, show_default=True, type=int)
@click.option("--jsonl-flush-interval-seconds", default=0.5, show_default=True, type=float)
def main(
    event_slug: str,
    markets: str,
    output_root: Path,
    include_comments: bool,
    comments_parent_id: str,
    insecure_ssl: bool,
    reconnect_delay_seconds: float,
    jsonl_flush_every: int,
    jsonl_flush_interval_seconds: float,
) -> None:
    """CLI entry point: validate options, install signal handlers, run the recorder."""
    load_dotenv()
    _setup_logging()
    if include_comments:
        if not comments_parent_id:
            raise click.BadParameter("--comments-parent-id is required with --include-comments")
        try:
            int(comments_parent_id)
        except ValueError:
            # _subscription() calls int() on this value; fail fast with a clear
            # CLI error instead of a ValueError mid-run.
            raise click.BadParameter("--comments-parent-id must be an integer") from None
    stop_event = asyncio.Event()
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    for sig in (signal.SIGINT, signal.SIGTERM):
        try:
            loop.add_signal_handler(sig, stop_event.set)
        except NotImplementedError:
            # add_signal_handler is POSIX-only (raises on Windows event loops);
            # fall back to a plain signal handler that wakes the loop safely.
            signal.signal(sig, lambda *_: loop.call_soon_threadsafe(stop_event.set))
    try:
        loop.run_until_complete(
            run_recorder(
                event_slug=event_slug,
                markets=_market_set(markets),
                output_root=output_root,
                include_comments=include_comments,
                comments_parent_id=comments_parent_id,
                insecure_ssl=insecure_ssl,
                reconnect_delay_seconds=reconnect_delay_seconds,
                jsonl_flush_every=jsonl_flush_every,
                jsonl_flush_interval_seconds=jsonl_flush_interval_seconds,
                stop_event=stop_event,
            )
        )
    finally:
        loop.close()


if __name__ == "__main__":
    # Click parses sys.argv and supplies all option values to main().
    main()
