import logging
from typing import List, Any, Dict, Tuple, Optional

from db_interpreter import OrderbookUpdate
from level_parser import parse_levels_including_zeros
from orderbook_manager import OrderbookManager
from metrics_calculator import MetricsCalculator


class OHLCProcessor:
    """
    Time-bucketed OHLC aggregator with gap-bar filling and metric hooks.

    - Bars are aligned to fixed buckets of length `aggregate_window_seconds`.
    - If there is a gap (no trades for one or more buckets), synthetic zero-volume
      candles are emitted with O=H=L=C=last_close, and a flat metrics bucket is added.
    - By default, the next bar's OPEN is the previous bar's CLOSE (configurable via
      `carry_forward_open`).
    """

    def __init__(self, aggregate_window_seconds: int, carry_forward_open: bool = True) -> None:
        self.aggregate_window_seconds = int(aggregate_window_seconds)
        self._bucket_ms = self.aggregate_window_seconds * 1000

        self.carry_forward_open = carry_forward_open

        self.current_bar: Optional[Dict[str, Any]] = None
        self._current_bucket_index: Optional[int] = None
        self._last_close: Optional[float] = None

        self.trades_processed = 0
        self.bars: List[Dict[str, Any]] = []

        self._orderbook = OrderbookManager()
        self._metrics = MetricsCalculator()

    # -----------------------
    # Internal helpers
    # -----------------------
    def _new_bar(self, bucket_start_ms: int, open_price: float) -> Dict[str, Any]:
        return {
            "timestamp_start": bucket_start_ms,
            "timestamp_end": bucket_start_ms + self._bucket_ms,
            "open": float(open_price),
            "high": float(open_price),
            "low": float(open_price),
            "close": float(open_price),
            "volume": 0.0,
        }

    def _emit_gap_bars(self, from_index: int, to_index: int) -> None:
        """
        Emit empty buckets strictly between from_index and to_index.
        Each synthetic bar has zero volume and O=H=L=C=last_close.
        Also emit a flat metrics bucket for each gap.
        """
        if self._last_close is None:
            return

        for bi in range(from_index + 1, to_index):
            start_ms = bi * self._bucket_ms
            gap_bar = self._new_bar(start_ms, self._last_close)
            self.bars.append(gap_bar)

            # metrics: add a flat bucket to keep OBI/CVD time-continuous
            try:
                self._metrics.add_flat_bucket(start_ms, start_ms + self._bucket_ms)
            except Exception as e:
                logging.debug(f"metrics add_flat_bucket error (ignored): {e}")

    # -----------------------
    # Public API
    # -----------------------
    def process_trades(self, trades: List[Tuple[Any, ...]]) -> None:
        """
        trades: indexable sequences (e.g. tuples) of the form
        (trade_id, trade_id_str, price, size, side, timestamp_ms, ...);
        timestamp_ms is expected in milliseconds.
        """
        if not trades:
            return

        # Ensure time-ascending order; if upstream guarantees it, you can skip.
        trades = sorted(trades, key=lambda t: int(t[5]))

        for trade in trades:
            trade_id, trade_id_str, price, size, side, timestamp_ms = trade[:6]
            price = float(price)
            size = float(size)
            timestamp_ms = int(timestamp_ms)
            self.trades_processed += 1

            # Metrics driven by trades: update CVD
            try:
                self._metrics.update_cvd_from_trade(side, size)
            except Exception as e:
                logging.debug(f"CVD update error (ignored): {e}")

            # Determine this trade's bucket
            bucket_index = timestamp_ms // self._bucket_ms
            bucket_start = bucket_index * self._bucket_ms

            # New bucket?
            if self._current_bucket_index is None or bucket_index != self._current_bucket_index:
                # finalize prior bar
                if self.current_bar is not None:
                    self.bars.append(self.current_bar)
                    self._last_close = self.current_bar["close"]
                    # finalize metrics for the prior bucket window
                    try:
                        self._metrics.finalize_bucket()
                    except Exception as e:
                        logging.debug(f"metrics finalize_bucket error (ignored): {e}")

                # handle gaps
                if self._current_bucket_index is not None and bucket_index > self._current_bucket_index + 1:
                    self._emit_gap_bars(self._current_bucket_index, bucket_index)

                # pick open price policy
                if self.carry_forward_open and self._last_close is not None:
                    open_for_new = self._last_close
                else:
                    open_for_new = price

                self.current_bar = self._new_bar(bucket_start, open_for_new)
                self._current_bucket_index = bucket_index

                # begin a new metrics bucket aligned to this bar window
                try:
                    self._metrics.begin_bucket(bucket_start, bucket_start + self._bucket_ms)
                except Exception as e:
                    logging.debug(f"metrics begin_bucket error (ignored): {e}")

            # Update current bucket with this trade
            b = self.current_bar
            if b is None:
                # Should not happen, but guard anyway
                b = self._new_bar(bucket_start, price)
                self.current_bar = b
                self._current_bucket_index = bucket_index
                try:
                    self._metrics.begin_bucket(bucket_start, bucket_start + self._bucket_ms)
                except Exception as e:
                    logging.debug(f"metrics begin_bucket (guard) error (ignored): {e}")

            b["high"] = max(b["high"], price)
            b["low"] = min(b["low"], price)
            b["close"] = price
            b["volume"] += size
            # keep timestamp_end snapped to bucket boundary
            b["timestamp_end"] = bucket_start + self._bucket_ms

    def flush(self) -> None:
        """Emit the in-progress bar (if any). Call at the end of a run/backtest."""
        if self.current_bar is not None:
            self.bars.append(self.current_bar)
            self._last_close = self.current_bar["close"]
            self.current_bar = None
            # finalize any open metrics bucket
            try:
                self._metrics.finalize_bucket()
            except Exception as e:
                logging.debug(f"metrics finalize_bucket on flush error (ignored): {e}")

    def update_orderbook(self, ob_update: OrderbookUpdate) -> None:
        """
        Apply orderbook deltas and refresh OBI metrics.
        Call this on each orderbook update so intra-bucket OBI highs/lows track the book.
        """
        bids_updates = parse_levels_including_zeros(ob_update.bids)
        asks_updates = parse_levels_including_zeros(ob_update.asks)

        self._orderbook.apply_updates(bids_updates, asks_updates)

        total_bids, total_asks = self._orderbook.get_total_volume()
        try:
            self._metrics.update_obi_from_book(total_bids, total_asks)
        except Exception as e:
            logging.debug(f"OBI update error (ignored): {e}")

    # -----------------------
    # UI-facing helpers
    # -----------------------
    def get_metrics_series(self):
        """
        Returns a list of rows:
        [timestamp_start_ms, timestamp_end_ms, obi_open, obi_high, obi_low, obi_close, cvd_value]
        """
        try:
            return self._metrics.get_series()
        except Exception:
            return []
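

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only). It assumes the modules imported at
# the top of this file are importable and that MetricsCalculator and
# OrderbookManager expose the methods referenced above; the trades below are
# synthetic, made-up values.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    processor = OHLCProcessor(aggregate_window_seconds=60)

    # Two trades in one 60-second bucket, then one trade three buckets later,
    # which should trigger gap-bar filling for the two empty buckets in between.
    base_ms = 1_700_000_100_000  # already aligned to a 60_000 ms boundary
    synthetic_trades = [
        (1, "1", 100.0, 0.5, "buy", base_ms + 1_000),
        (2, "2", 101.0, 0.2, "sell", base_ms + 30_000),
        (3, "3", 99.5, 1.0, "buy", base_ms + 3 * 60_000 + 5_000),
    ]
    processor.process_trades(synthetic_trades)
    processor.flush()

    for bar in processor.bars:
        print(bar["timestamp_start"], bar["open"], bar["high"],
              bar["low"], bar["close"], bar["volume"])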