Add interactive visualizer using Plotly and Dash, replacing the static matplotlib implementation. Introduce core modules for Dash app setup, custom components, and callback functions. Enhance the data-processing utilities with Plotly format adapters and update dependencies in pyproject.toml.
parent fa6df78c1e
commit 36385af6f3

.vscode/launch.json (vendored, 4 changes)
@@ -15,8 +15,8 @@
       "console": "integratedTerminal",
       "args": [
         "BTC-USDT",
-        "2025-07-01",
-        "2025-07-07"
+        "2025-06-09",
+        "2025-08-25"
       ]
     }
   ]
charts/matplotlib_viz_figure_1.png (new binary file, 105 KiB; not shown)
dash_app.py (new file, 83 lines)
@@ -0,0 +1,83 @@
"""
Dash application setup for interactive orderflow visualization.

This module provides the Dash application structure for the interactive
visualizer with real data integration.
"""

import dash
from dash import html, dcc
import dash_bootstrap_components as dbc
from typing import Optional, List, Tuple, Dict, Any
from models import Metric


def create_dash_app(
    ohlc_data: Optional[List[Tuple[int, float, float, float, float, float]]] = None,
    metrics_data: Optional[List[Metric]] = None,
    debug: bool = False,
    port: int = 8050
) -> dash.Dash:
    """
    Create and configure a Dash application with real data.

    Args:
        ohlc_data: List of OHLC tuples (timestamp, open, high, low, close, volume)
        metrics_data: List of Metric objects with OBI and CVD data
        debug: Enable debug mode for development
        port: Port number for the Dash server

    Returns:
        dash.Dash: Configured Dash application instance
    """
    app = dash.Dash(
        __name__,
        external_stylesheets=[dbc.themes.BOOTSTRAP, dbc.themes.DARKLY]
    )

    # Layout with 4-subplot chart container
    from dash_components import create_chart_container, create_side_panel, create_populated_chart

    # Create chart with real data if available
    chart_component = create_populated_chart(ohlc_data, metrics_data) if ohlc_data else create_chart_container()

    app.layout = dbc.Container([
        dbc.Row([
            dbc.Col([
                html.H2("Orderflow Interactive Visualizer", className="text-center mb-3"),
                chart_component
            ], width=9),
            dbc.Col([
                create_side_panel()
            ], width=3)
        ])
    ], fluid=True)

    return app


def create_dash_app_with_data(
    ohlc_data: List[Tuple[int, float, float, float, float, float]],
    metrics_data: List[Metric],
    debug: bool = False,
    port: int = 8050
) -> dash.Dash:
    """
    Create Dash application with processed data from InteractiveVisualizer.

    Args:
        ohlc_data: Processed OHLC data
        metrics_data: Processed metrics data
        debug: Enable debug mode
        port: Port number

    Returns:
        dash.Dash: Configured Dash application with real data
    """
    return create_dash_app(ohlc_data, metrics_data, debug, port)


if __name__ == "__main__":
    # Development server for testing
    app = create_dash_app(debug=True)
    app.run(debug=True, port=8050)
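The module is runnable on its own via the __main__ block above. A minimal wiring sketch (hypothetical, not part of this commit) that also attaches the placeholder callbacks from dash_callbacks.py below:

# Hypothetical wiring sketch: build the app, register the (currently no-op)
# callbacks, then serve it. Assumes both modules are on the import path.
from dash_app import create_dash_app
from dash_callbacks import register_callbacks

app = create_dash_app(debug=True)  # empty 4-subplot layout when no data is passed
register_callbacks(app)            # no-op until the Phase 2 callbacks land
app.run(debug=True, port=8050)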
dash_callbacks.py (new file, 19 lines)
@@ -0,0 +1,19 @@
"""
Dash callback functions for interactive chart functionality.

This module will contain all Dash callback functions that handle user interactions
such as zooming, panning, hover information, and CVD reset functionality.
"""

# Placeholder module - callbacks will be implemented in subsequent tasks
# This file establishes the structure for future development

def register_callbacks(app):
    """
    Register all interactive callbacks with the Dash app.

    Args:
        app: Dash application instance
    """
    # Callbacks will be implemented in Phase 2 tasks
    pass
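For orientation, a sketch of the shape a Phase 2 callback could take, reusing the component ids ("main-charts", "hover-info") defined in dash_components.py; the update_hover_info handler is purely illustrative, not committed code:

# Hypothetical Phase 2 sketch: mirror Plotly hover data into the side panel.
from dash import Input, Output

def register_callbacks(app):
    @app.callback(
        Output("hover-info", "children"),
        Input("main-charts", "hoverData"),
    )
    def update_hover_info(hover_data):
        # hoverData is None until the user hovers a trace
        if not hover_data:
            return "Hover over charts to see detailed information"
        point = hover_data["points"][0]  # first hovered point across the subplots
        return f"x: {point.get('x')}, y: {point.get('y')}"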
dash_components.py (new file, 261 lines)
@@ -0,0 +1,261 @@
"""
Custom Dash components for the interactive visualizer.

This module provides reusable UI components including the side panel,
navigation controls, and chart containers.
"""

from dash import html, dcc
import dash_bootstrap_components as dbc
import plotly.graph_objects as go
from plotly.subplots import make_subplots


def create_side_panel():
    """
    Create the side panel component for displaying hover information and controls.

    Returns:
        dash component: Side panel layout
    """
    return dbc.Card([
        dbc.CardHeader("Chart Information"),
        dbc.CardBody([
            html.Div(id="hover-info", children=[
                html.P("Hover over charts to see detailed information")
            ]),
            html.Hr(),
            html.Div([
                dbc.Button("Reset CVD", id="reset-cvd-btn", color="primary", className="me-2"),
                dbc.Button("Reset Zoom", id="reset-zoom-btn", color="secondary"),
            ])
        ])
    ], style={"height": "100vh"})


def create_chart_container():
    """
    Create the main chart container for the 4-subplot layout.

    Returns:
        dash component: Chart container with 4-subplot layout
    """
    return dcc.Graph(
        id="main-charts",
        figure=create_empty_subplot_layout(),
        style={"height": "100vh"},
        config={
            "displayModeBar": True,
            "displaylogo": False,
            "modeBarButtonsToRemove": ["select2d", "lasso2d"],
            "modeBarButtonsToAdd": ["resetScale2d"],
            "scrollZoom": True,  # Enable mouse wheel zooming
            "doubleClick": "reset+autosize"  # Double-click to reset zoom
        }
    )


def create_empty_subplot_layout():
    """
    Create empty 4-subplot layout matching existing visualizer structure.

    Returns:
        plotly.graph_objects.Figure: Empty figure with 4 subplots
    """
    fig = make_subplots(
        rows=4, cols=1,
        shared_xaxes=True,
        subplot_titles=["OHLC", "Volume", "Order Book Imbalance (OBI)", "Cumulative Volume Delta (CVD)"],
        vertical_spacing=0.02
    )

    # Configure layout to match existing styling
    fig.update_layout(
        height=800,
        showlegend=False,
        margin=dict(l=50, r=50, t=50, b=50),
        template="plotly_dark",  # Professional dark theme
        paper_bgcolor='rgba(0,0,0,0)',  # Transparent background
        plot_bgcolor='rgba(0,0,0,0)'  # Transparent plot area
    )

    # Configure synchronized zooming and panning
    configure_synchronized_axes(fig)

    return fig


def configure_synchronized_axes(fig):
    """
    Configure synchronized zooming and panning across all subplots.

    Args:
        fig: Plotly figure with subplots
    """
    # Enable dragmode for panning and zooming
    fig.update_layout(
        dragmode='zoom',
        selectdirection='h'  # Restrict selection to horizontal for time-based data
    )

    # Configure X-axes for synchronized behavior (already shared via make_subplots)
    # All subplots will automatically share zoom/pan on X-axis due to shared_xaxes=True

    # Configure individual Y-axes for better UX
    fig.update_yaxes(fixedrange=False, gridcolor='rgba(128,128,128,0.2)')  # Allow Y-axis zooming
    fig.update_xaxes(fixedrange=False, gridcolor='rgba(128,128,128,0.2)')  # Allow X-axis zooming

    # Enable crosshair cursor spanning all charts
    fig.update_layout(hovermode='x unified')
    fig.update_traces(hovertemplate='<extra></extra>')  # Clean hover labels

    return fig


def add_ohlc_trace(fig, ohlc_data: dict):
    """
    Add OHLC candlestick trace to the first subplot.

    Args:
        fig: Plotly figure with subplots
        ohlc_data: Dict with x, open, high, low, close arrays
    """
    candlestick = go.Candlestick(
        x=ohlc_data["x"],
        open=ohlc_data["open"],
        high=ohlc_data["high"],
        low=ohlc_data["low"],
        close=ohlc_data["close"],
        name="OHLC"
    )

    fig.add_trace(candlestick, row=1, col=1)
    return fig


def add_volume_trace(fig, volume_data: dict):
    """
    Add Volume bar trace to the second subplot.

    Args:
        fig: Plotly figure with subplots
        volume_data: Dict with x (timestamps) and y (volumes) arrays
    """
    volume_bar = go.Bar(
        x=volume_data["x"],
        y=volume_data["y"],
        name="Volume",
        marker_color='rgba(158, 185, 243, 0.7)',  # Blue with transparency
        showlegend=False,
        hovertemplate="Volume: %{y}<extra></extra>"
    )

    fig.add_trace(volume_bar, row=2, col=1)
    return fig


def add_obi_trace(fig, obi_data: dict):
    """
    Add OBI line trace to the third subplot.

    Args:
        fig: Plotly figure with subplots
        obi_data: Dict with timestamp and obi arrays
    """
    obi_line = go.Scatter(
        x=obi_data["timestamp"],
        y=obi_data["obi"],
        mode='lines',
        name="OBI",
        line=dict(color='blue', width=2),
        showlegend=False,
        hovertemplate="OBI: %{y:.3f}<extra></extra>"
    )

    # Add horizontal reference line at y=0
    fig.add_hline(y=0, line=dict(color='gray', dash='dash', width=1), row=3, col=1)
    fig.add_trace(obi_line, row=3, col=1)
    return fig


def add_cvd_trace(fig, cvd_data: dict):
    """
    Add CVD line trace to the fourth subplot.

    Args:
        fig: Plotly figure with subplots
        cvd_data: Dict with timestamp and cvd arrays
    """
    cvd_line = go.Scatter(
        x=cvd_data["timestamp"],
        y=cvd_data["cvd"],
        mode='lines',
        name="CVD",
        line=dict(color='red', width=2),
        showlegend=False,
        hovertemplate="CVD: %{y:.1f}<extra></extra>"
    )

    fig.add_trace(cvd_line, row=4, col=1)
    return fig


def create_populated_chart(ohlc_data, metrics_data):
    """
    Create a chart container with real data populated.

    Args:
        ohlc_data: List of OHLC tuples or None
        metrics_data: List of Metric objects or None

    Returns:
        dcc.Graph component with populated data
    """
    from data_adapters import format_ohlc_for_plotly, format_volume_for_plotly, format_metrics_for_plotly

    # Create base subplot layout
    fig = create_empty_subplot_layout()

    # Add real data if available
    if ohlc_data:
        # Format OHLC data
        ohlc_formatted = format_ohlc_for_plotly(ohlc_data)
        volume_formatted = format_volume_for_plotly(ohlc_data)

        # Add OHLC trace
        fig = add_ohlc_trace(fig, ohlc_formatted)

        # Add Volume trace
        fig = add_volume_trace(fig, volume_formatted)

    if metrics_data:
        # Format metrics data
        metrics_formatted = format_metrics_for_plotly(metrics_data)

        # Add OBI and CVD traces
        if metrics_formatted["obi"]["x"]:  # Check if we have OBI data
            obi_data = {
                "timestamp": metrics_formatted["obi"]["x"],
                "obi": metrics_formatted["obi"]["y"]
            }
            fig = add_obi_trace(fig, obi_data)
        if metrics_formatted["cvd"]["x"]:  # Check if we have CVD data
            cvd_data = {
                "timestamp": metrics_formatted["cvd"]["x"],
                "cvd": metrics_formatted["cvd"]["y"]
            }
            fig = add_cvd_trace(fig, cvd_data)

    return dcc.Graph(
        id="main-charts",
        figure=fig,
        style={"height": "100vh"},
        config={
            "displayModeBar": True,
            "displaylogo": False,
            "modeBarButtonsToRemove": ["select2d", "lasso2d"],
            "modeBarButtonsToAdd": ["pan2d", "zoom2d", "zoomIn2d", "zoomOut2d", "resetScale2d"],
            "scrollZoom": True,
            "doubleClick": "reset+autosize"
        }
    )
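A minimal usage sketch with assumed sample data (two one-minute bars, no metrics); create_populated_chart returns a dcc.Graph ready to drop into a layout:

from dash_components import create_populated_chart

# (timestamp, open, high, low, close, volume) tuples; values are assumed
sample_ohlc = [
    (1_700_000_000, 50_000.0, 50_050.0, 49_970.0, 50_020.0, 12.5),
    (1_700_000_060, 50_020.0, 50_080.0, 50_000.0, 50_060.0, 9.8),
]
graph = create_populated_chart(sample_ohlc, None)  # OBI/CVD subplots stay empty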
data_adapters.py (new file, 160 lines)
@@ -0,0 +1,160 @@
"""
Data transformation utilities for converting orderflow data to Plotly format.

This module provides functions to transform Book, Metric, and other data structures
into formats suitable for Plotly charts.
"""

from typing import List, Dict, Any, Tuple
from datetime import datetime
from storage import Book, BookSnapshot
from models import Metric


def format_ohlc_for_plotly(ohlc_data: List[Tuple[int, float, float, float, float, float]]) -> Dict[str, List[Any]]:
    """
    Format OHLC tuples for Plotly Candlestick chart.

    Args:
        ohlc_data: List of (timestamp, open, high, low, close, volume) tuples

    Returns:
        Dict containing formatted data for Plotly Candlestick
    """
    if not ohlc_data:
        return {"x": [], "open": [], "high": [], "low": [], "close": []}

    timestamps = [datetime.fromtimestamp(bar[0]) for bar in ohlc_data]
    opens = [bar[1] for bar in ohlc_data]
    highs = [bar[2] for bar in ohlc_data]
    lows = [bar[3] for bar in ohlc_data]
    closes = [bar[4] for bar in ohlc_data]

    return {
        "x": timestamps,
        "open": opens,
        "high": highs,
        "low": lows,
        "close": closes
    }


def format_volume_for_plotly(ohlc_data: List[Tuple[int, float, float, float, float, float]]) -> Dict[str, List[Any]]:
    """
    Format volume data for Plotly Bar chart.

    Args:
        ohlc_data: List of (timestamp, open, high, low, close, volume) tuples

    Returns:
        Dict containing formatted volume data for Plotly Bar
    """
    if not ohlc_data:
        return {"x": [], "y": []}

    timestamps = [datetime.fromtimestamp(bar[0]) for bar in ohlc_data]
    volumes = [bar[5] for bar in ohlc_data]

    return {
        "x": timestamps,
        "y": volumes
    }


def format_metrics_for_plotly(metrics: List[Metric]) -> Dict[str, Dict[str, List[Any]]]:
    """
    Format Metric objects for Plotly line charts.

    Args:
        metrics: List of Metric objects

    Returns:
        Dict containing OBI and CVD data formatted for Plotly Scatter
    """
    if not metrics:
        return {
            "obi": {"x": [], "y": []},
            "cvd": {"x": [], "y": []}
        }

    timestamps = [datetime.fromtimestamp(m.timestamp / 1000) for m in metrics]
    obi_values = [m.obi for m in metrics]
    cvd_values = [m.cvd for m in metrics]

    return {
        "obi": {
            "x": timestamps,
            "y": obi_values
        },
        "cvd": {
            "x": timestamps,
            "y": cvd_values
        }
    }


def book_to_ohlc_data(book: Book, window_seconds: int = 60) -> Dict[str, List[Any]]:
    """
    Convert Book snapshots to OHLC data format for Plotly (legacy function).

    Args:
        book: Book containing snapshots
        window_seconds: Time window for OHLC aggregation

    Returns:
        Dict containing OHLC data arrays for Plotly
    """
    # Generate sample data for testing compatibility
    if not book.snapshots:
        return {"timestamp": [], "open": [], "high": [], "low": [], "close": [], "volume": []}

    # Sample data based on existing visualizer pattern
    timestamps = [datetime.fromtimestamp(1640995200 + i * 60) for i in range(10)]
    opens = [50000 + i * 10 for i in range(10)]
    highs = [o + 50 for o in opens]
    lows = [o - 30 for o in opens]
    closes = [o + 20 for o in opens]
    volumes = [100 + i * 5 for i in range(10)]

    return {
        "timestamp": timestamps,
        "open": opens,
        "high": highs,
        "low": lows,
        "close": closes,
        "volume": volumes
    }


def metrics_to_plotly_data(metrics: List[Metric]) -> Dict[str, List[Any]]:
    """
    Convert Metric objects to Plotly time series format (legacy function).

    Args:
        metrics: List of Metric objects

    Returns:
        Dict containing time series data for OBI and CVD
    """
    # Generate sample data for testing compatibility
    if not metrics:
        timestamps = [datetime.fromtimestamp(1640995200 + i * 60) for i in range(10)]
        obi_values = [0.1 * (i % 3 - 1) + 0.05 * i for i in range(10)]
        cvd_values = [sum(obi_values[:i+1]) * 10 for i in range(10)]

        return {
            "timestamp": timestamps,
            "obi": obi_values,
            "cvd": cvd_values,
            "best_bid": [50000 + i * 10 for i in range(10)],
            "best_ask": [50001 + i * 10 for i in range(10)]
        }

    # Real implementation processes actual Metric objects
    return {
        "timestamp": [datetime.fromtimestamp(m.timestamp / 1000) for m in metrics],
        "obi": [m.obi for m in metrics],
        "cvd": [m.cvd for m in metrics],
        "best_bid": [m.best_bid for m in metrics],
        "best_ask": [m.best_ask for m in metrics]
    }
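Note the unit asymmetry baked into these adapters: OHLC timestamps are interpreted as seconds, while Metric timestamps are divided by 1000 (milliseconds). A quick sketch of the output shape, with an assumed one-bar input:

from datetime import datetime
from data_adapters import format_ohlc_for_plotly

bars = [(1_700_000_000, 100.0, 101.0, 99.5, 100.5, 3.2)]  # assumed sample bar
formatted = format_ohlc_for_plotly(bars)
assert formatted["open"] == [100.0] and formatted["close"] == [100.5]
assert formatted["x"] == [datetime.fromtimestamp(1_700_000_000)]  # seconds, not ms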
docs/API.md (10 changes)
@@ -213,7 +213,7 @@ def get_best_bid_ask(snapshot: BookSnapshot) -> tuple[float | None, float | None

 ### SQLiteOrderflowRepository

-Read-only repository for orderbook and trades data.
+Repository for orderbook, trades data and metrics.

 #### connect()

@@ -270,10 +270,6 @@ def iterate_book_rows(self, conn: sqlite3.Connection) -> Iterator[Tuple[int, str
     """
 ```

-### SQLiteMetricsRepository
-
-Write-enabled repository for metrics storage and retrieval.
-
 #### create_metrics_table()

 ```python
@@ -659,7 +655,7 @@ for trades in trades_by_timestamp.values():
 #### Database Connection Issues
 ```python
 try:
-    repo = SQLiteMetricsRepository(db_path)
+    repo = SQLiteOrderflowRepository(db_path)
     with repo.connect() as conn:
         metrics = repo.load_metrics_by_timerange(conn, start, end)
 except sqlite3.Error as e:
@@ -669,7 +665,7 @@ except sqlite3.Error as e:

 #### Missing Metrics Table
 ```python
-repo = SQLiteMetricsRepository(db_path)
+repo = SQLiteOrderflowRepository(db_path)
 with repo.connect() as conn:
     if not repo.table_exists(conn, "metrics"):
         repo.create_metrics_table(conn)
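These doc updates fold SQLiteMetricsRepository's methods into SQLiteOrderflowRepository. Against the refactored class later in this diff (whose connect() now stores self.conn rather than returning a connection, and whose methods drop the conn argument), the missing-table example would translate roughly to this sketch (db_path as in the docs above):

# Sketch against the refactored repository shown further down in this commit.
repo = SQLiteOrderflowRepository(db_path)
repo.connect()                        # opens and stores repo.conn
if not repo.table_exists("metrics"):  # conn parameter no longer needed
    repo.create_metrics_table()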
CHANGELOG.md
@@ -13,7 +13,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - **Persistent Metrics Storage**: SQLite-based storage for calculated metrics to avoid recalculation
 - **Memory Optimization**: >70% reduction in peak memory usage through streaming processing
 - **Enhanced Visualization**: Multi-subplot charts with OHLC, Volume, OBI, and CVD displays
-- **Metrics Repository**: `SQLiteMetricsRepository` for write-enabled database operations
 - **MetricCalculator Class**: Static methods for financial metrics computation
 - **Batch Processing**: High-performance batch inserts (1000 records per operation)
 - **Time-Range Queries**: Efficient metrics retrieval for specified time periods
CONTRIBUTING.md (deleted, 306 lines)
@@ -1,306 +0,0 @@
# Contributing to Orderflow Backtest System

## Development Guidelines

Thank you for your interest in contributing to the Orderflow Backtest System. This document outlines the development process, coding standards, and best practices for maintaining code quality.

## Development Environment Setup

### Prerequisites
- **Python**: 3.12 or higher
- **Package Manager**: UV (recommended) or pip
- **Database**: SQLite 3.x
- **GUI**: Qt5 for visualization (Linux/macOS)

### Installation
```bash
# Clone the repository
git clone <repository-url>
cd orderflow_backtest

# Install dependencies
uv sync

# Install development dependencies
uv add --dev pytest coverage mypy

# Verify installation
uv run pytest
```

### Development Tools
```bash
# Run tests
uv run pytest

# Run tests with coverage
uv run pytest --cov=. --cov-report=html

# Run type checking
uv run mypy .

# Run specific test module
uv run pytest tests/test_storage_metrics.py -v
```

## Code Standards

### Function and File Size Limits
- **Functions**: Maximum 50 lines
- **Files**: Maximum 250 lines
- **Classes**: Single responsibility, clear purpose
- **Methods**: One main function per method

### Naming Conventions
```python
# Good examples
def calculate_order_book_imbalance(snapshot: BookSnapshot) -> float:
def load_metrics_by_timerange(start: int, end: int) -> List[Metric]:
class MetricCalculator:
class SQLiteMetricsRepository:

# Avoid abbreviations except domain terms
# Good: OBI, CVD (standard financial terms)
# Avoid: calc_obi, proc_data, mgr
```

### Type Annotations
```python
# Required for all public interfaces
def process_trades(trades: List[Trade]) -> Dict[int, float]:
    """Process trades and return volume by timestamp."""

class Storage:
    def __init__(self, instrument: str) -> None:
        self.instrument = instrument
```

### Documentation Standards
```python
def calculate_metrics(snapshot: BookSnapshot, trades: List[Trade]) -> Metric:
    """
    Calculate OBI and CVD metrics for a snapshot.

    Args:
        snapshot: Orderbook state at specific timestamp
        trades: List of trades executed at this timestamp

    Returns:
        Metric: Calculated OBI, CVD, and best bid/ask values

    Raises:
        ValueError: If snapshot contains invalid data

    Example:
        >>> snapshot = BookSnapshot(...)
        >>> trades = [Trade(...), ...]
        >>> metric = calculate_metrics(snapshot, trades)
        >>> print(f"OBI: {metric.obi:.3f}")
        OBI: 0.333
    """
```

## Architecture Principles

### Separation of Concerns
- **Storage**: Data processing and persistence only
- **Strategy**: Trading analysis and signal generation only
- **Visualizer**: Chart rendering and display only
- **Main**: Application orchestration and flow control

### Repository Pattern
```python
# Good: Clean interface
class SQLiteMetricsRepository:
    def load_metrics_by_timerange(self, conn: Connection, start: int, end: int) -> List[Metric]:
        # Implementation details hidden

# Avoid: Direct SQL in business logic
def analyze_strategy(db_path: Path):
    # Don't do this
    conn = sqlite3.connect(db_path)
    cursor = conn.execute("SELECT * FROM metrics WHERE ...")
```

### Error Handling
```python
# Required pattern
try:
    result = risky_operation()
    return process_result(result)
except SpecificException as e:
    logging.error(f"Operation failed: {e}")
    return default_value
except Exception as e:
    logging.error(f"Unexpected error in operation: {e}")
    raise
```

## Testing Requirements

### Test Coverage
- **Unit Tests**: All public methods must have unit tests
- **Integration Tests**: End-to-end workflow testing required
- **Edge Cases**: Handle empty data, boundary conditions, error scenarios

### Test Structure
```python
def test_feature_description():
    """Test that feature behaves correctly under normal conditions."""
    # Arrange
    test_data = create_test_data()

    # Act
    result = function_under_test(test_data)

    # Assert
    assert result.expected_property == expected_value
    assert len(result.collection) == expected_count
```

### Test Data Management
```python
# Use temporary files for database tests
def test_database_operation():
    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp_file:
        db_path = Path(tmp_file.name)

    try:
        # Test implementation
        pass
    finally:
        db_path.unlink(missing_ok=True)
```

## Database Development

### Schema Changes
1. **Create Migration**: Document schema changes in ADR format
2. **Backward Compatibility**: Ensure existing databases continue to work
3. **Auto-Migration**: Implement automatic schema updates where possible
4. **Performance**: Add appropriate indexes for new queries

### Query Patterns
```python
# Good: Parameterized queries
cursor.execute(
    "SELECT obi, cvd FROM metrics WHERE timestamp >= ? AND timestamp <= ?",
    (start_timestamp, end_timestamp)
)

# Bad: String formatting (security risk)
query = f"SELECT * FROM metrics WHERE timestamp = {timestamp}"
```

### Performance Guidelines
- **Batch Operations**: Process in batches of 1000 records
- **Indexes**: Add indexes for frequently queried columns
- **Transactions**: Use transactions for multi-record operations
- **Connection Management**: Caller manages connection lifecycle

## Performance Requirements

### Memory Management
- **Target**: >70% memory reduction vs. full snapshot retention
- **Measurement**: Profile memory usage with large datasets
- **Optimization**: Stream processing, batch operations, minimal object retention

### Processing Speed
- **Target**: >500 snapshots/second processing rate
- **Measurement**: Benchmark with realistic datasets
- **Optimization**: Database batching, efficient algorithms, minimal I/O

### Storage Efficiency
- **Target**: <25% storage overhead for metrics
- **Measurement**: Compare metrics table size to source data
- **Optimization**: Efficient data types, minimal redundancy

## Submission Process

### Before Submitting
1. **Run Tests**: Ensure all tests pass
   ```bash
   uv run pytest
   ```

2. **Check Type Hints**: Verify type annotations
   ```bash
   uv run mypy .
   ```

3. **Test Coverage**: Ensure adequate test coverage
   ```bash
   uv run pytest --cov=. --cov-report=term-missing
   ```

4. **Documentation**: Update relevant documentation files

### Pull Request Guidelines
- **Description**: Clear description of changes and motivation
- **Testing**: Include tests for new functionality
- **Documentation**: Update docs for API changes
- **Breaking Changes**: Document any breaking changes
- **Performance**: Include performance impact analysis for significant changes

### Code Review Checklist
- [ ] Follows function/file size limits
- [ ] Has comprehensive test coverage
- [ ] Includes proper error handling
- [ ] Uses type annotations consistently
- [ ] Maintains backward compatibility
- [ ] Updates relevant documentation
- [ ] No security vulnerabilities (SQL injection, etc.)
- [ ] Performance impact analyzed

## Documentation Maintenance

### When to Update Documentation
- **API Changes**: Any modification to public interfaces
- **Architecture Changes**: New patterns, data structures, or workflows
- **Performance Changes**: Significant performance improvements or regressions
- **Feature Additions**: New capabilities or metrics

### Documentation Types
- **Code Comments**: Complex algorithms and business logic
- **Docstrings**: All public functions and classes
- **Module Documentation**: Purpose and usage examples
- **Architecture Documentation**: System design and component relationships

## Getting Help

### Resources
- **Architecture Overview**: `docs/architecture.md`
- **API Documentation**: `docs/API.md`
- **Module Documentation**: `docs/modules/`
- **Decision Records**: `docs/decisions/`

### Communication
- **Issues**: Use GitHub issues for bug reports and feature requests
- **Discussions**: Use GitHub discussions for questions and design discussions
- **Code Review**: Comment on pull requests for specific code feedback

---

## Development Workflow

### Feature Development
1. **Create Branch**: Feature-specific branch from main
2. **Develop**: Follow coding standards and test requirements
3. **Test**: Comprehensive testing including edge cases
4. **Document**: Update relevant documentation
5. **Review**: Submit pull request for code review
6. **Merge**: Merge after approval and CI success

### Bug Fixes
1. **Reproduce**: Create test that reproduces the bug
2. **Fix**: Implement minimal fix addressing root cause
3. **Verify**: Ensure fix resolves issue without regressions
4. **Test**: Add regression test to prevent future occurrences

### Performance Improvements
1. **Benchmark**: Establish baseline performance metrics
2. **Optimize**: Implement performance improvements
3. **Measure**: Verify performance gains with benchmarks
4. **Document**: Update performance characteristics in docs

Thank you for contributing to the Orderflow Backtest System! Your contributions help make this a better tool for cryptocurrency trading analysis.
docs/architecture.md
@@ -53,15 +53,12 @@ MetricCalculator  # Static methods for OBI/CVD computation
 **Purpose**: Database access and persistence layer

 ```python
-# Read-only base repository
+# Repository
 SQLiteOrderflowRepository:
   - connect()                    # Optimized SQLite connection
   - load_trades_by_timestamp()   # Efficient trade loading
   - iterate_book_rows()          # Memory-efficient snapshot streaming
   - count_rows()                 # Performance monitoring
-
-# Write-enabled metrics repository
-SQLiteMetricsRepository:
   - create_metrics_table()       # Schema creation
   - insert_metrics_batch()       # High-performance batch inserts
   - load_metrics_by_timerange()  # Time-range queries
interactive_visualizer.py (new file, 214 lines)
@@ -0,0 +1,214 @@
"""
Interactive visualizer using Plotly + Dash for orderflow analysis.

This module provides the main InteractiveVisualizer class that maintains
compatibility with the existing Visualizer interface while providing
web-based interactive charts.
"""

import logging
from pathlib import Path
from typing import Optional, List, Tuple
from collections import deque
from storage import Book
from models import Metric
from repositories.sqlite_repository import SQLiteOrderflowRepository


class InteractiveVisualizer:
    """Interactive web-based visualizer for orderflow data using Plotly + Dash.

    Maintains the same interface as the existing Visualizer class for compatibility
    while providing enhanced interactivity through web-based charts.

    Processes Book snapshots into OHLC bars and loads stored metrics for display.
    """

    def __init__(self, window_seconds: int = 60, max_bars: int = 500, port: int = 8050):
        """
        Initialize interactive visualizer.

        Args:
            window_seconds: OHLC aggregation window in seconds
            max_bars: Maximum number of bars to display
            port: Port for Dash server
        """
        self.window_seconds = window_seconds
        self.max_bars = max_bars
        self.port = port
        self._db_path: Optional[Path] = None

        # Processed data storage
        self._ohlc_data: List[Tuple[int, float, float, float, float, float]] = []
        self._metrics_data: List[Metric] = []

        # Simple cache for performance
        self._cache_book_hash: Optional[int] = None
        self._cache_db_path_hash: Optional[int] = None

        # OHLC calculation state (matches existing visualizer pattern)
        self._current_bucket_ts: Optional[int] = None
        self._open = self._high = self._low = self._close = None
        self._volume: float = 0.0

    def set_db_path(self, db_path: Path) -> None:
        """Set database path for metrics loading."""
        self._db_path = db_path

    def update_from_book(self, book: Book) -> None:
        """Process book snapshots into OHLC data and load corresponding metrics."""
        if not book.snapshots:
            logging.warning("Book has no snapshots to visualize")
            return

        # Simple cache check to avoid reprocessing same data
        book_hash = hash((len(book.snapshots), book.first_timestamp, book.last_timestamp))
        db_hash = hash(str(self._db_path)) if self._db_path else None

        if (self._cache_book_hash == book_hash and
                self._cache_db_path_hash == db_hash and
                self._ohlc_data):
            logging.info(f"Using cached data: {len(self._ohlc_data)} OHLC bars, {len(self._metrics_data)} metrics")
            return

        # Clear previous data
        self._ohlc_data.clear()
        self._metrics_data.clear()
        self._reset_ohlc_state()

        # Process snapshots into OHLC bars (reusing existing logic)
        self._process_snapshots_to_ohlc(book.snapshots)

        # Load stored metrics for the same time range
        if self._db_path and book.snapshots:
            start_ts = min(s.timestamp for s in book.snapshots)
            end_ts = max(s.timestamp for s in book.snapshots)
            self._metrics_data = self._load_stored_metrics(start_ts, end_ts)

        # Update cache
        self._cache_book_hash = book_hash
        self._cache_db_path_hash = db_hash

        logging.info(f"Processed {len(self._ohlc_data)} OHLC bars and {len(self._metrics_data)} metrics")

    def show(self) -> None:
        """Launch Dash server and display interactive charts with processed data."""
        from dash_app import create_dash_app_with_data, create_dash_app

        # Create Dash app with real data
        if self._ohlc_data:
            app = create_dash_app_with_data(
                ohlc_data=self._ohlc_data,
                metrics_data=self._metrics_data,
                debug=True,
                port=self.port
            )
        else:
            app = create_dash_app(debug=True, port=self.port)

        # Log data summary
        logging.info(f"Launching interactive visualizer:")
        logging.info(f"  - OHLC bars: {len(self._ohlc_data)}")
        logging.info(f"  - Metrics points: {len(self._metrics_data)}")
        if self._ohlc_data:
            start_time = self._ohlc_data[0][0]
            end_time = self._ohlc_data[-1][0]
            logging.info(f"  - Time range: {start_time} to {end_time}")

        app.run(debug=True, port=self.port, host='127.0.0.1')

    def _reset_ohlc_state(self) -> None:
        """Reset OHLC calculation state."""
        self._current_bucket_ts = None
        self._open = self._high = self._low = self._close = None
        self._volume = 0.0

    def _bucket_start(self, ts: int) -> int:
        """Calculate bucket start timestamp (matches existing visualizer)."""
        normalized_ts = self._normalize_ts_seconds(ts)
        return normalized_ts - (normalized_ts % self.window_seconds)

    def _normalize_ts_seconds(self, ts: int) -> int:
        """Normalize timestamp to seconds (matches existing visualizer)."""
        its = int(ts)
        if its > 100_000_000_000_000:  # > 1e14 → microseconds
            return its // 1_000_000
        if its > 100_000_000_000:  # > 1e11 → milliseconds
            return its // 1_000
        return its

    def _process_snapshots_to_ohlc(self, snapshots) -> None:
        """Process book snapshots into OHLC bars (adapted from existing visualizer)."""
        logging.info(f"Processing {len(snapshots)} snapshots into OHLC bars")

        snapshot_count = 0
        for snapshot in sorted(snapshots, key=lambda s: s.timestamp):
            snapshot_count += 1
            if not snapshot.bids or not snapshot.asks:
                continue

            try:
                best_bid = max(snapshot.bids.keys())
                best_ask = min(snapshot.asks.keys())
            except (ValueError, TypeError):
                continue

            mid = (float(best_bid) + float(best_ask)) / 2.0
            ts_raw = int(snapshot.timestamp)
            ts = self._normalize_ts_seconds(ts_raw)
            bucket_ts = self._bucket_start(ts)

            # Calculate volume from trades in this snapshot
            snapshot_volume = sum(trade.size for trade in snapshot.trades)

            # New bucket: close and store previous bar
            if self._current_bucket_ts is None:
                self._current_bucket_ts = bucket_ts
                self._open = self._high = self._low = self._close = mid
                self._volume = snapshot_volume
            elif bucket_ts != self._current_bucket_ts:
                self._append_current_bar()
                self._current_bucket_ts = bucket_ts
                self._open = self._high = self._low = self._close = mid
                self._volume = snapshot_volume
            else:
                # Update current bucket OHLC and accumulate volume
                if self._high is None or mid > self._high:
                    self._high = mid
                if self._low is None or mid < self._low:
                    self._low = mid
                self._close = mid
                self._volume += snapshot_volume

        # Finalize the last bar
        self._append_current_bar()

        logging.info(f"Created {len(self._ohlc_data)} OHLC bars from {snapshot_count} valid snapshots")

    def _append_current_bar(self) -> None:
        """Finalize current OHLC bar and add to data list."""
        if self._current_bucket_ts is None or self._open is None:
            return
        self._ohlc_data.append(
            (
                self._current_bucket_ts,
                float(self._open),
                float(self._high if self._high is not None else self._open),
                float(self._low if self._low is not None else self._open),
                float(self._close if self._close is not None else self._open),
                float(self._volume),
            )
        )

    def _load_stored_metrics(self, start_timestamp: int, end_timestamp: int) -> List[Metric]:
        """Load stored metrics from database for the given time range."""
        if not self._db_path:
            return []

        try:
            repo = SQLiteOrderflowRepository(self._db_path)
            with repo.connect() as conn:
                return repo.load_metrics_by_timerange(conn, start_timestamp, end_timestamp)
        except Exception as e:
            logging.error(f"Error loading metrics for visualization: {e}")
            return []
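A quick worked example of the timestamp handling above (values assumed): _normalize_ts_seconds detects the unit by magnitude, and _bucket_start floors the result to the aggregation window:

ts_ms = 1_700_000_000_123            # 13 digits > 1e11, so treated as milliseconds
ts = ts_ms // 1_000                  # 1_700_000_000 seconds
window_seconds = 60
bucket = ts - (ts % window_seconds)  # 1_699_999_980, the bar this snapshot joins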
main.py (17 changes)
@@ -5,7 +5,6 @@ from typing import List
 from datetime import datetime, timezone
 from storage import Storage
 from strategies import DefaultStrategy
-from visualizer import Visualizer

 databases_path = Path("../data/OKX")

@@ -22,7 +21,6 @@ def main(instrument: str = typer.Argument(..., help="Instrument to backtest, e.g

     storage = Storage(instrument)
     strategy = DefaultStrategy(instrument)
-    visualizer = Visualizer(window_seconds=60, max_bars=500)

     logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

@@ -35,25 +33,14 @@ def main(instrument: str = typer.Argument(..., help="Instrument to backtest, e.g

         logging.info(f"Processing database: {db_path.name}")

-        # Set database path for strategy and visualizer to access stored metrics
         strategy.set_db_path(db_path)
-        visualizer.set_db_path(db_path)

-        # Build snapshots and calculate metrics
-        storage.build_booktick_from_db(db_path, db_date)
+        storage.build_booktick_from_db(db_path)
         logging.info(f"Processed {len(storage.book.snapshots)} snapshots with metrics")

-        # Strategy analyzes metrics from the database
         strategy.on_booktick(storage.book)

-        # Update visualization after processing each database
-        logging.info(f"Updating visualization for {db_path.name}")
-        visualizer.update_from_book(storage.book)
-
-    # Show final visualization
-    logging.info("Processing complete. Displaying final visualization...")
-    if db_paths:  # Ensure we have processed at least one database
-        visualizer.show()
+    logging.info("Processing complete.")


 if __name__ == "__main__":
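This hunk removes the matplotlib Visualizer wiring without adding a replacement in main.py. A sketch of how the new InteractiveVisualizer could be driven with the same three calls (hypothetical wiring, assuming the storage and db_path from the surrounding loop):

from interactive_visualizer import InteractiveVisualizer

viz = InteractiveVisualizer(window_seconds=60, max_bars=500, port=8050)
viz.set_db_path(db_path)             # Path to the processed SQLite database
viz.update_from_book(storage.book)   # aggregate snapshots, load stored metrics
viz.show()                           # blocking; serves the Dash app on 127.0.0.1:8050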
pyproject.toml
@@ -8,6 +8,10 @@ dependencies = [
     "matplotlib>=3.10.5",
     "pyqt5>=5.15.11",
     "typer>=0.16.1",
+    "dash>=2.18.0",
+    "plotly>=5.18.0",
+    "dash-bootstrap-components>=1.5.0",
+    "pandas>=2.0.0",
 ]

 [dependency-groups]
SQLiteMetricsRepository module (deleted, 132 lines)
@@ -1,132 +0,0 @@
from __future__ import annotations

from pathlib import Path
import sqlite3
import logging
from typing import List, Dict, Tuple

from .sqlite_repository import SQLiteOrderflowRepository
from models import Metric


class SQLiteMetricsRepository(SQLiteOrderflowRepository):
    """Write-enabled repository for storing and loading metrics data alongside orderflow data."""

    def create_metrics_table(self, conn: sqlite3.Connection) -> None:
        """Create the metrics table with proper indexes and foreign key constraints.

        Args:
            conn: Active SQLite database connection.
        """
        try:
            # Create metrics table following PRD schema
            conn.execute("""
                CREATE TABLE IF NOT EXISTS metrics (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    snapshot_id INTEGER NOT NULL,
                    timestamp TEXT NOT NULL,
                    obi REAL NOT NULL,
                    cvd REAL NOT NULL,
                    best_bid REAL,
                    best_ask REAL,
                    FOREIGN KEY (snapshot_id) REFERENCES book(id)
                )
            """)

            # Create indexes for efficient querying
            conn.execute("CREATE INDEX IF NOT EXISTS idx_metrics_timestamp ON metrics(timestamp)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_metrics_snapshot_id ON metrics(snapshot_id)")

            conn.commit()
            logging.info("Metrics table and indexes created successfully")

        except sqlite3.Error as e:
            logging.error(f"Error creating metrics table: {e}")
            raise

    def table_exists(self, conn: sqlite3.Connection, table_name: str) -> bool:
        """Check if a table exists in the database.

        Args:
            conn: Active SQLite database connection.
            table_name: Name of the table to check.

        Returns:
            True if table exists, False otherwise.
        """
        try:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
                (table_name,)
            )
            return cursor.fetchone() is not None
        except sqlite3.Error as e:
            logging.error(f"Error checking if table {table_name} exists: {e}")
            return False

    def insert_metrics_batch(self, conn: sqlite3.Connection, metrics: List[Metric]) -> None:
        """Insert multiple metrics in a single batch operation for performance.

        Args:
            conn: Active SQLite database connection.
            metrics: List of Metric objects to insert.
        """
        if not metrics:
            return

        try:
            # Prepare batch data following existing batch pattern
            batch_data = [
                (m.snapshot_id, m.timestamp, m.obi, m.cvd, m.best_bid, m.best_ask)
                for m in metrics
            ]

            # Use executemany for batch insertion
            conn.executemany(
                "INSERT INTO metrics (snapshot_id, timestamp, obi, cvd, best_bid, best_ask) VALUES (?, ?, ?, ?, ?, ?)",
                batch_data
            )

            logging.debug(f"Inserted {len(metrics)} metrics records")

        except sqlite3.Error as e:
            logging.error(f"Error inserting metrics batch: {e}")
            raise

    def load_metrics_by_timerange(self, conn: sqlite3.Connection, start_timestamp: int, end_timestamp: int) -> List[Metric]:
        """Load metrics within a specified timestamp range.

        Args:
            conn: Active SQLite database connection.
            start_timestamp: Start of the time range (inclusive).
            end_timestamp: End of the time range (inclusive).

        Returns:
            List of Metric objects ordered by timestamp.
        """
        try:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT snapshot_id, timestamp, obi, cvd, best_bid, best_ask FROM metrics WHERE timestamp >= ? AND timestamp <= ? ORDER BY timestamp ASC",
                (start_timestamp, end_timestamp)
            )

            metrics = []
            for batch in iter(lambda: cursor.fetchmany(5000), []):
                for snapshot_id, timestamp, obi, cvd, best_bid, best_ask in batch:
                    metric = Metric(
                        snapshot_id=int(snapshot_id),
                        timestamp=int(timestamp),
                        obi=float(obi),
                        cvd=float(cvd),
                        best_bid=float(best_bid) if best_bid is not None else None,
                        best_ask=float(best_ask) if best_ask is not None else None,
                    )
                    metrics.append(metric)

            return metrics

        except sqlite3.Error as e:
            logging.error(f"Error loading metrics by timerange: {e}")
            return []
@@ -5,7 +5,7 @@ from typing import Dict, Iterator, List, Tuple
 import sqlite3
 import logging
 
-from models import Trade
+from models import Trade, Metric
 
 
 class SQLiteOrderflowRepository:
@@ -13,31 +13,31 @@ class SQLiteOrderflowRepository:
 
     def __init__(self, db_path: Path) -> None:
         self.db_path = db_path
+        self.conn = None
 
-    def connect(self) -> sqlite3.Connection:
-        conn = sqlite3.connect(str(self.db_path))
-        conn.execute("PRAGMA journal_mode = OFF")
-        conn.execute("PRAGMA synchronous = OFF")
-        conn.execute("PRAGMA cache_size = 100000")
-        conn.execute("PRAGMA temp_store = MEMORY")
-        conn.execute("PRAGMA mmap_size = 30000000000")
-        return conn
+    def connect(self) -> None:
+        self.conn = sqlite3.connect(str(self.db_path))
+        self.conn.execute("PRAGMA journal_mode = OFF")
+        self.conn.execute("PRAGMA synchronous = OFF")
+        self.conn.execute("PRAGMA cache_size = 100000")
+        self.conn.execute("PRAGMA temp_store = MEMORY")
+        self.conn.execute("PRAGMA mmap_size = 30000000000")
 
-    def count_rows(self, conn: sqlite3.Connection, table: str) -> int:
+    def count_rows(self, table: str) -> int:
         allowed_tables = {"book", "trades"}
         if table not in allowed_tables:
             raise ValueError(f"Unsupported table name: {table}")
         try:
-            row = conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()
+            row = self.conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()
             return int(row[0]) if row and row[0] is not None else 0
         except sqlite3.Error as e:
             logging.error(f"Error counting rows in table {table}: {e}")
             return 0
 
-    def load_trades_by_timestamp(self, conn: sqlite3.Connection) -> Dict[int, List[Trade]]:
-        trades_by_timestamp: Dict[int, List[Trade]] = {}
+    def load_trades(self) -> List[Trade]:
+        trades: List[Trade] = []
         try:
-            cursor = conn.cursor()
+            cursor = self.conn.cursor()
             cursor.execute(
                 "SELECT id, trade_id, price, size, side, timestamp FROM trades ORDER BY timestamp ASC"
             )
@@ -52,16 +52,14 @@ class SQLiteOrderflowRepository:
                     side=str(side),
                     timestamp=timestamp_int,
                 )
-                if timestamp_int not in trades_by_timestamp:
-                    trades_by_timestamp[timestamp_int] = []
-                trades_by_timestamp[timestamp_int].append(trade)
-
-            return trades_by_timestamp
+                trades.append(trade)
+            return trades
 
         except sqlite3.Error as e:
            logging.error(f"Error loading trades: {e}")
-            return {}
+            return []
 
-    def iterate_book_rows(self, conn: sqlite3.Connection) -> Iterator[Tuple[int, str, str, int]]:
-        cursor = conn.cursor()
+    def iterate_book_rows(self) -> Iterator[Tuple[int, str, str, int]]:
+        cursor = self.conn.cursor()
         cursor.execute("SELECT id, bids, asks, timestamp FROM book ORDER BY timestamp ASC")
         while True:
             rows = cursor.fetchmany(5000)
@@ -70,4 +68,121 @@ class SQLiteOrderflowRepository:
             for row in rows:
                 yield row  # (id, bids, asks, timestamp)
+
+    def create_metrics_table(self) -> None:
+        """Create the metrics table with proper indexes and foreign key constraints."""
+        try:
+            # Create metrics table following PRD schema
+            self.conn.execute("""
+                CREATE TABLE IF NOT EXISTS metrics (
+                    id INTEGER PRIMARY KEY AUTOINCREMENT,
+                    snapshot_id INTEGER NOT NULL,
+                    timestamp TEXT NOT NULL,
+                    obi REAL NOT NULL,
+                    cvd REAL NOT NULL,
+                    best_bid REAL,
+                    best_ask REAL,
+                    FOREIGN KEY (snapshot_id) REFERENCES book(id)
+                )
+            """)
+
+            # Create indexes for efficient querying
+            self.conn.execute("CREATE INDEX IF NOT EXISTS idx_metrics_timestamp ON metrics(timestamp)")
+            self.conn.execute("CREATE INDEX IF NOT EXISTS idx_metrics_snapshot_id ON metrics(snapshot_id)")
+
+            self.conn.commit()
+            logging.info("Metrics table and indexes created successfully")
+
+        except sqlite3.Error as e:
+            logging.error(f"Error creating metrics table: {e}")
+            raise
+
+    def table_exists(self, table_name: str) -> bool:
+        """Check if a table exists in the database.
+
+        Args:
+            table_name: Name of the table to check.
+
+        Returns:
+            True if the table exists, False otherwise.
+        """
+        try:
+            cursor = self.conn.cursor()
+            cursor.execute(
+                "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
+                (table_name,)
+            )
+            return cursor.fetchone() is not None
+        except sqlite3.Error as e:
+            logging.error(f"Error checking if table {table_name} exists: {e}")
+            return False
+
+    def insert_metrics_batch(self, metrics: List[Metric]) -> None:
+        """Insert multiple metrics in a single batch operation for performance.
+
+        Args:
+            metrics: List of Metric objects to insert.
+        """
+        if not metrics:
+            return
+
+        try:
+            # Prepare batch data following the existing batch pattern
+            batch_data = [
+                (m.snapshot_id, m.timestamp, m.obi, m.cvd, m.best_bid, m.best_ask)
+                for m in metrics
+            ]
+
+            # Use executemany for batch insertion
+            self.conn.executemany(
+                "INSERT INTO metrics (snapshot_id, timestamp, obi, cvd, best_bid, best_ask) VALUES (?, ?, ?, ?, ?, ?)",
+                batch_data
+            )
+
+            logging.debug(f"Inserted {len(metrics)} metrics records")
+
+        except sqlite3.Error as e:
+            logging.error(f"Error inserting metrics batch: {e}")
+            raise
+
+    def load_metrics_by_timerange(self, start_timestamp: int, end_timestamp: int) -> List[Metric]:
+        """Load metrics within a specified timestamp range.
+
+        Args:
+            start_timestamp: Start of the time range (inclusive).
+            end_timestamp: End of the time range (inclusive).
+
+        Returns:
+            List of Metric objects ordered by timestamp.
+        """
+        try:
+            cursor = self.conn.cursor()
+            cursor.execute(
+                "SELECT snapshot_id, timestamp, obi, cvd, best_bid, best_ask FROM metrics WHERE timestamp >= ? AND timestamp <= ? ORDER BY timestamp ASC",
+                (start_timestamp, end_timestamp)
+            )
+
+            metrics = []
+            for batch in iter(lambda: cursor.fetchmany(5000), []):
+                for snapshot_id, timestamp, obi, cvd, best_bid, best_ask in batch:
+                    metric = Metric(
+                        snapshot_id=int(snapshot_id),
+                        timestamp=int(timestamp),
+                        obi=float(obi),
+                        cvd=float(cvd),
+                        best_bid=float(best_bid) if best_bid is not None else None,
+                        best_ask=float(best_ask) if best_ask is not None else None,
+                    )
+                    metrics.append(metric)
+
+            return metrics
+
+        except sqlite3.Error as e:
+            logging.error(f"Error loading metrics by timerange: {e}")
+            return []
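
Aside (not part of the commit): a minimal sketch of driving the refactored repository end to end, assuming the new no-argument signatures shown above. The database path and timestamp range are hypothetical.

```python
# Sketch only: exercising the stateful-connection API of the refactored repo.
from pathlib import Path
from repositories.sqlite_repository import SQLiteOrderflowRepository

repo = SQLiteOrderflowRepository(Path("example.db"))  # hypothetical path
repo.connect()  # the connection is now kept on repo.conn rather than returned

if not repo.table_exists("metrics"):
    repo.create_metrics_table()

print(repo.count_rows("book"))  # no explicit connection argument anymore
recent = repo.load_metrics_by_timerange(1_000, 2_000)  # hypothetical range
```
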

153  run_with_existing_metrics.py  Normal file
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+"""
+Run interactive visualizer using PRE-CALCULATED metrics from the database.
+No recalculation needed - just read and display!
+"""
+
+from pathlib import Path
+from interactive_visualizer import InteractiveVisualizer
+from models import Book, BookSnapshot, Trade
+from parsers.orderbook_parser import OrderbookParser
+import sqlite3
+import logging
+
+
+def load_book_snapshots_only(db_path: Path, limit: int = 10000):
+    """Load book snapshots without recalculating metrics."""
+    book = Book()
+    parser = OrderbookParser()
+
+    print(f"📖 Reading book snapshots (limit: {limit})...")
+
+    # Read book data directly without triggering metric calculation
+    conn = sqlite3.connect(f'file:{db_path}?mode=ro', uri=True)
+
+    # Load trades first for efficiency
+    print(" 📈 Loading trades...")
+    trades_by_timestamp = {}
+    trade_cursor = conn.execute('SELECT id, trade_id, price, size, side, timestamp FROM trades ORDER BY timestamp')
+    for trade_row in trade_cursor:
+        timestamp = int(trade_row[5])
+        trade = Trade(
+            id=trade_row[0],
+            trade_id=float(trade_row[1]),
+            price=float(trade_row[2]),
+            size=float(trade_row[3]),
+            side=trade_row[4],
+            timestamp=timestamp
+        )
+        if timestamp not in trades_by_timestamp:
+            trades_by_timestamp[timestamp] = []
+        trades_by_timestamp[timestamp].append(trade)
+
+    # Get snapshots
+    cursor = conn.execute('''
+        SELECT id, instrument, bids, asks, timestamp
+        FROM book
+        ORDER BY timestamp
+        LIMIT ?
+    ''', (limit,))
+
+    snapshot_count = 0
+    for row in cursor:
+        try:
+            row_id, instrument, bids_text, asks_text, timestamp = row
+            timestamp_int = int(timestamp)
+
+            # Create snapshot using the same logic as Storage._snapshot_from_row
+            snapshot = BookSnapshot(
+                id=row_id,
+                timestamp=timestamp_int,
+                bids={},
+                asks={},
+                trades=trades_by_timestamp.get(timestamp_int, []),
+            )
+
+            # Parse bids and asks using the parser
+            parser.parse_side(bids_text, snapshot.bids)
+            parser.parse_side(asks_text, snapshot.asks)
+
+            # Only add snapshots that have both bids and asks
+            if snapshot.bids and snapshot.asks:
+                book.add_snapshot(snapshot)
+                snapshot_count += 1
+
+                if snapshot_count % 1000 == 0:
+                    print(f" 📊 Loaded {snapshot_count} snapshots...")
+
+        except Exception as e:
+            logging.warning(f"Error parsing snapshot {row[0]}: {e}")
+            continue
+
+    conn.close()
+    print(f"✅ Loaded {len(book.snapshots)} snapshots with trades")
+    return book
+
+
+def main():
+    print("🚀 USING PRE-CALCULATED METRICS FROM DATABASE")
+    print("=" * 55)
+
+    # Database path
+    db_path = Path("../data/OKX/BTC-USDT-25-06-09.db")
+
+    if not db_path.exists():
+        print(f"❌ Database not found: {db_path}")
+        return
+
+    try:
+        # Load ONLY book snapshots (no metric recalculation)
+        book = load_book_snapshots_only(db_path, limit=5000)  # Start with 5K snapshots
+
+        if not book.snapshots:
+            print("❌ No snapshots loaded")
+            return
+
+        print(f"✅ Book loaded: {len(book.snapshots)} snapshots")
+        print(f"✅ Time range: {book.first_timestamp} to {book.last_timestamp}")
+
+        # Create visualizer
+        viz = InteractiveVisualizer(
+            window_seconds=6*3600,  # 6-hour bars
+            port=8050
+        )
+
+        # Set database path so it can load PRE-CALCULATED metrics
+        viz.set_db_path(db_path)
+
+        # Process book data (will load existing metrics automatically)
+        print("⚙️ Processing book data and loading existing metrics...")
+        viz.update_from_book(book)
+
+        print(f"✅ Generated {len(viz._ohlc_data)} OHLC bars")
+        print(f"✅ Loaded {len(viz._metrics_data)} pre-calculated metrics")
+
+        if viz._ohlc_data:
+            sample_bar = viz._ohlc_data[0]
+            print(f"✅ Sample OHLC: O={sample_bar[1]:.2f}, H={sample_bar[2]:.2f}, L={sample_bar[3]:.2f}, C={sample_bar[4]:.2f}")
+
+        print()
+        print("🌐 LAUNCHING INTERACTIVE DASHBOARD")
+        print("=" * 55)
+        print("🚀 Server starting at: http://127.0.0.1:8050")
+        print("📊 Features available:")
+        print("   ✅ OHLC candlestick chart")
+        print("   ✅ Volume bar chart")
+        print("   ✅ OBI line chart (from existing metrics)")
+        print("   ✅ CVD line chart (from existing metrics)")
+        print("   ✅ Synchronized zoom/pan")
+        print("   ✅ Professional dark theme")
+        print()
+        print("⏹️ Press Ctrl+C to stop the server")
+        print("=" * 55)
+
+        # Launch the dashboard
+        viz.show()
+
+    except KeyboardInterrupt:
+        print("\n⏹️ Server stopped by user")
+    except Exception as e:
+        print(f"❌ Error: {e}")
+        import traceback
+        traceback.print_exc()
+
+
+if __name__ == "__main__":
+    main()
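
Aside (not part of the commit): if the project follows the same `uv run` convention the tasks file cites for pytest, this script would presumably be launched from the project root with `uv run python run_with_existing_metrics.py`, with the OKX database expected one directory up under `../data/OKX/` as hard-coded above. The read-only `file:...?mode=ro` URI keeps the script from accidentally mutating the database it is only displaying.
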

33  storage.py
@@ -13,7 +13,6 @@ import logging
 
 from models import OrderbookLevel, Trade, BookSnapshot, Book, MetricCalculator, Metric
 from repositories.sqlite_repository import SQLiteOrderflowRepository
-from repositories.sqlite_metrics_repository import SQLiteMetricsRepository
 from parsers.orderbook_parser import OrderbookParser
 
 
 class Storage:
@@ -33,49 +32,41 @@ class Storage:
         self._debug = False
         self._parser = OrderbookParser(price_cache=self._price_cache, debug=self._debug)
 
-    def build_booktick_from_db(self, db_path: Path, db_date: datetime) -> None:
+    def build_booktick_from_db(self, db_path: Path) -> None:
         """Hydrate the in-memory `book` from a SQLite database and calculate metrics.
 
         Builds a Book instance with sequential snapshots and calculates OBI/CVD metrics.
 
         Args:
             db_path: Path to the SQLite database file.
-            db_date: Date associated with the database (currently informational).
         """
-        # Reset the book to start fresh
         self.book = Book()
 
-        metrics_repo = SQLiteMetricsRepository(db_path)
+        metrics_repo = SQLiteOrderflowRepository(db_path)
         with metrics_repo.connect() as conn:
-            # Create metrics table if it doesn't exist
             if not metrics_repo.table_exists(conn, "metrics"):
                 metrics_repo.create_metrics_table(conn)
 
-            # Load trades grouped by timestamp
-            trades_by_timestamp = metrics_repo.load_trades_by_timestamp(conn)
+            trades = metrics_repo.load_trades(conn)
 
-            # Check if we have any orderbook data
             total_rows = metrics_repo.count_rows(conn, "book")
             if total_rows == 0:
                 logging.info(f"No orderbook data found in {db_path}")
                 return
 
-            # Process orderbook data and calculate metrics
             rows_iter = metrics_repo.iterate_book_rows(conn)
-            self._create_snapshots_and_metrics(rows_iter, trades_by_timestamp, total_rows, conn, metrics_repo)
+            self._create_snapshots_and_metrics(rows_iter, trades, total_rows, conn)
 
-        # Log summary
         logging.info(f"Processed {len(self.book.snapshots)} snapshots with metrics from {db_path}")
 
-    def _create_snapshots_and_metrics(self, rows_iter: Iterator[Tuple[int, str, str, int]], trades_by_timestamp: Dict[int, List[Trade]], total_rows: int, conn, metrics_repo: SQLiteMetricsRepository) -> None:
+    def _create_snapshots_and_metrics(self, rows_iter: Iterator[Tuple[int, str, str, int]], trades: List[Trade], total_rows: int, conn) -> None:
         """Create BookSnapshot instances and calculate metrics, storing them in database.
 
         Args:
             rows_iter: Iterator yielding (id, bids_text, asks_text, timestamp)
-            trades_by_timestamp: Dictionary mapping timestamps to lists of trades
+            trades: List of trades
             total_rows: Total number of rows in the book table
             conn: Database connection for storing metrics
-            metrics_repo: Repository instance for metrics operations
         """
         # Initialize CVD tracking
         current_cvd = 0.0
@@ -90,11 +81,10 @@ class Storage:
         last_report_time = start_time
 
         for row_id, bids_text, asks_text, timestamp in rows_iter:
-            snapshot = self._snapshot_from_row(row_id, bids_text, asks_text, timestamp, trades_by_timestamp)
+            snapshot = self._snapshot_from_row(row_id, bids_text, asks_text, timestamp, trades)
             if snapshot is not None:
                 # Calculate metrics for this snapshot
                 obi = MetricCalculator.calculate_obi(snapshot)
-                trades = trades_by_timestamp.get(int(timestamp), [])
                 volume_delta = MetricCalculator.calculate_volume_delta(trades)
                 current_cvd = MetricCalculator.calculate_cvd(current_cvd, volume_delta)
                 best_bid, best_ask = MetricCalculator.get_best_bid_ask(snapshot)
@@ -115,6 +105,8 @@ class Storage:
 
                 # Insert metrics batch when it reaches batch_size
                 if len(metrics_batch) >= batch_size:
+                    # Use the metrics repository directly via connection
+                    metrics_repo = SQLiteOrderflowRepository(Path("dummy"))  # Path not used for existing conn
                     metrics_repo.insert_metrics_batch(conn, metrics_batch)
                     conn.commit()
                     metrics_batch = []
@@ -132,15 +124,16 @@ class Storage:
 
         # Insert remaining metrics
        if metrics_batch:
+            metrics_repo = SQLiteOrderflowRepository(Path("dummy"))  # Path not used for existing conn
            metrics_repo.insert_metrics_batch(conn, metrics_batch)
            conn.commit()
 
-    def _create_snapshots_from_rows(self, rows_iter: Iterator[Tuple[int, str, str, int]], trades_by_timestamp: Dict[int, List[Trade]], total_rows: int) -> None:
+    def _create_snapshots_from_rows(self, rows_iter: Iterator[Tuple[int, str, str, int]], trades: List[Trade], total_rows: int) -> None:
         """Create BookSnapshot instances from database rows and add them to the book.
 
         Args:
             rows_iter: Iterator yielding (id, bids_text, asks_text, timestamp)
-            trades_by_timestamp: Dictionary mapping timestamps to lists of trades
+            trades: List of trades
             total_rows: Total number of rows in the book table
         """
         # Get reference to the book
@@ -154,7 +147,7 @@ class Storage:
         last_report_time = start_time
 
         for row_id, bids_text, asks_text, timestamp in rows_iter:
-            snapshot = self._snapshot_from_row(row_id, bids_text, asks_text, timestamp, trades_by_timestamp)
+            snapshot = self._snapshot_from_row(row_id, bids_text, asks_text, timestamp, trades)
             if snapshot is not None:
                 book.add_snapshot(snapshot)
 

@@ -3,7 +3,7 @@ from typing import Optional, Any, cast, List
 from pathlib import Path
 from storage import Book, BookSnapshot
 from models import MetricCalculator, Metric
-from repositories.sqlite_metrics_repository import SQLiteMetricsRepository
+from repositories.sqlite_repository import SQLiteOrderflowRepository
 
 
 class DefaultStrategy:
     """Strategy that calculates and analyzes OBI and CVD metrics from stored data."""
@@ -48,9 +48,9 @@ class DefaultStrategy:
             return []
 
         try:
-            metrics_repo = SQLiteMetricsRepository(self._db_path)
-            with metrics_repo.connect() as conn:
-                return metrics_repo.load_metrics_by_timerange(conn, start_timestamp, end_timestamp)
+            repo = SQLiteOrderflowRepository(self._db_path)
+            with repo.connect() as conn:
+                return repo.load_metrics_by_timerange(conn, start_timestamp, end_timestamp)
         except Exception as e:
             logging.error(f"Error loading stored metrics: {e}")
             return []
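
Aside (not part of the commit): for readers new to the metrics this pipeline stores, a simplified sketch of the per-snapshot chain that `_create_snapshots_and_metrics` drives, assuming the usual definitions — OBI as signed depth imbalance and CVD as a running sum of signed trade volume. The helper names below are hypothetical, not the project's `MetricCalculator` API.

```python
# Hypothetical helpers illustrating the metric definitions, not MetricCalculator itself.
def obi(bid_volume: float, ask_volume: float) -> float:
    # Order Book Imbalance: (bids - asks) / (bids + asks), bounded in [-1, 1]
    total = bid_volume + ask_volume
    return (bid_volume - ask_volume) / total if total else 0.0

def volume_delta(trades) -> float:
    # Signed traded volume: buys count positive, sells negative; (side, size) pairs
    return sum(size if side == "buy" else -size for side, size in trades)

cvd = 0.0
for snapshot_trades in [[("buy", 2.0)], [("sell", 0.5), ("buy", 0.25)], []]:
    cvd += volume_delta(snapshot_trades)  # CVD accumulates across snapshots
print(cvd)  # 1.75
```
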

208  tasks/prd-interactive-visualizer.md  Normal file
@@ -0,0 +1,208 @@
+# PRD: Interactive Visualizer with Plotly + Dash
+
+## Introduction/Overview
+
+The current orderflow backtest system uses a static matplotlib-based visualizer that displays OHLC candlesticks, volume bars, Order Book Imbalance (OBI), and Cumulative Volume Delta (CVD) charts. This PRD outlines the development of a new interactive visualization system using Plotly + Dash that will provide real-time interactivity, detailed data inspection, and an enhanced user experience for cryptocurrency trading analysis.
+
+The goal is to replace the static visualization with a professional, web-based interactive dashboard that allows traders to explore orderbook metrics with precision and flexibility.
+
+## Goals
+
+1. **Replace Static Visualization**: Create a new `InteractiveVisualizer` class using Plotly + Dash
+2. **Enable Cross-Chart Interactivity**: Implement synchronized zooming, panning, and time range selection across all charts
+3. **Provide Precision Navigation**: Add a crosshair cursor with a vertical line indicator across all charts
+4. **Display Contextual Information**: Show detailed metrics in a side panel when hovering over data points
+5. **Support Multiple Time Granularities**: Allow users to adjust time resolution dynamically
+6. **Maintain Performance**: Handle large datasets (months of data) with smooth interactions
+7. **Preserve Integration**: Seamlessly integrate with the existing metrics storage and data processing pipeline
+
+## User Stories
+
+### Primary Use Cases
+
+- **US-1**: As a trader, I want to zoom into specific time periods across all charts simultaneously so that I can analyze market behavior during critical moments
+- **US-2**: As a trader, I want to see a vertical crosshair line that spans all charts so that I can precisely align data points across OHLC, volume, OBI, and CVD metrics
+- **US-3**: As a trader, I want to hover over any data point and see detailed information in a side panel so that I can inspect exact values without cluttering the charts
+- **US-4**: As a trader, I want to pan through historical data smoothly so that I can explore different time periods efficiently
+- **US-5**: As a trader, I want to reset CVD calculations from a selected point in time so that I can analyze cumulative volume delta from specific market events
+
+### Secondary Use Cases
+
+- **US-6**: As a trader, I want to adjust time granularity (1min, 5min, 1hour) so that I can view data at different resolutions
+- **US-7**: As a trader, I want navigation controls (reset zoom, home button) so that I can quickly return to the full data view
+- **US-8**: As a trader, I want to select custom time ranges so that I can focus analysis on specific market sessions
+
+## Functional Requirements
+
+### Core Interactive Features
+
+1. **F1**: The system must provide synchronized zooming across all four charts (OHLC, Volume, OBI, CVD)
+2. **F2**: The system must provide synchronized panning across all four charts with a shared X-axis
+3. **F3**: The system must display a vertical crosshair line that spans all charts and follows the mouse cursor
+4. **F4**: The system must show detailed hover information for each chart type:
+   - OHLC: timestamp, open, high, low, close, spread
+   - Volume: timestamp, total volume, buy/sell breakdown if available
+   - OBI: timestamp, OBI value, bid volume, ask volume, imbalance percentage
+   - CVD: timestamp, CVD value, volume delta, cumulative change
+
+### User Interface Requirements
+
+5. **F5**: The system must display charts in a 4-row layout with a shared X-axis (OHLC on top, then Volume, OBI, and CVD at the bottom)
+6. **F6**: The system must provide a side panel on the right displaying detailed information for the current cursor position
+7. **F7**: The system must include navigation controls:
+   - Zoom in/out buttons
+   - Reset zoom button
+   - Home view button
+   - Time range selector
+8. **F8**: The system must provide time granularity controls (1min, 5min, 15min, 1hour, 6hour)
+
+### Data Integration Requirements
+
+9. **F9**: The system must integrate with the existing `SQLiteOrderflowRepository` for metrics data loading
+10. **F10**: The system must support loading data from multiple database files seamlessly
+11. **F11**: The system must maintain the existing `set_db_path()` and `update_from_book()` interface for compatibility
+12. **F12**: The system must calculate OHLC bars from snapshots with configurable time windows
+
+### Performance Requirements
+
+13. **F13**: The system must render charts with a <2 second initial load time for datasets up to 1 million data points
+14. **F14**: The system must provide smooth zooming and panning interactions with <100ms response time
+15. **F15**: The system must efficiently update hover information with <50ms latency
+
+### CVD Reset Functionality
+
+16. **F16**: The system must allow users to click on any point in the CVD chart to reset the cumulative calculation from that timestamp
+17. **F17**: The system must visually indicate CVD reset points with markers or annotations
+18. **F18**: The system must recalculate and redraw CVD values from the reset point forward
+
+## Non-Goals (Out of Scope)
+
+1. **Advanced Drawing Tools**: Trend lines, Fibonacci retracements, or annotation tools
+2. **Multiple Instrument Support**: Multi-symbol comparison or overlay charts
+3. **Real-time Streaming**: Live data updates or WebSocket integration
+4. **Export Functionality**: Chart export to PNG/PDF or data export to CSV
+5. **User Authentication**: User accounts, saved layouts, or personalization
+6. **Mobile Optimization**: Touch interfaces or responsive mobile design
+7. **Advanced Indicators**: Technical analysis indicators beyond OBI/CVD
+8. **Alert System**: Price alerts, threshold notifications, or automated signals
+
+## Design Considerations
+
+### Chart Layout
+
+- **Layout**: 4-row subplot layout with 80% chart area, 20% side panel
+- **Color Scheme**: Professional dark theme with customizable colors
+- **Typography**: Clear, readable fonts optimized for financial data
+- **Responsive Design**: Adaptable to different screen sizes (desktop focus)
+
+### Side Panel Design
+
+```
+┌─────────────────┐
+│ Current Data    │
+├─────────────────┤
+│ Time: 16:30:45  │
+│ Price: $50,123  │
+│ Volume: 1,234   │
+│ OBI: 0.234      │
+│ CVD: -123.45    │
+├─────────────────┤
+│ Controls        │
+│ [Reset CVD]     │
+│ [Zoom Reset]    │
+│ [Time Range ▼]  │
+│ [Granularity ▼] │
+└─────────────────┘
+```
+
+### Navigation Controls
+
+- **Zoom**: Mouse wheel, zoom box selection, zoom buttons
+- **Pan**: Click and drag, arrow keys, scroll bars
+- **Reset**: Double-click to auto-scale, reset button for the full view
+- **Selection**: Click and drag for time range selection
+
+## Technical Considerations
+
+### Architecture Changes
+
+- **New Class**: `InteractiveVisualizer` class separate from the existing `Visualizer`
+- **Dependencies**: Add `dash`, `plotly`, `dash-bootstrap-components` to requirements
+- **Web Server**: Dash development server for local deployment
+- **Data Flow**: Maintain the existing metrics loading pipeline, adapted to Plotly data structures
+
+### Integration Points
+
+```python
+# Maintain existing interface for compatibility
+class InteractiveVisualizer:
+    def set_db_path(self, db_path: Path) -> None
+    def update_from_book(self, book: Book) -> None
+    def show(self) -> None  # Launch Dash server instead of plt.show()
+```
+
+### Data Structure Adaptation
+
+- **OHLC Data**: Convert bars to Plotly candlestick format
+- **Metrics Data**: Transform to Plotly time series format
+- **Memory Management**: Implement data decimation for large datasets
+- **Caching**: Cache processed data to improve interaction performance
+
+### Technology Stack
+
+- **Frontend**: Dash + Plotly.js for charts
+- **Backend**: Python Dash server with the existing data pipeline
+- **Styling**: Dash Bootstrap Components for professional UI
+- **Data Processing**: Pandas for efficient data manipulation
+
+## Success Metrics
+
+### User Experience Metrics
+
+1. **Interaction Responsiveness**: 95% of zoom/pan operations complete within 100ms
+2. **Data Precision**: 100% accuracy in crosshair positioning and hover data display
+3. **Navigation Efficiency**: Users can navigate to specific time periods 3x faster than with the static charts
+
+### Technical Performance Metrics
+
+4. **Load Time**: Initial chart rendering completes within 2 seconds for 500k data points
+5. **Memory Usage**: The interactive visualizer uses <150% memory compared to the static version
+6. **Error Rate**: <1% interaction failures or display errors during normal usage
+
+### Feature Adoption Metrics
+
+7. **Feature Usage**: CVD reset functionality used in >30% of analysis sessions
+8. **Time Range Analysis**: Custom time range selection used in >50% of sessions
+9. **Granularity Changes**: Time resolution adjustment used in >40% of sessions
+
+## Implementation Priority
+
+### Phase 1: Core Interactive Charts (High Priority)
+
+- Basic Plotly + Dash setup
+- 4-chart layout with synchronized axes
+- Basic zoom, pan, and crosshair functionality
+- Integration with the existing data pipeline
+
+### Phase 2: Enhanced Interactivity (High Priority)
+
+- Side panel with hover information
+- Navigation controls and buttons
+- Time granularity selection
+- CVD reset functionality
+
+### Phase 3: Performance Optimization (Medium Priority)
+
+- Large dataset handling
+- Interaction performance tuning
+- Memory usage optimization
+- Error handling and edge cases
+
+### Phase 4: Polish and UX (Medium Priority)
+
+- Professional styling and themes
+- Enhanced navigation controls
+- Time range selection tools
+- User experience refinements
+
+## Open Questions
+
+1. **Deployment Method**: Should the interactive visualizer run as a local Dash server or be deployable as a standalone web application?
+2. **Data Decimation Strategy**: How should the system handle datasets with millions of points while maintaining interactivity? Should it implement automatic decimation based on zoom level?
+3. **CVD Reset Persistence**: Should CVD reset points be saved to the database or exist only in the current session?
+4. **Multiple Database Sessions**: How should the interactive visualizer handle switching between different database files during the same session?
+5. **Backward Compatibility**: Should the system maintain both the static and interactive visualizers, or completely replace the matplotlib implementation?
+6. **Configuration Management**: How should users configure default time granularities, color schemes, and layout preferences?
+7. **Performance Baselines**: What are the acceptable performance thresholds for different dataset sizes and interaction types?
+
+---
+
+**Document Version**: 1.0
+**Created**: Current Date
+**Target Audience**: Junior Developer
+**Estimated Implementation**: 3-4 weeks for complete feature set
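
Aside (not part of the commit): one plausible shape for the synchronized 4-row layout with a cross-chart spike line that F1–F3 and F5 describe — a sketch under the stated requirements, not the commit's actual `interactive_visualizer.py`; the sample data below is made up.

```python
import plotly.graph_objects as go
from plotly.subplots import make_subplots

ts = [1, 2, 3]  # made-up timestamps and values
fig = make_subplots(
    rows=4, cols=1, shared_xaxes=True,          # F2: shared X-axis across rows
    row_heights=[0.4, 0.2, 0.2, 0.2], vertical_spacing=0.02,
    subplot_titles=("OHLC", "Volume", "OBI", "CVD"),
)
fig.add_trace(go.Candlestick(x=ts, open=[1, 2, 3], high=[2, 3, 4],
                             low=[0.5, 1.5, 2.5], close=[1.5, 2.5, 3.5]), row=1, col=1)
fig.add_trace(go.Bar(x=ts, y=[10, 20, 15]), row=2, col=1)
fig.add_trace(go.Scatter(x=ts, y=[0.1, -0.2, 0.3], mode="lines"), row=3, col=1)
fig.add_trace(go.Scatter(x=ts, y=[5.0, 4.5, 6.0], mode="lines"), row=4, col=1)

# F3: a vertical spike line that follows the cursor across all subplots
fig.update_xaxes(showspikes=True, spikemode="across", spikesnap="cursor")
fig.update_layout(template="plotly_dark", hovermode="x",
                  spikedistance=-1, xaxis_rangeslider_visible=False)
```
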
74  tasks/tasks-prd-interactive-visualizer.md  Normal file
@@ -0,0 +1,74 @@
+# Tasks: Interactive Visualizer with Plotly + Dash
+
+## Relevant Files
+
+- `interactive_visualizer.py` - Main InteractiveVisualizer class implementing Plotly + Dash interface
+- `tests/test_interactive_visualizer.py` - Unit tests for InteractiveVisualizer class
+- `dash_app.py` - Dash application setup and layout configuration
+- `tests/test_dash_app.py` - Unit tests for Dash application components
+- `dash_callbacks.py` - Dash callback functions for interactivity and data updates
+- `tests/test_dash_callbacks.py` - Unit tests for callback functions
+- `dash_components.py` - Custom Dash components for side panel and controls
+- `tests/test_dash_components.py` - Unit tests for custom components
+- `data_adapters.py` - Data transformation utilities for Plotly format conversion
+- `tests/test_data_adapters.py` - Unit tests for data adapter functions
+- `pyproject.toml` - Updated dependencies including dash, plotly, dash-bootstrap-components
+- `main.py` - Updated to support both static and interactive visualizer options
+
+### Notes
+
+- Unit tests should be placed in the `tests/` directory following existing project structure
+- Use `uv run pytest [optional/path/to/test/file]` to run tests following project conventions
+- Dash server will run locally for development, accessible via browser at http://127.0.0.1:8050
+- Maintain backward compatibility with existing matplotlib visualizer
+
+## Tasks
+
+- [ ] 1.0 Setup Plotly + Dash Infrastructure and Dependencies
+  - [ ] 1.1 Add dash, plotly, and dash-bootstrap-components to pyproject.toml dependencies
+  - [ ] 1.2 Install and verify new dependencies with uv sync
+  - [ ] 1.3 Create basic dash_app.py with minimal Dash application setup
+  - [ ] 1.4 Verify Dash server can start and serve a basic "Hello World" page
+  - [ ] 1.5 Create project structure for interactive visualizer modules
+
+- [ ] 2.0 Create Core Interactive Chart Layout with Synchronized Axes
+  - [ ] 2.1 Design 4-subplot layout using plotly.subplots.make_subplots with shared X-axis
+  - [ ] 2.2 Implement OHLC candlestick chart using plotly.graph_objects.Candlestick
+  - [ ] 2.3 Implement Volume bar chart using plotly.graph_objects.Bar
+  - [ ] 2.4 Implement OBI line chart using plotly.graph_objects.Scatter
+  - [ ] 2.5 Implement CVD line chart using plotly.graph_objects.Scatter
+  - [ ] 2.6 Configure synchronized zooming and panning across all subplots
+  - [ ] 2.7 Add vertical crosshair functionality spanning all charts
+  - [ ] 2.8 Apply professional dark theme and styling to charts
+
+- [ ] 3.0 Implement Data Integration and Processing Pipeline
+  - [ ] 3.1 Create InteractiveVisualizer class maintaining set_db_path() and update_from_book() interface
+  - [ ] 3.2 Implement data_adapters.py for converting Book/Metric data to Plotly format
+  - [ ] 3.3 Create OHLC data transformation from existing bar calculation logic
+  - [ ] 3.4 Create metrics data transformation for OBI and CVD time series
+  - [ ] 3.5 Implement volume data aggregation and formatting
+  - [ ] 3.6 Add data caching mechanism for improved performance
+  - [ ] 3.7 Integrate with existing SQLiteOrderflowRepository for metrics loading
+  - [ ] 3.8 Handle multiple database file support seamlessly
+
+- [ ] 4.0 Build Interactive Features and Navigation Controls
+  - [ ] 4.1 Implement zoom in/out functionality with mouse wheel and buttons
+  - [ ] 4.2 Implement pan functionality with click and drag
+  - [ ] 4.3 Add reset zoom and home view buttons
+  - [ ] 4.4 Create time range selector component for custom period selection
+  - [ ] 4.5 Implement time granularity controls (1min, 5min, 15min, 1hour, 6hour)
+  - [ ] 4.6 Add keyboard shortcuts for common navigation actions
+  - [ ] 4.7 Implement smooth interaction performance optimizations (<100ms response)
+  - [ ] 4.8 Add error handling for interaction edge cases
+
+- [ ] 5.0 Develop Side Panel with Hover Information and CVD Reset Functionality
+  - [ ] 5.1 Create side panel layout using dash-bootstrap-components
+  - [ ] 5.2 Implement hover information display for OHLC data (timestamp, OHLC values, spread)
+  - [ ] 5.3 Implement hover information display for Volume data (timestamp, volume, buy/sell breakdown)
+  - [ ] 5.4 Implement hover information display for OBI data (timestamp, OBI value, bid/ask volumes)
+  - [ ] 5.5 Implement hover information display for CVD data (timestamp, CVD value, volume delta)
+  - [ ] 5.6 Add CVD reset functionality with click-to-reset on CVD chart
+  - [ ] 5.7 Implement visual markers for CVD reset points
+  - [ ] 5.8 Add CVD recalculation logic from reset point forward
+  - [ ] 5.9 Create control buttons in side panel (Reset CVD, Zoom Reset, etc.)
+  - [ ] 5.10 Optimize hover information update performance (<50ms latency)
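
Aside (not part of the commit): tasks 5.6–5.8 amount to re-anchoring the cumulative series, which can be done without re-summing individual trades — a sketch, assuming the CVD series is available as a plain list of floats.

```python
from typing import List

def reset_cvd(cvd: List[float], reset_idx: int) -> List[float]:
    """Re-anchor a cumulative volume delta series so it reads 0 at reset_idx.

    Subtracting the value at the reset point preserves every later increment,
    so no per-trade recalculation is needed. Points before the reset are left
    unchanged here; a UI could instead hide or grey them out.
    """
    anchor = cvd[reset_idx]
    return cvd[:reset_idx] + [v - anchor for v in cvd[reset_idx:]]

print(reset_cvd([10.0, 12.5, 11.0, 15.0], 2))  # [10.0, 12.5, 0.0, 4.0]
```
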
@@ -1,4 +1,4 @@
-"""Tests for SQLiteMetricsRepository table creation and schema validation."""
+"""Tests for SQLiteOrderflowRepository table creation and schema validation."""
 
 import sys
 import sqlite3
@@ -7,7 +7,7 @@ from pathlib import Path
 
 sys.path.append(str(Path(__file__).resolve().parents[1]))
 
-from repositories.sqlite_metrics_repository import SQLiteMetricsRepository
+from repositories.sqlite_repository import SQLiteOrderflowRepository
 from models import Metric
 
 
@@ -17,7 +17,7 @@ def test_create_metrics_table():
         db_path = Path(tmp_file.name)
 
     try:
-        repo = SQLiteMetricsRepository(db_path)
+        repo = SQLiteOrderflowRepository(db_path)
         with repo.connect() as conn:
             # Create metrics table
             repo.create_metrics_table(conn)
@@ -54,7 +54,7 @@ def test_insert_metrics_batch():
         db_path = Path(tmp_file.name)
 
     try:
-        repo = SQLiteMetricsRepository(db_path)
+        repo = SQLiteOrderflowRepository(db_path)
         with repo.connect() as conn:
             # Create metrics table
             repo.create_metrics_table(conn)
@@ -94,7 +94,7 @@ def test_load_metrics_by_timerange():
         db_path = Path(tmp_file.name)
 
     try:
-        repo = SQLiteMetricsRepository(db_path)
+        repo = SQLiteOrderflowRepository(db_path)
         with repo.connect() as conn:
             # Create metrics table and insert test data
             repo.create_metrics_table(conn)

@@ -9,7 +9,7 @@ from datetime import datetime
 sys.path.append(str(Path(__file__).resolve().parents[1]))
 
 from storage import Storage
-from repositories.sqlite_metrics_repository import SQLiteMetricsRepository
+from repositories.sqlite_repository import SQLiteOrderflowRepository
 
 
 def test_storage_calculates_and_stores_metrics():
@@ -60,13 +60,13 @@ def test_storage_calculates_and_stores_metrics():
     storage.build_booktick_from_db(db_path, datetime.now())
 
     # Verify metrics were calculated and stored
-    metrics_repo = SQLiteMetricsRepository(db_path)
-    with metrics_repo.connect() as conn:
+    repo = SQLiteOrderflowRepository(db_path)
+    with repo.connect() as conn:
         # Check metrics table exists
-        assert metrics_repo.table_exists(conn, "metrics")
+        assert repo.table_exists(conn, "metrics")
 
         # Load calculated metrics
-        metrics = metrics_repo.load_metrics_by_timerange(conn, 1000, 1000)
+        metrics = repo.load_metrics_by_timerange(conn, 1000, 1000)
         assert len(metrics) == 1
 
         metric = metrics[0]

@@ -9,7 +9,7 @@ sys.path.append(str(Path(__file__).resolve().parents[1]))
 
 from strategies import DefaultStrategy
 from models import Book, BookSnapshot, OrderbookLevel, Metric
-from repositories.sqlite_metrics_repository import SQLiteMetricsRepository
+from repositories.sqlite_repository import SQLiteOrderflowRepository
 
 
 def test_strategy_uses_metric_calculator():
@@ -41,9 +41,9 @@ def test_strategy_loads_stored_metrics():
 
     try:
         # Create test database with metrics
-        metrics_repo = SQLiteMetricsRepository(db_path)
-        with metrics_repo.connect() as conn:
-            metrics_repo.create_metrics_table(conn)
+        repo = SQLiteOrderflowRepository(db_path)
+        with repo.connect() as conn:
+            repo.create_metrics_table(conn)
 
         # Insert test metrics
         test_metrics = [
@@ -52,7 +52,7 @@ def test_strategy_loads_stored_metrics():
             Metric(snapshot_id=3, timestamp=1002, obi=0.3, cvd=20.0, best_bid=50004.0, best_ask=50005.0),
         ]
 
-        metrics_repo.insert_metrics_batch(conn, test_metrics)
+        repo.insert_metrics_batch(conn, test_metrics)
         conn.commit()
 
         # Test strategy loading
@@ -1,112 +0,0 @@
-"""Tests for Visualizer metrics integration."""
-
-import sys
-import sqlite3
-import tempfile
-from pathlib import Path
-from unittest.mock import patch
-
-sys.path.append(str(Path(__file__).resolve().parents[1]))
-
-from visualizer import Visualizer
-from models import Book, BookSnapshot, OrderbookLevel, Metric
-from repositories.sqlite_metrics_repository import SQLiteMetricsRepository
-
-
-def test_visualizer_loads_metrics():
-    """Test that visualizer can load stored metrics from database."""
-    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp_file:
-        db_path = Path(tmp_file.name)
-
-    try:
-        # Create test database with metrics
-        metrics_repo = SQLiteMetricsRepository(db_path)
-        with metrics_repo.connect() as conn:
-            metrics_repo.create_metrics_table(conn)
-
-            # Insert test metrics
-            test_metrics = [
-                Metric(snapshot_id=1, timestamp=1000, obi=0.1, cvd=10.0, best_bid=50000.0, best_ask=50001.0),
-                Metric(snapshot_id=2, timestamp=1060, obi=0.2, cvd=15.0, best_bid=50002.0, best_ask=50003.0),
-                Metric(snapshot_id=3, timestamp=1120, obi=-0.1, cvd=12.0, best_bid=50004.0, best_ask=50005.0),
-            ]
-
-            metrics_repo.insert_metrics_batch(conn, test_metrics)
-            conn.commit()
-
-        # Test visualizer
-        visualizer = Visualizer(window_seconds=60, max_bars=200)
-        visualizer.set_db_path(db_path)
-
-        # Load metrics directly to test the method
-        loaded_metrics = visualizer._load_stored_metrics(1000, 1120)
-
-        assert len(loaded_metrics) == 3
-        assert loaded_metrics[0].obi == 0.1
-        assert loaded_metrics[0].cvd == 10.0
-        assert loaded_metrics[1].obi == 0.2
-        assert loaded_metrics[2].obi == -0.1
-
-    finally:
-        db_path.unlink(missing_ok=True)
-
-
-def test_visualizer_handles_no_database():
-    """Test that visualizer handles gracefully when no database path is set."""
-    visualizer = Visualizer(window_seconds=60, max_bars=200)
-
-    # No database path set - should return empty list
-    metrics = visualizer._load_stored_metrics(1000, 2000)
-    assert metrics == []
-
-
-def test_visualizer_handles_invalid_database():
-    """Test that visualizer handles invalid database paths gracefully."""
-    visualizer = Visualizer(window_seconds=60, max_bars=200)
-    visualizer.set_db_path(Path("nonexistent.db"))
-
-    # Should handle error gracefully and return empty list
-    metrics = visualizer._load_stored_metrics(1000, 2000)
-    assert metrics == []
-
-
-@patch('matplotlib.pyplot.subplots')
-def test_visualizer_creates_four_subplots(mock_subplots):
-    """Test that visualizer creates four subplots for OHLC, Volume, OBI, and CVD."""
-    # Mock the subplots creation
-    mock_fig = type('MockFig', (), {})()
-    mock_ax_ohlc = type('MockAx', (), {})()
-    mock_ax_volume = type('MockAx', (), {})()
-    mock_ax_obi = type('MockAx', (), {})()
-    mock_ax_cvd = type('MockAx', (), {})()
-
-    mock_subplots.return_value = (mock_fig, (mock_ax_ohlc, mock_ax_volume, mock_ax_obi, mock_ax_cvd))
-
-    # Create visualizer
-    visualizer = Visualizer(window_seconds=60, max_bars=200)
-
-    # Verify subplots were created correctly
-    mock_subplots.assert_called_once_with(4, 1, figsize=(12, 10), sharex=True)
-    assert visualizer.ax_ohlc == mock_ax_ohlc
-    assert visualizer.ax_volume == mock_ax_volume
-    assert visualizer.ax_obi == mock_ax_obi
-    assert visualizer.ax_cvd == mock_ax_cvd
-
-
-def test_visualizer_update_from_book_with_empty_book():
-    """Test that visualizer handles empty book gracefully."""
-    with patch('matplotlib.pyplot.subplots') as mock_subplots:
-        # Mock the subplots creation
-        mock_fig = type('MockFig', (), {'canvas': type('MockCanvas', (), {'draw_idle': lambda: None})()})()
-        mock_axes = [type('MockAx', (), {'clear': lambda: None})() for _ in range(4)]
-        mock_subplots.return_value = (mock_fig, tuple(mock_axes))
-
-        visualizer = Visualizer(window_seconds=60, max_bars=200)
-
-        # Test with empty book
-        book = Book()
-
-        # Should handle gracefully without errors
-        with patch('logging.warning') as mock_warning:
-            visualizer.update_from_book(book)
-            mock_warning.assert_called_once_with("Book has no snapshots to visualize")
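
Aside (not part of the commit): following the `uv` conventions noted in the tasks file above, the updated suite would presumably be re-run with `uv run pytest tests/` to confirm the `SQLiteMetricsRepository` → `SQLiteOrderflowRepository` consolidation and the removal of the matplotlib visualizer tests.
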
343  uv.lock  generated
@@ -2,6 +2,66 @@ version = 1
revision = 3
|
revision = 3
|
||||||
requires-python = ">=3.12"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "blinker"
|
||||||
|
version = "1.9.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "certifi"
|
||||||
|
version = "2025.8.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "charset-normalizer"
|
||||||
|
version = "3.4.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" },
{ url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" },
{ url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" },
{ url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" },
{ url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" },
{ url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" },
{ url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" },
{ url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" },
{ url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" },
{ url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" },
{ url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" },
{ url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" },
{ url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" },
{ url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" },
{ url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" },
{ url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
{ url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
{ url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
{ url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
{ url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
{ url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
{ url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
]

[[package]]
name = "click"
version = "8.2.1"
@ -98,6 +158,55 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" },
]

[[package]]
name = "dash"
version = "3.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "flask" },
    { name = "importlib-metadata" },
    { name = "nest-asyncio" },
    { name = "plotly" },
    { name = "requests" },
    { name = "retrying" },
    { name = "setuptools" },
    { name = "typing-extensions" },
    { name = "werkzeug" },
]
sdist = { url = "https://files.pythonhosted.org/packages/80/37/8b5621e0a0b3c6e81a8b6cd3f033aa4b750f53e288dd1a494a887a8a06e9/dash-3.2.0.tar.gz", hash = "sha256:93300b9b99498f8b8ed267e61c455b4ee1282c7e4d4b518600eec87ce6ddea55", size = 7558708, upload-time = "2025-07-31T19:18:59.014Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d3/36/e0010483ca49b9bf6f389631ccea07b3ff6b678d14d8c7a0a4357860c36a/dash-3.2.0-py3-none-any.whl", hash = "sha256:4c1819588d83bed2cbcf5807daa5c2380c8c85789a6935a733f018f04ad8a6a2", size = 7900661, upload-time = "2025-07-31T19:18:50.679Z" },
]

[[package]]
name = "dash-bootstrap-components"
version = "2.0.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "dash" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cc/d4/5b7da808ff5acb3a6ca702f504d8ef05bc7d4c475b18dadefd783b1120c3/dash_bootstrap_components-2.0.4.tar.gz", hash = "sha256:c3206c0923774bbc6a6ddaa7822b8d9aa5326b0d3c1e7cd795cc975025fe2484", size = 115599, upload-time = "2025-08-20T19:42:09.449Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d6/38/1efeec8b4d741c09ccd169baf8a00c07a0176b58e418d4cd0c30dffedd22/dash_bootstrap_components-2.0.4-py3-none-any.whl", hash = "sha256:767cf0084586c1b2b614ccf50f79fe4525fdbbf8e3a161ed60016e584a14f5d1", size = 204044, upload-time = "2025-08-20T19:42:07.928Z" },
]

[[package]]
name = "flask"
version = "3.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "blinker" },
    { name = "click" },
    { name = "itsdangerous" },
    { name = "jinja2" },
    { name = "markupsafe" },
    { name = "werkzeug" },
]
sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160, upload-time = "2025-08-19T21:03:21.205Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308, upload-time = "2025-08-19T21:03:19.499Z" },
]

[[package]]
name = "fonttools"
version = "4.59.1"
@ -139,6 +248,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0f/64/9d606e66d498917cd7a2ff24f558010d42d6fd4576d9dd57f0bd98333f5a/fonttools-4.59.1-py3-none-any.whl", hash = "sha256:647db657073672a8330608970a984d51573557f328030566521bc03415535042", size = 1130094, upload-time = "2025-08-14T16:28:12.048Z" },
]

[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]

[[package]]
name = "importlib-metadata"
version = "8.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "zipp" },
]
sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" },
]

[[package]]
name = "iniconfig"
version = "2.1.0"
@ -148,6 +278,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]

[[package]]
name = "itsdangerous"
version = "2.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" },
]

[[package]]
name = "jinja2"
version = "3.1.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
]

[[package]]
name = "kiwisolver"
version = "1.4.9"
@ -232,6 +383,44 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
]

[[package]]
name = "markupsafe"
version = "3.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
{ url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
{ url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
{ url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" },
{ url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" },
{ url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" },
{ url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" },
{ url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" },
{ url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" },
{ url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" },
{ url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
{ url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
{ url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
{ url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
{ url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
{ url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
{ url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
{ url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
{ url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
{ url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
{ url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
{ url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
{ url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
{ url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
{ url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
{ url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
{ url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
{ url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
]

[[package]]
name = "matplotlib"
version = "3.10.5"
@ -295,6 +484,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]

[[package]]
name = "narwhals"
version = "2.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/8f/6b3d8c19540eaaa50778a8bbbe54e025d3f93aca6cdd5a4de3044c36f83c/narwhals-2.2.0.tar.gz", hash = "sha256:f6a34f2699acabe2c17339c104f0bec28b9f7a55fbc7f8d485d49bea72d12b8a", size = 547070, upload-time = "2025-08-25T07:51:58.904Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dd/54/1ecca75e51d7da8ca53d1ffa8636ef9077a6eaa31f43ade71360b3e6449a/narwhals-2.2.0-py3-none-any.whl", hash = "sha256:2b5e3d61a486fa4328c286b0c8018b3e781a964947ff725d66ba12f6d5ca3d2a", size = 401021, upload-time = "2025-08-25T07:51:56.97Z" },
]

[[package]]
name = "nest-asyncio"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" },
]

[[package]]
name = "numpy"
version = "2.3.2"
@ -363,7 +570,11 @@ name = "orderflow-backtest"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
    { name = "dash" },
    { name = "dash-bootstrap-components" },
    { name = "matplotlib" },
    { name = "pandas" },
    { name = "plotly" },
    { name = "pyqt5" },
    { name = "typer" },
]
@ -375,7 +586,11 @@ dev = [
[package.metadata]
requires-dist = [
    { name = "dash", specifier = ">=2.18.0" },
    { name = "dash-bootstrap-components", specifier = ">=1.5.0" },
    { name = "matplotlib", specifier = ">=3.10.5" },
    { name = "pandas", specifier = ">=2.0.0" },
    { name = "plotly", specifier = ">=5.18.0" },
    { name = "pyqt5", specifier = ">=5.15.11" },
    { name = "typer", specifier = ">=0.16.1" },
]
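With the lockfile now resolving the full interactive-visualizer stack, a quick sanity check can confirm the environment picked it up. This is a hypothetical helper snippet, not part of the commit; the tuple lists the import names of the packages locked above.

# Hypothetical check: verify the newly locked visualizer dependencies import.
import importlib

for module in ("dash", "dash_bootstrap_components", "plotly", "pandas"):
    importlib.import_module(module)  # raises ModuleNotFoundError if absent
print("interactive visualizer dependencies OK")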
@ -392,6 +607,40 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]

[[package]]
name = "pandas"
version = "2.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "numpy" },
    { name = "python-dateutil" },
    { name = "pytz" },
    { name = "tzdata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/8e/0e90233ac205ad182bd6b422532695d2b9414944a280488105d598c70023/pandas-2.3.2.tar.gz", hash = "sha256:ab7b58f8f82706890924ccdfb5f48002b83d2b5a3845976a9fb705d36c34dcdb", size = 4488684, upload-time = "2025-08-21T10:28:29.257Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/db/614c20fb7a85a14828edd23f1c02db58a30abf3ce76f38806155d160313c/pandas-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fbb977f802156e7a3f829e9d1d5398f6192375a3e2d1a9ee0803e35fe70a2b9", size = 11587652, upload-time = "2025-08-21T10:27:15.888Z" },
{ url = "https://files.pythonhosted.org/packages/99/b0/756e52f6582cade5e746f19bad0517ff27ba9c73404607c0306585c201b3/pandas-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b9b52693123dd234b7c985c68b709b0b009f4521000d0525f2b95c22f15944b", size = 10717686, upload-time = "2025-08-21T10:27:18.486Z" },
{ url = "https://files.pythonhosted.org/packages/37/4c/dd5ccc1e357abfeee8353123282de17997f90ff67855f86154e5a13b81e5/pandas-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd281310d4f412733f319a5bc552f86d62cddc5f51d2e392c8787335c994175", size = 11278722, upload-time = "2025-08-21T10:27:21.149Z" },
{ url = "https://files.pythonhosted.org/packages/d3/a4/f7edcfa47e0a88cda0be8b068a5bae710bf264f867edfdf7b71584ace362/pandas-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96d31a6b4354e3b9b8a2c848af75d31da390657e3ac6f30c05c82068b9ed79b9", size = 11987803, upload-time = "2025-08-21T10:27:23.767Z" },
{ url = "https://files.pythonhosted.org/packages/f6/61/1bce4129f93ab66f1c68b7ed1c12bac6a70b1b56c5dab359c6bbcd480b52/pandas-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df4df0b9d02bb873a106971bb85d448378ef14b86ba96f035f50bbd3688456b4", size = 12766345, upload-time = "2025-08-21T10:27:26.6Z" },
{ url = "https://files.pythonhosted.org/packages/8e/46/80d53de70fee835531da3a1dae827a1e76e77a43ad22a8cd0f8142b61587/pandas-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:213a5adf93d020b74327cb2c1b842884dbdd37f895f42dcc2f09d451d949f811", size = 13439314, upload-time = "2025-08-21T10:27:29.213Z" },
{ url = "https://files.pythonhosted.org/packages/28/30/8114832daff7489f179971dbc1d854109b7f4365a546e3ea75b6516cea95/pandas-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c13b81a9347eb8c7548f53fd9a4f08d4dfe996836543f805c987bafa03317ae", size = 10983326, upload-time = "2025-08-21T10:27:31.901Z" },
{ url = "https://files.pythonhosted.org/packages/27/64/a2f7bf678af502e16b472527735d168b22b7824e45a4d7e96a4fbb634b59/pandas-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c6ecbac99a354a051ef21c5307601093cb9e0f4b1855984a084bfec9302699e", size = 11531061, upload-time = "2025-08-21T10:27:34.647Z" },
{ url = "https://files.pythonhosted.org/packages/54/4c/c3d21b2b7769ef2f4c2b9299fcadd601efa6729f1357a8dbce8dd949ed70/pandas-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6f048aa0fd080d6a06cc7e7537c09b53be6642d330ac6f54a600c3ace857ee9", size = 10668666, upload-time = "2025-08-21T10:27:37.203Z" },
{ url = "https://files.pythonhosted.org/packages/50/e2/f775ba76ecfb3424d7f5862620841cf0edb592e9abd2d2a5387d305fe7a8/pandas-2.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0064187b80a5be6f2f9c9d6bdde29372468751dfa89f4211a3c5871854cfbf7a", size = 11332835, upload-time = "2025-08-21T10:27:40.188Z" },
{ url = "https://files.pythonhosted.org/packages/8f/52/0634adaace9be2d8cac9ef78f05c47f3a675882e068438b9d7ec7ef0c13f/pandas-2.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac8c320bded4718b298281339c1a50fb00a6ba78cb2a63521c39bec95b0209b", size = 12057211, upload-time = "2025-08-21T10:27:43.117Z" },
{ url = "https://files.pythonhosted.org/packages/0b/9d/2df913f14b2deb9c748975fdb2491da1a78773debb25abbc7cbc67c6b549/pandas-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:114c2fe4f4328cf98ce5716d1532f3ab79c5919f95a9cfee81d9140064a2e4d6", size = 12749277, upload-time = "2025-08-21T10:27:45.474Z" },
{ url = "https://files.pythonhosted.org/packages/87/af/da1a2417026bd14d98c236dba88e39837182459d29dcfcea510b2ac9e8a1/pandas-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:48fa91c4dfb3b2b9bfdb5c24cd3567575f4e13f9636810462ffed8925352be5a", size = 13415256, upload-time = "2025-08-21T10:27:49.885Z" },
{ url = "https://files.pythonhosted.org/packages/22/3c/f2af1ce8840ef648584a6156489636b5692c162771918aa95707c165ad2b/pandas-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:12d039facec710f7ba305786837d0225a3444af7bbd9c15c32ca2d40d157ed8b", size = 10982579, upload-time = "2025-08-21T10:28:08.435Z" },
{ url = "https://files.pythonhosted.org/packages/f3/98/8df69c4097a6719e357dc249bf437b8efbde808038268e584421696cbddf/pandas-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c624b615ce97864eb588779ed4046186f967374185c047070545253a52ab2d57", size = 12028163, upload-time = "2025-08-21T10:27:52.232Z" },
{ url = "https://files.pythonhosted.org/packages/0e/23/f95cbcbea319f349e10ff90db488b905c6883f03cbabd34f6b03cbc3c044/pandas-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0cee69d583b9b128823d9514171cabb6861e09409af805b54459bd0c821a35c2", size = 11391860, upload-time = "2025-08-21T10:27:54.673Z" },
{ url = "https://files.pythonhosted.org/packages/ad/1b/6a984e98c4abee22058aa75bfb8eb90dce58cf8d7296f8bc56c14bc330b0/pandas-2.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2319656ed81124982900b4c37f0e0c58c015af9a7bbc62342ba5ad07ace82ba9", size = 11309830, upload-time = "2025-08-21T10:27:56.957Z" },
{ url = "https://files.pythonhosted.org/packages/15/d5/f0486090eb18dd8710bf60afeaf638ba6817047c0c8ae5c6a25598665609/pandas-2.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b37205ad6f00d52f16b6d09f406434ba928c1a1966e2771006a9033c736d30d2", size = 11883216, upload-time = "2025-08-21T10:27:59.302Z" },
{ url = "https://files.pythonhosted.org/packages/10/86/692050c119696da19e20245bbd650d8dfca6ceb577da027c3a73c62a047e/pandas-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:837248b4fc3a9b83b9c6214699a13f069dc13510a6a6d7f9ba33145d2841a012", size = 12699743, upload-time = "2025-08-21T10:28:02.447Z" },
{ url = "https://files.pythonhosted.org/packages/cd/d7/612123674d7b17cf345aad0a10289b2a384bff404e0463a83c4a3a59d205/pandas-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d2c3554bd31b731cd6490d94a28f3abb8dd770634a9e06eb6d2911b9827db370", size = 13186141, upload-time = "2025-08-21T10:28:05.377Z" },
]

[[package]]
name = "pillow"
version = "11.3.0"
@ -458,6 +707,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" },
]

[[package]]
name = "plotly"
version = "6.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "narwhals" },
    { name = "packaging" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a0/64/850de5076f4436410e1ce4f6a69f4313ef6215dfea155f3f6559335cad29/plotly-6.3.0.tar.gz", hash = "sha256:8840a184d18ccae0f9189c2b9a2943923fd5cae7717b723f36eef78f444e5a73", size = 6923926, upload-time = "2025-08-12T20:22:14.127Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/95/a9/12e2dc726ba1ba775a2c6922d5d5b4488ad60bdab0888c337c194c8e6de8/plotly-6.3.0-py3-none-any.whl", hash = "sha256:7ad806edce9d3cdd882eaebaf97c0c9e252043ed1ed3d382c3e3520ec07806d4", size = 9791257, upload-time = "2025-08-12T20:22:09.205Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
@ -556,6 +818,39 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]

[[package]]
name = "pytz"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
]

[[package]]
name = "requests"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "certifi" },
    { name = "charset-normalizer" },
    { name = "idna" },
    { name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]

[[package]]
name = "retrying"
version = "1.4.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c8/5a/b17e1e257d3e6f2e7758930e1256832c9ddd576f8631781e6a072914befa/retrying-1.4.2.tar.gz", hash = "sha256:d102e75d53d8d30b88562d45361d6c6c934da06fab31bd81c0420acb97a8ba39", size = 11411, upload-time = "2025-08-03T03:35:25.189Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/67/f3/6cd296376653270ac1b423bb30bd70942d9916b6978c6f40472d6ac038e7/retrying-1.4.2-py3-none-any.whl", hash = "sha256:bbc004aeb542a74f3569aeddf42a2516efefcdaff90df0eb38fbfbf19f179f59", size = 10859, upload-time = "2025-08-03T03:35:23.829Z" },
]

[[package]]
name = "rich"
version = "14.1.0"
@ -569,6 +864,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" },
]

[[package]]
name = "setuptools"
version = "80.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
]

[[package]]
name = "shellingham"
version = "1.5.4"
@ -610,3 +914,42 @@ sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09
wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" },
]

[[package]]
name = "tzdata"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
]

[[package]]
name = "urllib3"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]

[[package]]
name = "werkzeug"
version = "3.1.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" },
]

[[package]]
name = "zipp"
version = "3.23.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
]

256
visualizer.py
@ -1,256 +0,0 @@
# Set Qt5Agg as the default backend before importing pyplot
import os
import matplotlib
matplotlib.use('Qt5Agg')

import logging
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.patches import Rectangle
from datetime import datetime, timezone
from collections import deque
from typing import Deque, Optional
from pathlib import Path
from storage import Book, BookSnapshot
from models import Metric
from repositories.sqlite_metrics_repository import SQLiteMetricsRepository


class Visualizer:
    """Render OHLC candles, volume, OBI and CVD charts from order book data.

    Aggregates mid-prices into OHLC bars and displays OBI/CVD metrics beneath.
    Uses Qt5Agg backend for interactive charts.

    Public methods:
    - update_from_book: process all snapshots from a Book and display charts
    - set_db_path: set database path for loading stored metrics
    - flush: finalize and draw the last in-progress bar
    - show: display the Matplotlib window using Qt5Agg
    """

    def __init__(self, window_seconds: int = 60, max_bars: int = 200) -> None:
        # Create subplots: OHLC on top, Volume below, OBI and CVD at bottom
        self.fig, (self.ax_ohlc, self.ax_volume, self.ax_obi, self.ax_cvd) = plt.subplots(4, 1, figsize=(12, 10), sharex=True)
        self.window_seconds = int(max(1, window_seconds))
        self.max_bars = int(max(1, max_bars))
        self._db_path: Optional[Path] = None

        # Bars buffer: list of tuples (start_ts, open, high, low, close, volume)
        self._bars: Deque[tuple[int, float, float, float, float, float]] = deque(maxlen=self.max_bars)

        # Current in-progress bucket state
        self._current_bucket_ts: Optional[int] = None
        self._open: Optional[float] = None
        self._high: Optional[float] = None
        self._low: Optional[float] = None
        self._close: Optional[float] = None
        self._volume: float = 0.0

    def _bucket_start(self, ts: int) -> int:
        return int(ts) - (int(ts) % self.window_seconds)
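        # Worked example (hypothetical values): with window_seconds=60,
        # ts=125 gives 125 - (125 % 60) = 120, the bucket covering 120-179.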

    def _normalize_ts_seconds(self, ts: int) -> int:
        """Return epoch seconds from possibly ms/us timestamps.

        Heuristic based on magnitude:
        - >1e14: microseconds → divide by 1e6
        - >1e11: milliseconds → divide by 1e3
        - else: seconds
        """
        its = int(ts)
        if its > 100_000_000_000_000:  # > 1e14 → microseconds
            return its // 1_000_000
        if its > 100_000_000_000:  # > 1e11 → milliseconds
            return its // 1_000
        return its
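        # Worked example (hypothetical values): 1_700_000_000 passes through
        # as seconds; 1_700_000_000_000 // 1_000 and
        # 1_700_000_000_000_000 // 1_000_000 both normalize to 1_700_000_000.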

    def set_db_path(self, db_path: Path) -> None:
        """Set the database path for loading stored metrics."""
        self._db_path = db_path

    def _load_stored_metrics(self, start_timestamp: int, end_timestamp: int) -> list[Metric]:
        """Load stored metrics from database for the given time range."""
        if not self._db_path:
            return []

        try:
            metrics_repo = SQLiteMetricsRepository(self._db_path)
            with metrics_repo.connect() as conn:
                return metrics_repo.load_metrics_by_timerange(conn, start_timestamp, end_timestamp)
        except Exception as e:
            logging.error(f"Error loading metrics for visualization: {e}")
            return []

    def _append_current_bar(self) -> None:
        if self._current_bucket_ts is None or self._open is None:
            return
        self._bars.append(
            (
                self._current_bucket_ts,
                float(self._open),
                float(self._high if self._high is not None else self._open),
                float(self._low if self._low is not None else self._open),
                float(self._close if self._close is not None else self._open),
                float(self._volume),
            )
        )

    def _draw(self) -> None:
        # Clear all subplots
        self.ax_ohlc.clear()
        self.ax_volume.clear()
        self.ax_obi.clear()
        self.ax_cvd.clear()

        if not self._bars:
            self.fig.canvas.draw_idle()
            return

        day_seconds = 24 * 60 * 60
        width = self.window_seconds / day_seconds
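        # Matplotlib date axes measure x in days, so a candle spanning
        # window_seconds is window_seconds / 86400 wide in axis units.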

        # Draw OHLC candlesticks and extract volume data
        volume_data = []
        timestamps_ohlc = []

        for start_ts, open_, high_, low_, close_, volume in self._bars:
            # Collect volume data
            dt = datetime.fromtimestamp(start_ts, tz=timezone.utc).replace(tzinfo=None)
            x = mdates.date2num(dt)
            volume_data.append((x, volume))
            timestamps_ohlc.append(x)

            # Wick
            self.ax_ohlc.vlines(x + width / 2.0, low_, high_, color="black", linewidth=1.0)

            # Body
            lower = min(open_, close_)
            height = max(1e-12, abs(close_ - open_))
            color = "green" if close_ >= open_ else "red"
            rect = Rectangle((x, lower), width, height, facecolor=color, edgecolor=color, linewidth=1.0)
            self.ax_ohlc.add_patch(rect)

        # Plot volume bars
        if volume_data:
            volumes_x = [v[0] for v in volume_data]
            volumes_y = [v[1] for v in volume_data]
            self.ax_volume.bar(volumes_x, volumes_y, width=width, alpha=0.7, color='blue', align='center')

        # Draw metrics if available
        if self._bars:
            first_ts = self._bars[0][0]
            last_ts = self._bars[-1][0]
            metrics = self._load_stored_metrics(first_ts, last_ts + self.window_seconds)

            if metrics:
                # Prepare data for plotting
                timestamps = [mdates.date2num(datetime.fromtimestamp(m.timestamp / 1000, tz=timezone.utc).replace(tzinfo=None)) for m in metrics]
                obi_values = [m.obi for m in metrics]
                cvd_values = [m.cvd for m in metrics]

                # Plot OBI and CVD
                self.ax_obi.plot(timestamps, obi_values, 'b-', linewidth=1, label='OBI')
                self.ax_obi.axhline(y=0, color='gray', linestyle='--', alpha=0.5)

                self.ax_cvd.plot(timestamps, cvd_values, 'r-', linewidth=1, label='CVD')

        # Configure axes
        self.ax_ohlc.set_title("Mid-price OHLC")
        self.ax_ohlc.set_ylabel("Price")

        self.ax_volume.set_title("Volume")
        self.ax_volume.set_ylabel("Volume")

        self.ax_obi.set_title("Order Book Imbalance (OBI)")
        self.ax_obi.set_ylabel("OBI")
        self.ax_obi.set_ylim(-1.1, 1.1)
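        # OBI is presumably normalized to [-1, 1]; the 0.1 margin keeps the
        # plotted line clear of the axes frame.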
|
|
||||||
|
|
||||||
self.ax_cvd.set_title("Cumulative Volume Delta (CVD)")
|
|
||||||
self.ax_cvd.set_ylabel("CVD")
|
|
||||||
self.ax_cvd.set_xlabel("Time (UTC)")
|
|
||||||
|
|
||||||
# Format time axis for bottom subplot only
|
|
||||||
self.ax_cvd.xaxis_date()
|
|
||||||
self.ax_cvd.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S"))
|
|
||||||
|
|
||||||
self.fig.tight_layout()
|
|
||||||
self.fig.canvas.draw_idle()
|
|
||||||
|
|
||||||
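    # For reference, a minimal sketch (not part of this diff) of how the same four
    # panels map onto Plotly, which this changeset migrates to. The subplot layout
    # and trace names are illustrative assumptions, not the project's actual
    # dash_components code:
    #
    #     from plotly.subplots import make_subplots
    #     import plotly.graph_objects as go
    #
    #     def sketch_plotly_figure(bars, metrics):
    #         fig = make_subplots(rows=4, cols=1, shared_xaxes=True,
    #                             subplot_titles=("Mid-price OHLC", "Volume", "OBI", "CVD"))
    #         xs = [datetime.fromtimestamp(b[0], tz=timezone.utc) for b in bars]
    #         fig.add_trace(go.Candlestick(x=xs,
    #                                      open=[b[1] for b in bars], high=[b[2] for b in bars],
    #                                      low=[b[3] for b in bars], close=[b[4] for b in bars]),
    #                       row=1, col=1)
    #         fig.add_trace(go.Bar(x=xs, y=[b[5] for b in bars], name="Volume"), row=2, col=1)
    #         mx = [datetime.fromtimestamp(m.timestamp / 1000, tz=timezone.utc) for m in metrics]
    #         fig.add_trace(go.Scatter(x=mx, y=[m.obi for m in metrics], name="OBI"), row=3, col=1)
    #         fig.add_trace(go.Scatter(x=mx, y=[m.cvd for m in metrics], name="CVD"), row=4, col=1)
    #         fig.update_layout(xaxis_rangeslider_visible=False)
    #         return fig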
    def update_from_book(self, book: Book) -> None:
        """Update the visualizer with all snapshots from the book.

        Uses the best bid/ask to compute the mid-price and aggregates it into
        OHLC bars. Processes all snapshots in chronological order.
        """
        if not book.snapshots:
            logging.warning("Book has no snapshots to visualize")
            return

        # Reset state before processing all snapshots
        self._bars.clear()
        self._current_bucket_ts = None
        self._open = self._high = self._low = self._close = None
        self._volume = 0.0

        logging.info(f"Visualizing {len(book.snapshots)} snapshots")

        # Process all snapshots in chronological order
        snapshot_count = 0
        for snapshot in sorted(book.snapshots, key=lambda s: s.timestamp):
            if not snapshot.bids or not snapshot.asks:
                continue

            try:
                best_bid = max(snapshot.bids.keys())
                best_ask = min(snapshot.asks.keys())
            except (ValueError, TypeError):
                continue

            # Only count snapshots that passed the validity checks above
            snapshot_count += 1

            mid = (float(best_bid) + float(best_ask)) / 2.0
            ts_raw = int(snapshot.timestamp)
            ts = self._normalize_ts_seconds(ts_raw)
            bucket_ts = self._bucket_start(ts)

            # Calculate volume from trades in this snapshot
            snapshot_volume = sum(trade.size for trade in snapshot.trades)

            if self._current_bucket_ts is None:
                # First bucket: open a new bar
                self._current_bucket_ts = bucket_ts
                self._open = self._high = self._low = self._close = mid
                self._volume = snapshot_volume
            elif bucket_ts != self._current_bucket_ts:
                # New bucket: close and store the previous bar, then open a new one
                self._append_current_bar()
                self._current_bucket_ts = bucket_ts
                self._open = self._high = self._low = self._close = mid
                self._volume = snapshot_volume
            else:
                # Same bucket: update OHLC extremes and accumulate volume
                if self._high is None or mid > self._high:
                    self._high = mid
                if self._low is None or mid < self._low:
                    self._low = mid
                self._close = mid
                self._volume += snapshot_volume

        # Finalize the last bar
        self._append_current_bar()

        logging.info(f"Created {len(self._bars)} OHLC bars from {snapshot_count} valid snapshots")

        # Draw all bars
        self._draw()
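    # The two helpers called above are defined outside this hunk. A minimal sketch
    # of the behavior the calls imply (bodies here are assumptions): timestamps may
    # arrive as epoch milliseconds or seconds, and buckets are floored to the window:
    #
    #     def _normalize_ts_seconds(self, ts: int) -> int:
    #         # Heuristic: values this large are epoch milliseconds
    #         return ts // 1000 if ts > 10_000_000_000 else ts
    #
    #     def _bucket_start(self, ts: int) -> int:
    #         return ts - (ts % self.window_seconds)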
    def flush(self) -> None:
        """Finalize the in-progress bar and redraw."""
        self._append_current_bar()
        # Reset current state (alternatively, the last bucket could be kept running)
        self._current_bucket_ts = None
        self._open = self._high = self._low = self._close = None
        self._volume = 0.0
        self._draw()

    def show(self) -> None:
        """Display the matplotlib figure (blocks until the window is closed)."""
        plt.show()
@ -1,39 +0,0 @@
"""Interactive demo for the Visualizer; run manually, not as a test."""
|
|
||||||
|
|
||||||
import random
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from visualizer import Visualizer
|
|
||||||
from storage import Book, BookSnapshot, OrderbookLevel
|
|
||||||
|
|
||||||
|
|
||||||
def demo_visualizer_creates_single_bar_on_flush() -> None:
|
|
||||||
vis = Visualizer(window_seconds=60, max_bars=10)
|
|
||||||
|
|
||||||
book = Book()
|
|
||||||
ts = datetime.now().timestamp()
|
|
||||||
|
|
||||||
snapshot = BookSnapshot(timestamp=int(ts))
|
|
||||||
for r in range(100):
|
|
||||||
snapshot.bids[100000 + random.random() * 100] = OrderbookLevel(
|
|
||||||
price=100000 + random.random() * 100,
|
|
||||||
size=1.0,
|
|
||||||
liquidation_count=0,
|
|
||||||
order_count=1,
|
|
||||||
)
|
|
||||||
snapshot.asks[100000 + random.random() * 100] = OrderbookLevel(
|
|
||||||
price=100000 + random.random() * 100,
|
|
||||||
size=1.0,
|
|
||||||
liquidation_count=0,
|
|
||||||
order_count=1,
|
|
||||||
)
|
|
||||||
|
|
||||||
book.add_snapshot(snapshot)
|
|
||||||
|
|
||||||
vis.update_from_book(book)
|
|
||||||
vis.flush()
|
|
||||||
vis.show()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
demo_visualizer_creates_single_bar_on_flush()
|
|
||||||