import logging
import subprocess
import sys
import threading
import time
from datetime import datetime, timezone
from pathlib import Path

import typer
from PySide6.QtCore import QTimer, Signal
from PySide6.QtWidgets import QApplication

from db_interpreter import DBInterpreter
from desktop_app import MainWindow
from ohlc_processor import OHLCProcessor


def main(
    instrument: str = typer.Argument(..., help="Instrument to backtest, e.g. BTC-USDT"),
    start_date: str = typer.Argument(..., help="Start date, e.g. 2025-07-01"),
    end_date: str = typer.Argument(..., help="End date, e.g. 2025-08-01"),
) -> None:
    """Replay recorded per-day orderbook/trade databases through an OHLC
    processor while a Qt desktop window visualizes the evolving state.

    Databases are read from ``../data/OKX`` and matched by the glob
    ``{instrument}*.db``. Only files whose embedded date falls in
    [start_date, end_date) are processed — the end date is exclusive.
    Data is streamed on a daemon worker thread; the GUI polls the shared
    processor via a 1-second QTimer (Qt widgets must only be touched from
    the main thread, so the worker never calls into the window directly).
    """
    logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")

    # Parse the CLI date strings into timezone-aware UTC datetimes.
    start_date = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=timezone.utc)
    end_date = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=timezone.utc)

    databases_path = Path("../data/OKX")
    if not databases_path.exists():
        logging.error(f"Database path does not exist: {databases_path}")
        return

    # Sort so days are replayed in chronological order (dates sort lexically).
    db_paths = sorted(databases_path.glob(f"{instrument}*.db"))
    if not db_paths:
        logging.error(f"No database files found for instrument {instrument} in {databases_path}")
        return
    logging.info(f"Found {len(db_paths)} database files: {[p.name for p in db_paths]}")

    # 1-hour aggregation window for the OHLC candles.
    processor = OHLCProcessor(aggregate_window_seconds=60 * 60)

    app = QApplication(sys.argv)
    desktop_app = MainWindow()
    desktop_app.show()

    # Refresh the GUI from the main thread once per second by polling the
    # processor that the worker thread mutates.
    timer = QTimer()
    timer.timeout.connect(lambda: desktop_app.update_data(processor))
    timer.start(1000)

    def process_data() -> None:
        """Worker thread: stream every in-range database into the processor."""
        try:
            for db_path in db_paths:
                # Filenames are "<INSTRUMENT>-YY-MM-DD.db"; the date is always
                # the LAST three dash-separated fields, which also handles
                # instruments with more than two components (e.g.
                # "BTC-USDT-SWAP") — TODO confirm against the recorder's
                # naming scheme.
                stem_parts = db_path.name.split(".")[0].split("-")
                if len(stem_parts) < 5:
                    logging.warning(f"Unexpected filename format: {db_path.name}")
                    continue
                try:
                    db_date = datetime.strptime(
                        "".join(stem_parts[-3:]), "%y%m%d"
                    ).replace(tzinfo=timezone.utc)
                except ValueError:
                    # A malformed date should skip this file, not abort the
                    # whole replay via the outer except.
                    logging.warning(f"Unexpected filename format: {db_path.name}")
                    continue
                # Half-open range: the end date itself is excluded.
                if db_date < start_date or db_date >= end_date:
                    logging.info(f"Skipping {db_path.name} - outside date range")
                    continue
                logging.info(f"Processing database: {db_path.name}")
                db_interpreter = DBInterpreter(db_path)
                for orderbook_update, trades in db_interpreter.stream():
                    processor.update_orderbook(orderbook_update)
                    processor.process_trades(trades)
            logging.info("Data processing completed")
        except Exception:
            # logging.exception records the full traceback, unlike
            # logging.error with just str(e).
            logging.exception("Error in data processing")

    # Daemon thread so the process exits when the Qt event loop quits.
    data_thread = threading.Thread(target=process_data, daemon=True)
    data_thread.start()

    app.exec()


if __name__ == "__main__":
    typer.run(main)