"""
|
|
Backtesting Application
|
|
|
|
This module provides the main application class that orchestrates the entire
|
|
backtesting workflow. It coordinates configuration management, data loading,
|
|
backtest execution, and result output.
|
|
"""
|
|
|
|
import logging
|
|
import datetime
|
|
import concurrent.futures
|
|
from pathlib import Path
|
|
from typing import Optional, List, Dict, Any
|
|
|
|
from cycles.utils.storage import Storage
|
|
from cycles.utils.system import SystemUtils
|
|
from cycles.utils.config_manager import ConfigManager
|
|
from cycles.utils.results_processor import ResultsProcessor
|
|
from cycles.utils.backtest_runner import create_timeframe_tasks
|
|
|
|
|
|
class BacktestApplication:
    """
    Main application class for coordinating the backtesting workflow.

    Orchestrates configuration management, data loading, backtest execution,
    and result output in a clean, modular way.
    """

    def __init__(self, config_path: Optional[str] = None):
        """
        Initialize the backtesting application.

        Args:
            config_path: Optional path to a configuration file
        """
        self.config_manager = ConfigManager(config_path)
        self.storage = Storage(logging=logging)
        self.system_utils = SystemUtils(logging=logging)
        self.results_processor = ResultsProcessor()

        self.logger = logging.getLogger(__name__)

    def load_data(self):
        """Load market data based on configuration."""
        self.logger.info("Loading market data...")

        data_1min = self.storage.load_data(
            'btcusd_1-min_data.csv',
            self.config_manager.start_date,
            self.config_manager.stop_date
        )

        self.logger.info(f"Loaded {len(data_1min)} rows of 1-minute data")
        return data_1min

    def create_tasks(self, data_1min) -> List:
        """Create backtest tasks from configuration."""
        self.logger.info("Creating backtest tasks...")

        tasks = create_timeframe_tasks(
            self.config_manager.timeframes,
            data_1min,
            self.config_manager
        )

        self.logger.info(f"Created {len(tasks)} backtest tasks")
        return tasks

    def execute_tasks(self, tasks: List, debug: bool = False) -> tuple:
        """
        Execute backtest tasks.

        Args:
            tasks: List of TimeframeTask objects
            debug: Whether to run in debug mode (sequential with plotting)

        Returns:
            Tuple of (results_rows, trade_rows)
        """
        if debug:
            return self._execute_tasks_debug(tasks)
        else:
            return self._execute_tasks_parallel(tasks)

    def _execute_tasks_debug(self, tasks: List) -> tuple:
        """Execute tasks in debug mode (sequential)."""
        self.logger.info("Executing tasks in debug mode (sequential)")

        all_results_rows = []
        all_trade_rows = []

        for task in tasks:
            self.logger.info(f"Processing timeframe: {task.timeframe}")
            results, trades = task.execute(debug=True)

            if results:
                all_results_rows.append(results)
            if trades:
                all_trade_rows.extend(trades)

        return all_results_rows, all_trade_rows

    def _execute_tasks_parallel(self, tasks: List) -> tuple:
        """Execute tasks in parallel."""
        workers = self.system_utils.get_optimal_workers()
        self.logger.info(f"Executing tasks in parallel with {workers} workers")

        all_results_rows = []
        all_trade_rows = []

        with concurrent.futures.ProcessPoolExecutor(max_workers=workers) as executor:
            # Submit all tasks
            futures = {
                executor.submit(task.execute, False): task
                for task in tasks
            }

            # Collect results
            for future in concurrent.futures.as_completed(futures):
                task = futures[future]
                try:
                    results, trades = future.result()

                    if results:
                        all_results_rows.append(results)
                    if trades:
                        all_trade_rows.extend(trades)

                    self.logger.info(f"Completed timeframe: {task.timeframe}")

                except Exception as e:
                    self.logger.error(f"Task failed for timeframe {task.timeframe}: {e}")

        return all_results_rows, all_trade_rows

    def save_results(self, results_rows: List[Dict[str, Any]], trade_rows: List[Dict[str, Any]],
                     data_1min) -> None:
        """
        Save backtest results to files.

        Args:
            results_rows: List of result summary rows
            trade_rows: List of individual trade rows
            data_1min: Original 1-minute data for metadata
        """
        timestamp = datetime.datetime.now().strftime("%Y_%m_%d_%H_%M")

        # Create metadata
        metadata_lines = self.results_processor.create_metadata_lines(
            self.config_manager, data_1min
        )

        # Save backtest results
        backtest_filename = f"{timestamp}_backtest.csv"
        backtest_fieldnames = [
            "timeframe", "stop_loss_pct", "n_trades", "n_stop_loss", "win_rate",
            "max_drawdown", "avg_trade", "profit_ratio", "final_usd", "total_fees_usd"
        ]

        self.storage.write_backtest_results(
            backtest_filename, backtest_fieldnames, results_rows, metadata_lines
        )

        # Save trade details
        trades_fieldnames = [
            "entry_time", "exit_time", "entry_price", "exit_price",
            "profit_pct", "type", "fee_usd"
        ]

        self.storage.write_trades(trade_rows, trades_fieldnames)

        self.logger.info(f"Results saved to {backtest_filename}")

    def run(self, debug: bool = False) -> None:
        """
        Run the complete backtesting workflow.

        Args:
            debug: Whether to run in debug mode
        """
        try:
            self.logger.info("Starting backtesting workflow")
            self.logger.info(f"Configuration: {self.config_manager}")

            # Load data
            data_1min = self.load_data()

            # Create and execute tasks
            tasks = self.create_tasks(data_1min)
            results_rows, trade_rows = self.execute_tasks(tasks, debug)

            # Save results
            if results_rows or trade_rows:
                self.save_results(results_rows, trade_rows, data_1min)
                self.logger.info("Backtesting workflow completed successfully")
            else:
                self.logger.warning("No results generated")

        except Exception as e:
            self.logger.error(f"Backtesting workflow failed: {e}")
            raise


def setup_logging() -> None:
    """Set up application logging configuration."""
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s [%(levelname)s] %(message)s",
        handlers=[
            logging.FileHandler("backtest.log"),
            logging.StreamHandler()
        ]
    )
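

# Minimal usage sketch: a hedged example of how this application could be
# driven if the module were run directly, using only the names defined above.
# The actual entry point (argument parsing, config file selection, debug
# toggling) is an assumption and may live elsewhere in the project.
if __name__ == "__main__":
    setup_logging()
    app = BacktestApplication()  # optionally pass a config path, e.g. BacktestApplication("path/to/config")
    app.run(debug=False)         # debug=True runs tasks sequentially with plotting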