#!/usr/bin/env python3 """ Strategy Parameter Optimization Script for IncrementalTrader This script provides comprehensive parameter optimization for trading strategies, specifically designed for testing MetaTrend strategy with various configurations including supertrend parameters, timeframes, and risk management settings. Features: - Parallel execution using multiple CPU cores - Configurable parameter grids for strategy and risk management - Comprehensive results analysis and reporting - Support for custom optimization metrics - Detailed logging and progress tracking - Individual strategy plotting and analysis Usage: python tasks/strategy_parameter_optimization.py --help """ import os import sys import argparse import logging import json import time import traceback from datetime import datetime, timedelta from typing import Dict, List, Any, Optional, Tuple from concurrent.futures import ProcessPoolExecutor, as_completed from itertools import product import pandas as pd import numpy as np from tqdm import tqdm # Import plotting libraries for result visualization try: import matplotlib.pyplot as plt import seaborn as sns plt.style.use('default') PLOTTING_AVAILABLE = True except ImportError: PLOTTING_AVAILABLE = False # Add project root to path project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, project_root) # Import IncrementalTrader components from IncrementalTrader.backtester import IncBacktester, BacktestConfig from IncrementalTrader.backtester.utils import DataLoader, SystemUtils, ResultsSaver from IncrementalTrader.strategies import MetaTrendStrategy from IncrementalTrader.trader import IncTrader # Set up logging logging.basicConfig( level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', handlers=[ logging.StreamHandler(sys.stdout), logging.FileHandler('optimization.log') ] ) logger = logging.getLogger(__name__) # Reduce verbosity for entry/exit logging logging.getLogger('IncrementalTrader.strategies').setLevel(logging.WARNING) logging.getLogger('IncrementalTrader.trader').setLevel(logging.WARNING) class StrategyOptimizer: """ Advanced parameter optimization for IncrementalTrader strategies. This class provides comprehensive parameter optimization with parallel processing, sensitivity analysis, and detailed result reporting. """ def __init__(self): """Initialize the StrategyOptimizer.""" # Initialize utilities self.system_utils = SystemUtils() # Session tracking self.session_start_time = datetime.now() self.optimization_results = [] logger.info(f"StrategyOptimizer initialized") logger.info(f"System info: {self.system_utils.get_system_info()}") def generate_parameter_combinations(self, params_dict: Dict[str, List]) -> List[Dict[str, Dict]]: """ Generate all possible parameter combinations. Args: params_dict: Dictionary with strategy_params and trader_params lists Returns: List of parameter combinations """ strategy_params = params_dict.get('strategy_params', {}) trader_params = params_dict.get('trader_params', {}) # Generate all combinations combinations = [] # Get all strategy parameter combinations strategy_keys = list(strategy_params.keys()) strategy_values = list(strategy_params.values()) trader_keys = list(trader_params.keys()) trader_values = list(trader_params.values()) for strategy_combo in product(*strategy_values): strategy_dict = dict(zip(strategy_keys, strategy_combo)) for trader_combo in product(*trader_values): trader_dict = dict(zip(trader_keys, trader_combo)) combinations.append({ 'strategy_params': strategy_dict, 'trader_params': trader_dict }) return combinations def get_quick_test_params(self) -> Dict[str, List]: """ Get parameters for quick testing (smaller parameter space for faster execution). Returns: Dictionary with parameter ranges for quick testing """ return { "strategy_params": { "supertrend_periods": [[12, 10], [10, 8]], # Only 2 period combinations "supertrend_multipliers": [[3.0, 1.0], [2.0, 1.5]], # Only 2 multiplier combinations "min_trend_agreement": [0.5, 0.8], # Only 2 agreement levels "timeframe": ["5min", "15min"] # Only 2 timeframes }, "trader_params": { "stop_loss_pct": [0.02, 0.05], # Only 2 stop loss levels "portfolio_percent_per_trade": [0.8, 0.9] # Only 2 position sizes } } def get_comprehensive_params(self) -> Dict[str, List]: """ Get parameters for comprehensive optimization (larger parameter space). Returns: Dictionary with parameter ranges for comprehensive optimization """ return { "strategy_params": { "supertrend_periods": [ [12, 10, 11], [10, 8, 9], [14, 12, 13], [16, 14, 15], [20, 18, 19] ], "supertrend_multipliers": [ [3.0, 1.0, 2.0], [2.5, 1.5, 2.0], [3.5, 2.0, 2.5], [2.0, 1.0, 1.5], [4.0, 2.5, 3.0] ], "min_trend_agreement": [0.33, 0.5, 0.67, 0.8, 1.0], "timeframe": ["1min", "5min", "15min", "30min", "1h"] }, "trader_params": { "stop_loss_pct": [0.01, 0.015, 0.02, 0.025, 0.03, 0.04, 0.05], "portfolio_percent_per_trade": [0.1, 0.2, 0.3, 0.5, 0.8, 0.9, 1.0] } } def run_single_backtest(self, params: Dict[str, Any]) -> Dict[str, Any]: """ Run a single backtest with given parameters. Args: params: Dictionary containing all parameters for the backtest Returns: Dictionary with backtest results """ try: start_time = time.time() # Extract parameters strategy_params = params['strategy_params'] trader_params = params['trader_params'] data_file = params['data_file'] start_date = params['start_date'] end_date = params['end_date'] data_dir = params['data_dir'] # Create strategy name for identification strategy_name = f"MetaTrend_TF{strategy_params['timeframe']}_ST{len(strategy_params['supertrend_periods'])}_SL{trader_params['stop_loss_pct']}_POS{trader_params['portfolio_percent_per_trade']}" # Create strategy strategy = MetaTrendStrategy(name="metatrend", params=strategy_params) # Create backtest config (only with BacktestConfig-supported parameters) config = BacktestConfig( data_file=data_file, start_date=start_date, end_date=end_date, initial_usd=10000, data_dir=data_dir, stop_loss_pct=trader_params.get('stop_loss_pct', 0.0) ) # Create backtester backtester = IncBacktester(config) # Run backtest with trader-specific parameters results = backtester.run_single_strategy(strategy, trader_params) # Calculate additional metrics end_time = time.time() backtest_duration = end_time - start_time # Format results formatted_results = { "success": True, "strategy_name": strategy_name, "strategy_params": strategy_params, "trader_params": trader_params, "initial_usd": results["initial_usd"], "final_usd": results["final_usd"], "profit_ratio": results["profit_ratio"], "n_trades": results["n_trades"], "win_rate": results["win_rate"], "max_drawdown": results["max_drawdown"], "avg_trade": results["avg_trade"], "total_fees_usd": results["total_fees_usd"], "backtest_duration_seconds": backtest_duration, "data_points_processed": results.get("data_points", 0), "warmup_complete": results.get("warmup_complete", False), "trades": results.get("trades", []) } return formatted_results except Exception as e: logger.error(f"Error in backtest {params.get('strategy_params', {}).get('timeframe', 'unknown')}: {e}") return { "success": False, "error": str(e), "strategy_name": strategy_name if 'strategy_name' in locals() else "Unknown", "strategy_params": params.get('strategy_params', {}), "trader_params": params.get('trader_params', {}), "traceback": traceback.format_exc() } def optimize_parallel(self, params_dict: Dict[str, List], data_file: str, start_date: str, end_date: str, data_dir: str = "data", max_workers: Optional[int] = None) -> List[Dict[str, Any]]: """ Run parameter optimization using parallel processing with progress tracking. Args: params_dict: Dictionary with parameter ranges data_file: Data file for backtesting start_date: Start date for backtesting end_date: End date for backtesting data_dir: Directory containing data files max_workers: Maximum number of worker processes Returns: List of backtest results """ # Generate parameter combinations param_combinations = self.generate_parameter_combinations(params_dict) total_combinations = len(param_combinations) logger.info(f"Starting optimization with {total_combinations} parameter combinations") logger.info(f"Using {max_workers or self.system_utils.get_optimal_workers()} worker processes") # Prepare jobs jobs = [] for combo in param_combinations: job_params = { 'strategy_params': combo['strategy_params'], 'trader_params': combo['trader_params'], 'data_file': data_file, 'start_date': start_date, 'end_date': end_date, 'data_dir': data_dir } jobs.append(job_params) # Run parallel optimization with progress bar results = [] failed_jobs = [] max_workers = max_workers or self.system_utils.get_optimal_workers() with ProcessPoolExecutor(max_workers=max_workers) as executor: # Submit all jobs future_to_params = {executor.submit(self.run_single_backtest, job): job for job in jobs} # Process results with progress bar with tqdm(total=total_combinations, desc="Optimizing strategies", unit="strategy") as pbar: for future in as_completed(future_to_params): try: result = future.result(timeout=300) # 5 minute timeout per job results.append(result) if result['success']: pbar.set_postfix({ 'Success': f"{len([r for r in results if r['success']])}/{len(results)}", 'Best Profit': f"{max([r.get('profit_ratio', 0) for r in results if r['success']], default=0):.1%}" }) else: failed_jobs.append(future_to_params[future]) except Exception as e: logger.error(f"Job failed with exception: {e}") failed_jobs.append(future_to_params[future]) results.append({ "success": False, "error": f"Job exception: {e}", "strategy_name": "Failed", "strategy_params": future_to_params[future].get('strategy_params', {}), "trader_params": future_to_params[future].get('trader_params', {}) }) pbar.update(1) # Log summary successful_results = [r for r in results if r['success']] logger.info(f"Optimization completed: {len(successful_results)}/{total_combinations} successful") if failed_jobs: logger.warning(f"{len(failed_jobs)} jobs failed") return results def main(): """Main function for running parameter optimization.""" parser = argparse.ArgumentParser(description="Strategy Parameter Optimization") parser.add_argument("--data-file", type=str, default="btcusd_1-min_data.csv", help="Data file for backtesting") parser.add_argument("--data-dir", type=str, default="data", help="Directory containing data files") parser.add_argument("--results-dir", type=str, default="results", help="Directory for saving results") parser.add_argument("--start-date", type=str, default="2023-01-01", help="Start date for backtesting (YYYY-MM-DD)") parser.add_argument("--end-date", type=str, default="2023-01-31", help="End date for backtesting (YYYY-MM-DD)") parser.add_argument("--max-workers", type=int, default=None, help="Maximum number of worker processes") parser.add_argument("--quick-test", action="store_true", help="Run quick test with smaller parameter space") parser.add_argument("--custom-params", type=str, default=None, help="Path to custom parameter configuration JSON file") args = parser.parse_args() # Adjust dates for quick test - use only 3 days for very fast testing if args.quick_test: args.start_date = "2023-01-01" args.end_date = "2023-01-03" # Only 3 days for quick test logger.info("Quick test mode: Using shortened time period (2023-01-01 to 2023-01-03)") # Create optimizer optimizer = StrategyOptimizer() # Determine parameter configuration if args.custom_params: # Load custom parameters from JSON file if not os.path.exists(args.custom_params): logger.error(f"Custom parameter file not found: {args.custom_params}") return with open(args.custom_params, 'r') as f: params_dict = json.load(f) logger.info(f"Using custom parameters from: {args.custom_params}") elif args.quick_test: # Quick test parameters params_dict = optimizer.get_quick_test_params() logger.info("Using quick test parameter configuration") else: # Comprehensive optimization parameters params_dict = optimizer.get_comprehensive_params() logger.info("Using comprehensive optimization parameter configuration") # Log optimization details total_combinations = len(optimizer.generate_parameter_combinations(params_dict)) logger.info(f"Total parameter combinations: {total_combinations}") logger.info(f"Data file: {args.data_file}") logger.info(f"Date range: {args.start_date} to {args.end_date}") logger.info(f"Results directory: {args.results_dir}") # Check if data file exists data_path = os.path.join(args.data_dir, args.data_file) if not os.path.exists(data_path): logger.error(f"Data file not found: {data_path}") return # Create results directory os.makedirs(args.results_dir, exist_ok=True) try: # Run optimization session_start_time = datetime.now() logger.info("Starting parameter optimization...") results = optimizer.optimize_parallel( params_dict=params_dict, data_file=args.data_file, start_date=args.start_date, end_date=args.end_date, data_dir=args.data_dir, max_workers=args.max_workers ) # Save results saver = ResultsSaver(args.results_dir) # Generate base filename timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") test_type = "quick_test" if args.quick_test else "comprehensive" base_filename = f"metatrend_optimization_{test_type}" # Save comprehensive results saver.save_comprehensive_results( results=results, base_filename=base_filename, session_start_time=session_start_time ) # Calculate and display summary statistics successful_results = [r for r in results if r['success']] if successful_results: # Sort by profit ratio sorted_results = sorted(successful_results, key=lambda x: x['profit_ratio'], reverse=True) print(f"\nOptimization Summary:") print(f" Successful runs: {len(successful_results)}/{len(results)}") print(f" Total duration: {(datetime.now() - session_start_time).total_seconds():.1f} seconds") print(f"\nTop 5 Strategies:") for i, result in enumerate(sorted_results[:5], 1): print(f" {i}. {result['strategy_name']}") print(f" Profit: {result['profit_ratio']:.1%} (${result['final_usd']:.2f})") print(f" Trades: {result['n_trades']} | Win Rate: {result['win_rate']:.1%}") print(f" Max DD: {result['max_drawdown']:.1%}") else: print(f"\nNo successful optimization runs completed") logger.error("All optimization runs failed") print(f"\nFull results saved to: {args.results_dir}/") except KeyboardInterrupt: logger.info("Optimization interrupted by user") except Exception as e: logger.error(f"Optimization failed: {e}") traceback.print_exc() if __name__ == "__main__": main()