Compare commits


No commits in common. "main" and "xgboost" have entirely different histories.

27 changed files with 469 additions and 4792 deletions

View File

@@ -1,8 +0,0 @@
---
description:
globs:
alwaysApply: true
---
- use UV for package management
- ./docs folder for the documentation and module descriptions; update related files if logic changes

.gitignore vendored (362 changed lines)
View File

@@ -1,181 +1,181 @@
# ---> Python
/data/*.db
/credentials/*.json
*.csv
*.png
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
/data/*.npy
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
An introduction to trading cycles.pdf
An introduction to trading cycles.txt
README.md
.vscode/launch.json
data/btcusd_1-day_data.csv
data/btcusd_1-min_data.csv

View File

@@ -1,29 +0,0 @@
{
  "start_date": "2025-01-01",
  "stop_date": null,
  "initial_usd": 10000,
  "timeframes": ["1min"],
  "strategies": [
    {
      "name": "bbrs",
      "weight": 1.0,
      "params": {
        "bb_width": 0.05,
        "bb_period": 20,
        "rsi_period": 14,
        "trending_rsi_threshold": [30, 70],
        "trending_bb_multiplier": 2.5,
        "sideways_rsi_threshold": [40, 60],
        "sideways_bb_multiplier": 1.8,
        "strategy_name": "MarketRegimeStrategy",
        "SqueezeStrategy": true,
        "stop_loss_pct": 0.05
      }
    }
  ],
  "combination_rules": {
    "entry": "any",
    "exit": "any",
    "min_confidence": 0.5
  }
}

View File

@@ -1,29 +0,0 @@
{
  "start_date": "2024-01-01",
  "stop_date": "2024-01-31",
  "initial_usd": 10000,
  "timeframes": ["1min"],
  "stop_loss_pcts": [0.05],
  "strategies": [
    {
      "name": "bbrs",
      "weight": 1.0,
      "params": {
        "bb_width": 0.05,
        "bb_period": 20,
        "rsi_period": 14,
        "trending_rsi_threshold": [30, 70],
        "trending_bb_multiplier": 2.5,
        "sideways_rsi_threshold": [40, 60],
        "sideways_bb_multiplier": 1.8,
        "strategy_name": "MarketRegimeStrategy",
        "SqueezeStrategy": true
      }
    }
  ],
  "combination_rules": {
    "entry": "any",
    "exit": "any",
    "min_confidence": 0.5
  }
}

View File

@@ -1,37 +0,0 @@
{
  "start_date": "2025-03-01",
  "stop_date": "2025-03-15",
  "initial_usd": 10000,
  "timeframes": ["15min"],
  "strategies": [
    {
      "name": "default",
      "weight": 0.6,
      "params": {
        "timeframe": "15min",
        "stop_loss_pct": 0.03
      }
    },
    {
      "name": "bbrs",
      "weight": 0.4,
      "params": {
        "bb_width": 0.05,
        "bb_period": 20,
        "rsi_period": 14,
        "trending_rsi_threshold": [30, 70],
        "trending_bb_multiplier": 2.5,
        "sideways_rsi_threshold": [40, 60],
        "sideways_bb_multiplier": 1.8,
        "strategy_name": "MarketRegimeStrategy",
        "SqueezeStrategy": true,
        "stop_loss_pct": 0.05
      }
    }
  ],
  "combination_rules": {
    "entry": "weighted_consensus",
    "exit": "any",
    "min_confidence": 0.6
  }
}

View File

@@ -1,21 +0,0 @@
{
  "start_date": "2024-01-01",
  "stop_date": null,
  "initial_usd": 10000,
  "timeframes": ["15min"],
  "strategies": [
    {
      "name": "default",
      "weight": 1.0,
      "params": {
        "timeframe": "15min",
        "stop_loss_pct": 0.03
      }
    }
  ],
  "combination_rules": {
    "entry": "any",
    "exit": "any",
    "min_confidence": 0.5
  }
}

View File

@@ -1,21 +0,0 @@
{
  "start_date": "2024-01-01",
  "stop_date": "2024-01-31",
  "initial_usd": 10000,
  "timeframes": ["5min"],
  "strategies": [
    {
      "name": "default",
      "weight": 1.0,
      "params": {
        "timeframe": "5min",
        "stop_loss_pct": 0.03
      }
    }
  ],
  "combination_rules": {
    "entry": "any",
    "exit": "any",
    "min_confidence": 0.5
  }
}
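
The configs above share one shape: a date range, initial_usd, a list of weighted strategies with their params, and combination_rules that control how signals are merged. A minimal sketch of loading such a file, using only the standard library; the file path and the helper name are illustrative and not taken from this diff:

import json

def load_backtest_config(path):
    """Load one of the backtest configs above and sanity-check its shape."""
    with open(path) as f:
        config = json.load(f)
    # Keys common to every config shown in this diff.
    for key in ("start_date", "initial_usd", "timeframes", "strategies", "combination_rules"):
        if key not in config:
            raise KeyError(f"missing required config key: {key}")
    # Weights matter for the weighted_consensus entry rule, so flag odd totals.
    total = sum(s.get("weight", 1.0) for s in config["strategies"])
    if abs(total - 1.0) > 1e-9:
        print(f"note: strategy weights sum to {total}, not 1.0")
    return config

cfg = load_backtest_config("configs/bbrs_1min.json")  # illustrative path
print(cfg["combination_rules"]["entry"], [s["name"] for s in cfg["strategies"]])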

View File

@@ -1,415 +0,0 @@
import pandas as pd
import numpy as np
from cycles.Analysis.boillinger_band import BollingerBands
from cycles.Analysis.rsi import RSI
from cycles.utils.data_utils import aggregate_to_daily, aggregate_to_hourly, aggregate_to_minutes
class BollingerBandsStrategy:
def __init__(self, config = None, logging = None):
if config is None:
raise ValueError("Config must be provided.")
self.config = config
self.logging = logging
def _ensure_datetime_index(self, data):
"""
Ensure the DataFrame has a DatetimeIndex for proper time-series operations.
If the DataFrame has a 'timestamp' column but not a DatetimeIndex, convert it.
Args:
data (DataFrame): Input DataFrame
Returns:
DataFrame: DataFrame with proper DatetimeIndex
"""
if data.empty:
return data
# Check if we have a DatetimeIndex already
if isinstance(data.index, pd.DatetimeIndex):
return data
# Check if we have a 'timestamp' column that we can use as index
if 'timestamp' in data.columns:
data_copy = data.copy()
# Convert timestamp column to datetime if it's not already
if not pd.api.types.is_datetime64_any_dtype(data_copy['timestamp']):
data_copy['timestamp'] = pd.to_datetime(data_copy['timestamp'])
# Set timestamp as index and drop the column
data_copy = data_copy.set_index('timestamp')
if self.logging:
self.logging.info("Converted 'timestamp' column to DatetimeIndex for strategy processing.")
return data_copy
# If we have a regular index but it might be datetime strings, try to convert
try:
if data.index.dtype == 'object':
data_copy = data.copy()
data_copy.index = pd.to_datetime(data_copy.index)
if self.logging:
self.logging.info("Converted index to DatetimeIndex for strategy processing.")
return data_copy
except:
pass
# If we can't create a proper DatetimeIndex, warn and return as-is
if self.logging:
self.logging.warning("Could not create DatetimeIndex for strategy processing. Time-based operations may fail.")
return data
def run(self, data, strategy_name):
# Ensure proper DatetimeIndex before processing
data = self._ensure_datetime_index(data)
if strategy_name == "MarketRegimeStrategy":
result = self.MarketRegimeStrategy(data)
return self.standardize_output(result, strategy_name)
elif strategy_name == "CryptoTradingStrategy":
result = self.CryptoTradingStrategy(data)
return self.standardize_output(result, strategy_name)
else:
if self.logging is not None:
self.logging.warning(f"Strategy {strategy_name} not found. Using no_strategy instead.")
return self.no_strategy(data)
def standardize_output(self, data, strategy_name):
"""
Standardize column names across different strategies to ensure consistent plotting and analysis
Args:
data (DataFrame): Strategy output DataFrame
strategy_name (str): Name of the strategy that generated this data
Returns:
DataFrame: Data with standardized column names
"""
if data.empty:
return data
# Create a copy to avoid modifying the original
standardized = data.copy()
# Standardize column names based on strategy
if strategy_name == "MarketRegimeStrategy":
# MarketRegimeStrategy already has standard column names for most fields
# Just ensure all standard columns exist
pass
elif strategy_name == "CryptoTradingStrategy":
# Map strategy-specific column names to standard names
column_mapping = {
'UpperBand_15m': 'UpperBand',
'LowerBand_15m': 'LowerBand',
'SMA_15m': 'SMA',
'RSI_15m': 'RSI',
'VolumeMA_15m': 'VolumeMA',
# Keep StopLoss and TakeProfit as they are
}
# Add standard columns from mapped columns
for old_col, new_col in column_mapping.items():
if old_col in standardized.columns and new_col not in standardized.columns:
standardized[new_col] = standardized[old_col]
# Add additional strategy-specific data as metadata columns
if 'UpperBand_1h' in standardized.columns:
standardized['UpperBand_1h_meta'] = standardized['UpperBand_1h']
if 'LowerBand_1h' in standardized.columns:
standardized['LowerBand_1h_meta'] = standardized['LowerBand_1h']
# Ensure all strategies have BBWidth if possible
if 'BBWidth' not in standardized.columns and 'UpperBand' in standardized.columns and 'LowerBand' in standardized.columns:
standardized['BBWidth'] = (standardized['UpperBand'] - standardized['LowerBand']) / standardized['SMA'] if 'SMA' in standardized.columns else np.nan
return standardized
def no_strategy(self, data):
"""No strategy: returns False for both buy and sell conditions"""
buy_condition = pd.Series([False] * len(data), index=data.index)
sell_condition = pd.Series([False] * len(data), index=data.index)
return buy_condition, sell_condition
def rsi_bollinger_confirmation(self, rsi, window=14, std_mult=1.5):
"""Calculate RSI Bollinger Bands for confirmation
Args:
rsi (Series): RSI values
window (int): Rolling window for SMA
std_mult (float): Standard deviation multiplier
Returns:
tuple: (oversold condition, overbought condition)
"""
valid_rsi = ~rsi.isna()
if not valid_rsi.any():
# Return empty Series if no valid RSI data
return pd.Series(False, index=rsi.index), pd.Series(False, index=rsi.index)
rsi_sma = rsi.rolling(window).mean()
rsi_std = rsi.rolling(window).std()
upper_rsi_band = rsi_sma + std_mult * rsi_std
lower_rsi_band = rsi_sma - std_mult * rsi_std
return (rsi < lower_rsi_band), (rsi > upper_rsi_band)
def MarketRegimeStrategy(self, data):
"""Optimized Bollinger Bands + RSI Strategy for Crypto Trading (Including Sideways Markets)
with adaptive Bollinger Bands
This advanced strategy combines volatility analysis, momentum confirmation, and regime detection
to adapt to Bitcoin's unique market conditions.
Entry Conditions:
- Trending Market (Breakout Mode):
Buy: Price < Lower Band, RSI < 50, Volume Spike (≥1.5× 20D Avg)
Sell: Price > Upper Band, RSI > 50, Volume Spike
- Sideways Market (Mean Reversion):
Buy: Price ≤ Lower Band, RSI ≤ 40
Sell: Price ≥ Upper Band, RSI ≥ 60
Enhanced with RSI Bollinger Squeeze for signal confirmation when enabled.
Returns:
DataFrame: A unified DataFrame containing original data, BB, RSI, and signals.
"""
data = aggregate_to_hourly(data, 1)
# data = aggregate_to_daily(data)
# Calculate Bollinger Bands
bb_calculator = BollingerBands(config=self.config)
# Ensure we are working with a copy to avoid modifying the original DataFrame upstream
data_bb = bb_calculator.calculate(data.copy())
# Calculate RSI
rsi_calculator = RSI(config=self.config)
# Use the original data's copy for RSI calculation as well, to maintain index integrity
data_with_rsi = rsi_calculator.calculate(data.copy(), price_column='close')
# Combine BB and RSI data into a single DataFrame for signal generation
# Ensure indices are aligned; they should be as both are from data.copy()
if 'RSI' in data_with_rsi.columns:
data_bb['RSI'] = data_with_rsi['RSI']
else:
# If RSI wasn't calculated (e.g., not enough data), create a dummy column with NaNs
# to prevent errors later, though signals won't be generated.
data_bb['RSI'] = pd.Series(index=data_bb.index, dtype=float)
if self.logging:
self.logging.warning("RSI column not found or not calculated. Signals relying on RSI may not be generated.")
# Initialize conditions as all False
buy_condition = pd.Series(False, index=data_bb.index)
sell_condition = pd.Series(False, index=data_bb.index)
# Create masks for different market regimes
# MarketRegime is expected to be in data_bb from BollingerBands calculation
sideways_mask = data_bb['MarketRegime'] > 0
trending_mask = data_bb['MarketRegime'] <= 0
valid_data_mask = ~data_bb['MarketRegime'].isna() # Handle potential NaN values
# Calculate volume spike (≥1.5× 20D Avg)
# 'volume' column should be present in the input 'data', and thus in 'data_bb'
if 'volume' in data_bb.columns:
volume_20d_avg = data_bb['volume'].rolling(window=20).mean()
volume_spike = data_bb['volume'] >= 1.5 * volume_20d_avg
# Additional volume contraction filter for sideways markets
volume_30d_avg = data_bb['volume'].rolling(window=30).mean()
volume_contraction = data_bb['volume'] < 0.7 * volume_30d_avg
else:
# If volume data is not available, assume no volume spike
volume_spike = pd.Series(False, index=data_bb.index)
volume_contraction = pd.Series(False, index=data_bb.index)
if self.logging is not None:
self.logging.warning("Volume data not available. Volume conditions will not be triggered.")
# Calculate RSI Bollinger Squeeze confirmation
# RSI column is now part of data_bb
if 'RSI' in data_bb.columns and not data_bb['RSI'].isna().all():
oversold_rsi, overbought_rsi = self.rsi_bollinger_confirmation(data_bb['RSI'])
else:
oversold_rsi = pd.Series(False, index=data_bb.index)
overbought_rsi = pd.Series(False, index=data_bb.index)
if self.logging is not None and ('RSI' not in data_bb.columns or data_bb['RSI'].isna().all()):
self.logging.warning("RSI data not available or all NaN. RSI Bollinger Squeeze will not be triggered.")
# Calculate conditions for sideways market (Mean Reversion)
if sideways_mask.any():
sideways_buy = (data_bb['close'] <= data_bb['LowerBand']) & (data_bb['RSI'] <= 40)
sideways_sell = (data_bb['close'] >= data_bb['UpperBand']) & (data_bb['RSI'] >= 60)
# Add enhanced confirmation for sideways markets
if self.config.get("SqueezeStrategy", False):
sideways_buy = sideways_buy & oversold_rsi & volume_contraction
sideways_sell = sideways_sell & overbought_rsi & volume_contraction
# Apply only where market is sideways and data is valid
buy_condition = buy_condition | (sideways_buy & sideways_mask & valid_data_mask)
sell_condition = sell_condition | (sideways_sell & sideways_mask & valid_data_mask)
# Calculate conditions for trending market (Breakout Mode)
if trending_mask.any():
trending_buy = (data_bb['close'] < data_bb['LowerBand']) & (data_bb['RSI'] < 50) & volume_spike
trending_sell = (data_bb['close'] > data_bb['UpperBand']) & (data_bb['RSI'] > 50) & volume_spike
# Add enhanced confirmation for trending markets
if self.config.get("SqueezeStrategy", False):
trending_buy = trending_buy & oversold_rsi
trending_sell = trending_sell & overbought_rsi
# Apply only where market is trending and data is valid
buy_condition = buy_condition | (trending_buy & trending_mask & valid_data_mask)
sell_condition = sell_condition | (trending_sell & trending_mask & valid_data_mask)
# Add buy/sell conditions as columns to the DataFrame
data_bb['BuySignal'] = buy_condition
data_bb['SellSignal'] = sell_condition
return data_bb
# Helper functions for CryptoTradingStrategy
def _volume_confirmation_crypto(self, current_volume, volume_ma):
"""Check volume surge against moving average for crypto strategy"""
if pd.isna(current_volume) or pd.isna(volume_ma) or volume_ma == 0:
return False
return current_volume > 1.5 * volume_ma
def _multi_timeframe_signal_crypto(self, current_price, rsi_value,
lower_band_15m, lower_band_1h,
upper_band_15m, upper_band_1h):
"""Generate signals with multi-timeframe confirmation for crypto strategy"""
# Ensure all inputs are not NaN before making comparisons
if any(pd.isna(val) for val in [current_price, rsi_value, lower_band_15m, lower_band_1h, upper_band_15m, upper_band_1h]):
return False, False
buy_signal = (current_price <= lower_band_15m and
current_price <= lower_band_1h and
rsi_value < 35)
sell_signal = (current_price >= upper_band_15m and
current_price >= upper_band_1h and
rsi_value > 65)
return buy_signal, sell_signal
def CryptoTradingStrategy(self, data):
"""Core trading algorithm with risk management
- Multi-Timeframe Confirmation: Combines 15-minute and 1-hour Bollinger Bands
- Adaptive Volatility Filtering: Uses ATR for dynamic stop-loss/take-profit
- Volume Spike Detection: Requires 1.5× average volume for confirmation
- EMA-Smoothed RSI: Reduces false signals in choppy markets
- Regime-Adaptive Parameters:
- Trending: 2σ bands, RSI 35/65 thresholds
- Sideways: 1.8σ bands, RSI 40/60 thresholds
- Strategy Logic:
- Long Entry: Price ≤ both 15m & 1h lower bands + RSI < 35 + Volume surge
- Short Entry: Price ≥ both 15m & 1h upper bands + RSI > 65 + Volume surge
- Exit: 2:1 risk-reward ratio with ATR-based stops
"""
if data.empty or 'close' not in data.columns or 'volume' not in data.columns:
if self.logging:
self.logging.warning("CryptoTradingStrategy: Input data is empty or missing 'close'/'volume' columns.")
return pd.DataFrame() # Return empty DataFrame if essential data is missing
print(f"data: {data.head()}")
# Aggregate data
data_15m = aggregate_to_minutes(data.copy(), 15)
data_1h = aggregate_to_hourly(data.copy(), 1)
if data_15m.empty or data_1h.empty:
if self.logging:
self.logging.warning("CryptoTradingStrategy: Not enough data for 15m or 1h aggregation.")
return pd.DataFrame() # Return original data if aggregation fails
# --- Calculate indicators for 15m timeframe ---
# Ensure 'close' and 'volume' exist before trying to access them
if 'close' not in data_15m.columns or 'volume' not in data_15m.columns:
if self.logging: self.logging.warning("CryptoTradingStrategy: 15m data missing close or volume.")
return data # Or an empty DF
price_data_15m = data_15m['close']
volume_data_15m = data_15m['volume']
upper_15m, sma_15m, lower_15m = BollingerBands.calculate_custom_bands(price_data_15m, window=20, num_std=2, min_periods=1)
# Use the static method from RSI class
rsi_15m = RSI.calculate_custom_rsi(price_data_15m, window=14, smoothing='EMA')
volume_ma_15m = volume_data_15m.rolling(window=20, min_periods=1).mean()
# Add 15m indicators to data_15m DataFrame
data_15m['UpperBand_15m'] = upper_15m
data_15m['SMA_15m'] = sma_15m
data_15m['LowerBand_15m'] = lower_15m
data_15m['RSI_15m'] = rsi_15m
data_15m['VolumeMA_15m'] = volume_ma_15m
# --- Calculate indicators for 1h timeframe ---
if 'close' not in data_1h.columns:
if self.logging: self.logging.warning("CryptoTradingStrategy: 1h data missing close.")
return data_15m # Return 15m data as 1h failed
price_data_1h = data_1h['close']
# Use the static method from BollingerBands class, setting min_periods to 1 explicitly
upper_1h, _, lower_1h = BollingerBands.calculate_custom_bands(price_data_1h, window=50, num_std=1.8, min_periods=1)
# Add 1h indicators to a temporary DataFrame to be merged
df_1h_indicators = pd.DataFrame(index=data_1h.index)
df_1h_indicators['UpperBand_1h'] = upper_1h
df_1h_indicators['LowerBand_1h'] = lower_1h
# Merge 1h indicators into 15m DataFrame
# Use reindex and ffill to propagate 1h values to 15m intervals
data_15m = pd.merge(data_15m, df_1h_indicators, left_index=True, right_index=True, how='left')
data_15m['UpperBand_1h'] = data_15m['UpperBand_1h'].ffill()
data_15m['LowerBand_1h'] = data_15m['LowerBand_1h'].ffill()
# --- Generate Signals ---
buy_signals = pd.Series(False, index=data_15m.index)
sell_signals = pd.Series(False, index=data_15m.index)
stop_loss_levels = pd.Series(np.nan, index=data_15m.index)
take_profit_levels = pd.Series(np.nan, index=data_15m.index)
# ATR calculation needs a rolling window, apply to 'high', 'low', 'close' if available
# Using a simplified ATR for now: std of close prices over the last 4 15-min periods (1 hour)
if 'close' in data_15m.columns:
atr_series = price_data_15m.rolling(window=4, min_periods=1).std()
else:
atr_series = pd.Series(0, index=data_15m.index) # No ATR if close is missing
for i in range(len(data_15m)):
if i == 0: continue # Skip first row for volume_ma_15m[i-1]
current_price = data_15m['close'].iloc[i]
current_volume = data_15m['volume'].iloc[i]
rsi_val = data_15m['RSI_15m'].iloc[i]
lb_15m = data_15m['LowerBand_15m'].iloc[i]
ub_15m = data_15m['UpperBand_15m'].iloc[i]
lb_1h = data_15m['LowerBand_1h'].iloc[i]
ub_1h = data_15m['UpperBand_1h'].iloc[i]
vol_ma = data_15m['VolumeMA_15m'].iloc[i-1] # Use previous period's MA
atr = atr_series.iloc[i]
vol_confirm = self._volume_confirmation_crypto(current_volume, vol_ma)
buy_signal, sell_signal = self._multi_timeframe_signal_crypto(
current_price, rsi_val, lb_15m, lb_1h, ub_15m, ub_1h
)
if buy_signal and vol_confirm:
buy_signals.iloc[i] = True
if not pd.isna(atr) and atr > 0:
stop_loss_levels.iloc[i] = current_price - 2 * atr
take_profit_levels.iloc[i] = current_price + 4 * atr
elif sell_signal and vol_confirm:
sell_signals.iloc[i] = True
if not pd.isna(atr) and atr > 0:
stop_loss_levels.iloc[i] = current_price + 2 * atr
take_profit_levels.iloc[i] = current_price - 4 * atr
data_15m['BuySignal'] = buy_signals
data_15m['SellSignal'] = sell_signals
data_15m['StopLoss'] = stop_loss_levels
data_15m['TakeProfit'] = take_profit_levels
return data_15m
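
As a rough usage sketch of the deleted strategy class: it expects a config dict with the keys read by the BollingerBands and RSI classes in the next files (bb_period, bb_width, nested std-dev multipliers, rsi_period, SqueezeStrategy), an optional logger, and a price DataFrame with timestamp, close and volume columns. The nesting of the multiplier keys and the CSV path are assumptions inferred from the surrounding code and the .gitignore, not a documented API:

import logging
import pandas as pd

# from <module not shown in this diff> import BollingerBandsStrategy

logging.basicConfig(level=logging.INFO)

config = {
    "bb_period": 20,
    "bb_width": 0.05,
    "rsi_period": 14,
    "SqueezeStrategy": True,
    "trending": {"bb_std_dev_multiplier": 2.5},  # assumed nesting, mirrors how BollingerBands.__init__ indexes the config
    "sideways": {"bb_std_dev_multiplier": 1.8},
}

data = pd.read_csv("data/btcusd_1-min_data.csv")  # file name appears in the .gitignore above
strategy = BollingerBandsStrategy(config=config, logging=logging)
signals = strategy.run(data, "MarketRegimeStrategy")
print(signals[["close", "BuySignal", "SellSignal"]].tail())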

View File

@@ -1,29 +1,26 @@
import pandas as pd
import numpy as np
class BollingerBands:
"""
Calculates Bollinger Bands for given financial data.
"""
def __init__(self, config):
def __init__(self, period: int = 20, std_dev_multiplier: float = 2.0):
"""
Initializes the BollingerBands calculator.
Args:
period (int): The period for the moving average and standard deviation.
std_dev_multiplier (float): The number of standard deviations for the upper and lower bands.
bb_width (float): The width of the Bollinger Bands.
"""
if config['bb_period'] <= 0:
if period <= 0:
raise ValueError("Period must be a positive integer.")
if config['trending']['bb_std_dev_multiplier'] <= 0 or config['sideways']['bb_std_dev_multiplier'] <= 0:
if std_dev_multiplier <= 0:
raise ValueError("Standard deviation multiplier must be positive.")
if config['bb_width'] <= 0:
raise ValueError("BB width must be positive.")
self.config = config
self.period = period
self.std_dev_multiplier = std_dev_multiplier
def calculate(self, data_df: pd.DataFrame, price_column: str = 'close', squeeze = False) -> pd.DataFrame:
def calculate(self, data_df: pd.DataFrame, price_column: str = 'close') -> pd.DataFrame:
"""
Calculates Bollinger Bands and adds them to the DataFrame.
@@ -37,109 +34,17 @@ class BollingerBands:
'UpperBand',
'LowerBand'.
"""
# Work on a copy to avoid modifying the original DataFrame passed to the function
data_df = data_df.copy()
if price_column not in data_df.columns:
raise ValueError(f"Price column '{price_column}' not found in DataFrame.")
if not squeeze:
period = self.config['bb_period']
bb_width_threshold = self.config['bb_width']
trending_std_multiplier = self.config['trending']['bb_std_dev_multiplier']
sideways_std_multiplier = self.config['sideways']['bb_std_dev_multiplier']
# Calculate SMA
data_df['SMA'] = data_df[price_column].rolling(window=period).mean()
# Calculate SMA
data_df['SMA'] = data_df[price_column].rolling(window=self.period).mean()
# Calculate Standard Deviation
std_dev = data_df[price_column].rolling(window=period).std()
# Calculate Standard Deviation
std_dev = data_df[price_column].rolling(window=self.period).std()
# Calculate reference Upper and Lower Bands for BBWidth calculation (e.g., using 2.0 std dev)
# This ensures BBWidth is calculated based on a consistent band definition before applying adaptive multipliers.
ref_upper_band = data_df['SMA'] + (2.0 * std_dev)
ref_lower_band = data_df['SMA'] - (2.0 * std_dev)
# Calculate the width of the Bollinger Bands
# Avoid division by zero or NaN if SMA is zero or NaN by replacing with np.nan
data_df['BBWidth'] = np.where(data_df['SMA'] != 0, (ref_upper_band - ref_lower_band) / data_df['SMA'], np.nan)
# Calculate the market regime (1 = sideways, 0 = trending)
# Handle NaN in BBWidth: if BBWidth is NaN, MarketRegime should also be NaN or a default (e.g. trending)
data_df['MarketRegime'] = np.where(data_df['BBWidth'].isna(), np.nan,
(data_df['BBWidth'] < bb_width_threshold).astype(float)) # Use float for NaN compatibility
# Determine the std dev multiplier for each row based on its market regime
conditions = [
data_df['MarketRegime'] == 1, # Sideways market
data_df['MarketRegime'] == 0 # Trending market
]
choices = [
sideways_std_multiplier,
trending_std_multiplier
]
# Default multiplier if MarketRegime is NaN (e.g., use trending or a neutral default like 2.0)
# For now, let's use trending_std_multiplier as default if MarketRegime is NaN.
# This can be adjusted based on desired behavior for periods where regime is undetermined.
row_specific_std_multiplier = np.select(conditions, choices, default=trending_std_multiplier)
# Calculate final Upper and Lower Bands using the row-specific multiplier
data_df['UpperBand'] = data_df['SMA'] + (row_specific_std_multiplier * std_dev)
data_df['LowerBand'] = data_df['SMA'] - (row_specific_std_multiplier * std_dev)
else: # squeeze is True
price_series = data_df[price_column]
# Use the static method for the squeeze case with fixed parameters
upper_band, sma, lower_band = self.calculate_custom_bands(
price_series,
window=14,
num_std=1.5,
min_periods=14 # Match typical squeeze behavior where bands appear after full period
)
data_df['SMA'] = sma
data_df['UpperBand'] = upper_band
data_df['LowerBand'] = lower_band
# BBWidth and MarketRegime are not typically calculated/used in a simple squeeze context by this method
# If needed, they could be added, but the current structure implies they are part of the non-squeeze path.
data_df['BBWidth'] = np.nan
data_df['MarketRegime'] = np.nan
# Calculate Upper and Lower Bands
data_df['UpperBand'] = data_df['SMA'] + (self.std_dev_multiplier * std_dev)
data_df['LowerBand'] = data_df['SMA'] - (self.std_dev_multiplier * std_dev)
return data_df
@staticmethod
def calculate_custom_bands(price_series: pd.Series, window: int = 20, num_std: float = 2.0, min_periods: int = None) -> tuple[pd.Series, pd.Series, pd.Series]:
"""
Calculates Bollinger Bands with specified window and standard deviation multiplier.
Args:
price_series (pd.Series): Series of prices.
window (int): The period for the moving average and standard deviation.
num_std (float): The number of standard deviations for the upper and lower bands.
min_periods (int, optional): Minimum number of observations in window required to have a value.
Defaults to `window` if None.
Returns:
tuple[pd.Series, pd.Series, pd.Series]: Upper band, SMA, Lower band.
"""
if not isinstance(price_series, pd.Series):
raise TypeError("price_series must be a pandas Series.")
if not isinstance(window, int) or window <= 0:
raise ValueError("window must be a positive integer.")
if not isinstance(num_std, (int, float)) or num_std <= 0:
raise ValueError("num_std must be a positive number.")
if min_periods is not None and (not isinstance(min_periods, int) or min_periods <= 0):
raise ValueError("min_periods must be a positive integer if provided.")
actual_min_periods = window if min_periods is None else min_periods
sma = price_series.rolling(window=window, min_periods=actual_min_periods).mean()
std = price_series.rolling(window=window, min_periods=actual_min_periods).std()
# Replace NaN std with 0 to avoid issues if sma is present but std is not (e.g. constant price in window)
std = std.fillna(0)
upper_band = sma + (std * num_std)
lower_band = sma - (std * num_std)
return upper_band, sma, lower_band
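
A short sketch of the simplified constructor on the incoming side of this diff, using the import path seen in the removed strategy file above (that path may differ on the xgboost branch) and synthetic prices purely for illustration:

import pandas as pd
from cycles.Analysis.boillinger_band import BollingerBands

prices = pd.DataFrame({"close": [100 + 0.5 * i for i in range(60)]})

bb = BollingerBands(period=20, std_dev_multiplier=2.0)
with_bands = bb.calculate(prices, price_column="close")
print(with_bands[["close", "SMA", "UpperBand", "LowerBand"]].tail())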

View File

@@ -5,7 +5,7 @@ class RSI:
"""
A class to calculate the Relative Strength Index (RSI).
"""
def __init__(self, config):
def __init__(self, period: int = 14):
"""
Initializes the RSI calculator.
@@ -13,13 +13,13 @@ class RSI:
period (int): The period for RSI calculation. Default is 14.
Must be a positive integer.
"""
if not isinstance(config['rsi_period'], int) or config['rsi_period'] <= 0:
if not isinstance(period, int) or period <= 0:
raise ValueError("Period must be a positive integer.")
self.period = config['rsi_period']
self.period = period
def calculate(self, data_df: pd.DataFrame, price_column: str = 'close') -> pd.DataFrame:
"""
Calculates the RSI (using Wilder's smoothing) and adds it as a column to the input DataFrame.
Calculates the RSI and adds it as a column to the input DataFrame.
Args:
data_df (pd.DataFrame): DataFrame with historical price data.
@@ -35,79 +35,75 @@
if price_column not in data_df.columns:
raise ValueError(f"Price column '{price_column}' not found in DataFrame.")
# Check if data is sufficient for calculation (need period + 1 for one diff calculation)
if len(data_df) < self.period + 1:
print(f"Warning: Data length ({len(data_df)}) is less than RSI period ({self.period}) + 1. RSI will not be calculated meaningfully.")
df_copy = data_df.copy()
df_copy['RSI'] = np.nan # Add an RSI column with NaNs
return df_copy
if len(data_df) < self.period:
print(f"Warning: Data length ({len(data_df)}) is less than RSI period ({self.period}). RSI will not be calculated.")
return data_df.copy()
df = data_df.copy() # Work on a copy
df = data_df.copy()
delta = df[price_column].diff(1)
gain = delta.where(delta > 0, 0)
loss = -delta.where(delta < 0, 0) # Ensure loss is positive
# Calculate initial average gain and loss (SMA)
avg_gain = gain.rolling(window=self.period, min_periods=self.period).mean().iloc[self.period -1:self.period]
avg_loss = loss.rolling(window=self.period, min_periods=self.period).mean().iloc[self.period -1:self.period]
# Calculate subsequent average gains and losses (EMA-like)
# Pre-allocate lists for gains and losses to avoid repeated appending to Series
gains = [0.0] * len(df)
losses = [0.0] * len(df)
if not avg_gain.empty:
gains[self.period -1] = avg_gain.iloc[0]
if not avg_loss.empty:
losses[self.period -1] = avg_loss.iloc[0]
for i in range(self.period, len(df)):
gains[i] = ((gains[i-1] * (self.period - 1)) + gain.iloc[i]) / self.period
losses[i] = ((losses[i-1] * (self.period - 1)) + loss.iloc[i]) / self.period
price_series = df[price_column]
df['avg_gain'] = pd.Series(gains, index=df.index)
df['avg_loss'] = pd.Series(losses, index=df.index)
# Calculate RS
# Handle division by zero: if avg_loss is 0, RS is undefined or infinite.
# If avg_loss is 0 and avg_gain is also 0, RSI is conventionally 50.
# If avg_loss is 0 and avg_gain > 0, RSI is conventionally 100.
rs = df['avg_gain'] / df['avg_loss']
# Call the static custom RSI calculator, defaulting to EMA for Wilder's smoothing
rsi_series = self.calculate_custom_rsi(price_series, window=self.period, smoothing='EMA')
# Calculate RSI
# RSI = 100 - (100 / (1 + RS))
# If avg_loss is 0:
# If avg_gain > 0, RS -> inf, RSI -> 100
# If avg_gain == 0, RS -> NaN (0/0), RSI -> 50 (conventionally, or could be 0 or 100 depending on interpretation)
# We will use a common convention where RSI is 100 if avg_loss is 0 and avg_gain > 0,
# and RSI is 0 if avg_loss is 0 and avg_gain is 0 (or 50, let's use 0 to indicate no strength if both are 0).
# However, to avoid NaN from 0/0, it's better to calculate RSI directly with conditions.
rsi_values = []
for i in range(len(df)):
avg_g = df['avg_gain'].iloc[i]
avg_l = df['avg_loss'].iloc[i]
if i < self.period -1 : # Not enough data for initial SMA
rsi_values.append(np.nan)
continue
if avg_l == 0:
if avg_g == 0:
rsi_values.append(50) # Or 0, or np.nan depending on how you want to treat this. 50 implies neutrality.
else:
rsi_values.append(100) # Max strength
else:
rs_val = avg_g / avg_l
rsi_values.append(100 - (100 / (1 + rs_val)))
df['RSI'] = rsi_series
df['RSI'] = pd.Series(rsi_values, index=df.index)
# Remove intermediate columns if desired, or keep them for debugging
# df.drop(columns=['avg_gain', 'avg_loss'], inplace=True)
return df
@staticmethod
def calculate_custom_rsi(price_series: pd.Series, window: int = 14, smoothing: str = 'SMA') -> pd.Series:
"""
Calculates RSI with specified window and smoothing (SMA or EMA).
Args:
price_series (pd.Series): Series of prices.
window (int): The period for RSI calculation. Must be a positive integer.
smoothing (str): Smoothing method, 'SMA' or 'EMA'. Defaults to 'SMA'.
Returns:
pd.Series: Series containing the RSI values.
"""
if not isinstance(price_series, pd.Series):
raise TypeError("price_series must be a pandas Series.")
if not isinstance(window, int) or window <= 0:
raise ValueError("window must be a positive integer.")
if smoothing not in ['SMA', 'EMA']:
raise ValueError("smoothing must be either 'SMA' or 'EMA'.")
if len(price_series) < window + 1: # Need at least window + 1 prices for one diff
# print(f"Warning: Data length ({len(price_series)}) is less than RSI window ({window}) + 1. RSI will be all NaN.")
return pd.Series(np.nan, index=price_series.index)
delta = price_series.diff()
# The first delta is NaN. For gain/loss calculations, it can be treated as 0.
# However, subsequent rolling/ewm will handle NaNs appropriately if min_periods is set.
gain = delta.where(delta > 0, 0.0)
loss = -delta.where(delta < 0, 0.0) # Ensure loss is positive
# Ensure gain and loss Series have the same index as price_series for rolling/ewm
# This is important if price_series has missing dates/times
gain = gain.reindex(price_series.index, fill_value=0.0)
loss = loss.reindex(price_series.index, fill_value=0.0)
if smoothing == 'EMA':
# adjust=False for Wilder's smoothing used in RSI
avg_gain = gain.ewm(alpha=1/window, adjust=False, min_periods=window).mean()
avg_loss = loss.ewm(alpha=1/window, adjust=False, min_periods=window).mean()
else: # SMA
avg_gain = gain.rolling(window=window, min_periods=window).mean()
avg_loss = loss.rolling(window=window, min_periods=window).mean()
# Handle division by zero for RS calculation
# If avg_loss is 0, RS can be considered infinite (if avg_gain > 0) or undefined (if avg_gain also 0)
rs = avg_gain / avg_loss.replace(0, 1e-9) # Replace 0 with a tiny number to avoid direct division by zero warning
rsi = 100 - (100 / (1 + rs))
# Correct RSI values for edge cases where avg_loss was 0
# If avg_loss is 0 and avg_gain is > 0, RSI is 100.
# If avg_loss is 0 and avg_gain is 0, RSI is 50 (neutral).
rsi[avg_loss == 0] = np.where(avg_gain[avg_loss == 0] > 0, 100, 50)
# Ensure RSI is NaN where avg_gain or avg_loss is NaN (due to min_periods)
rsi[avg_gain.isna() | avg_loss.isna()] = np.nan
return rsi
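
The static helper can be used on a bare price Series without constructing the class; EMA smoothing corresponds to Wilder's RSI, SMA to the simpler variant. A sketch with random synthetic prices (import path again taken from the removed strategy file):

import numpy as np
import pandas as pd
from cycles.Analysis.rsi import RSI

rng = np.random.default_rng(0)
prices = pd.Series(100 + rng.normal(0, 1, 200).cumsum())

rsi_wilder = RSI.calculate_custom_rsi(prices, window=14, smoothing="EMA")
rsi_simple = RSI.calculate_custom_rsi(prices, window=14, smoothing="SMA")
print(rsi_wilder.tail(), rsi_simple.tail())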

View File

@@ -1,453 +1,86 @@
import os
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
class BacktestCharts:
@staticmethod
def plot(df, meta_trend):
def __init__(self, charts_dir="charts"):
self.charts_dir = charts_dir
os.makedirs(self.charts_dir, exist_ok=True)
def plot_profit_ratio_vs_stop_loss(self, results, filename="profit_ratio_vs_stop_loss.png"):
"""
Plot close price line chart with a bar at the bottom: green when trend is 1, red when trend is 0.
The bar stays at the bottom even when zooming/panning.
- df: DataFrame with columns ['close', ...] and a datetime index or 'timestamp' column.
- meta_trend: array-like, same length as df, values 1 (green) or 0 (red).
Plots profit ratio vs stop loss percentage for each timeframe.
Parameters:
- results: list of dicts, each with keys: 'timeframe', 'stop_loss_pct', 'profit_ratio'
- filename: output filename (will be saved in charts_dir)
"""
fig, (ax_price, ax_bar) = plt.subplots(
nrows=2, ncols=1, figsize=(16, 8), sharex=True,
gridspec_kw={'height_ratios': [12, 1]}
)
# Organize data by timeframe
from collections import defaultdict
data = defaultdict(lambda: {"stop_loss_pct": [], "profit_ratio": []})
for row in results:
tf = row["timeframe"]
data[tf]["stop_loss_pct"].append(row["stop_loss_pct"])
data[tf]["profit_ratio"].append(row["profit_ratio"])
sns.lineplot(x=df.index, y=df['close'], label='Close Price', color='blue', ax=ax_price)
ax_price.set_title('Close Price with Trend Bar (Green=1, Red=0)')
ax_price.set_ylabel('Price')
ax_price.grid(True, alpha=0.3)
ax_price.legend()
plt.figure(figsize=(10, 6))
for tf, vals in data.items():
# Sort by stop_loss_pct for smooth lines
sorted_pairs = sorted(zip(vals["stop_loss_pct"], vals["profit_ratio"]))
stop_loss, profit_ratio = zip(*sorted_pairs)
plt.plot(
[s * 100 for s in stop_loss], # Convert to percent
profit_ratio,
marker="o",
label=tf
)
# Clean meta_trend: ensure only 0/1, handle NaNs by forward-fill then fill remaining with 0
meta_trend_arr = np.asarray(meta_trend)
if not np.issubdtype(meta_trend_arr.dtype, np.number):
meta_trend_arr = pd.Series(meta_trend_arr).astype(float).to_numpy()
if np.isnan(meta_trend_arr).any():
meta_trend_arr = pd.Series(meta_trend_arr).fillna(method='ffill').fillna(0).astype(int).to_numpy()
else:
meta_trend_arr = meta_trend_arr.astype(int)
meta_trend_arr = np.where(meta_trend_arr != 1, 0, 1) # force only 0 or 1
if hasattr(df.index, 'to_numpy'):
x_vals = df.index.to_numpy()
else:
x_vals = np.array(df.index)
# Find contiguous regions
regions = []
start = 0
for i in range(1, len(meta_trend_arr)):
if meta_trend_arr[i] != meta_trend_arr[i-1]:
regions.append((start, i-1, meta_trend_arr[i-1]))
start = i
regions.append((start, len(meta_trend_arr)-1, meta_trend_arr[-1]))
# Draw red vertical lines at the start of each new region (except the first)
for region_idx in range(1, len(regions)):
region_start = regions[region_idx][0]
ax_price.axvline(x=x_vals[region_start], color='black', linestyle='--', alpha=0.7, linewidth=1)
for start, end, trend in regions:
color = '#089981' if trend == 1 else '#F23645'
# Offset by 1 on x: span from x_vals[start] to x_vals[end+1] if possible
x_start = x_vals[start]
x_end = x_vals[end+1] if end+1 < len(x_vals) else x_vals[end]
ax_bar.axvspan(x_start, x_end, color=color, alpha=1, ymin=0, ymax=1)
ax_bar.set_ylim(0, 1)
ax_bar.set_yticks([])
ax_bar.set_ylabel('Trend')
ax_bar.set_xlabel('Time')
ax_bar.grid(False)
ax_bar.set_title('Meta Trend')
plt.tight_layout(h_pad=0.1)
plt.show()
@staticmethod
def format_strategy_data_with_trades(strategy_data, backtest_results):
"""
Format strategy data for universal plotting with actual executed trades.
Converts strategy output into the expected column format: "x_type_name"
Args:
strategy_data (DataFrame): Output from strategy with columns like 'close', 'UpperBand', 'LowerBand', 'RSI'
backtest_results (dict): Results from backtest.run() containing actual executed trades
Returns:
DataFrame: Formatted data ready for plot_data function
"""
formatted_df = pd.DataFrame(index=strategy_data.index)
# Plot 1: Price data with Bollinger Bands and actual trade signals
if 'close' in strategy_data.columns:
formatted_df['1_line_close'] = strategy_data['close']
# Bollinger Bands area (prefer standard names, fallback to timeframe-specific)
upper_band_col = None
lower_band_col = None
sma_col = None
# Check for standard BB columns first
if 'UpperBand' in strategy_data.columns and 'LowerBand' in strategy_data.columns:
upper_band_col = 'UpperBand'
lower_band_col = 'LowerBand'
# Check for 15m BB columns
elif 'UpperBand_15m' in strategy_data.columns and 'LowerBand_15m' in strategy_data.columns:
upper_band_col = 'UpperBand_15m'
lower_band_col = 'LowerBand_15m'
if upper_band_col and lower_band_col:
formatted_df['1_area_bb_upper'] = strategy_data[upper_band_col]
formatted_df['1_area_bb_lower'] = strategy_data[lower_band_col]
# SMA/Moving Average line
if 'SMA' in strategy_data.columns:
sma_col = 'SMA'
elif 'SMA_15m' in strategy_data.columns:
sma_col = 'SMA_15m'
if sma_col:
formatted_df['1_line_sma'] = strategy_data[sma_col]
# Strategy buy/sell signals (all signals from strategy) as smaller scatter points
if 'BuySignal' in strategy_data.columns and 'close' in strategy_data.columns:
strategy_buy_points = strategy_data['close'].where(strategy_data['BuySignal'], np.nan)
formatted_df['1_scatter_strategy_buy'] = strategy_buy_points
if 'SellSignal' in strategy_data.columns and 'close' in strategy_data.columns:
strategy_sell_points = strategy_data['close'].where(strategy_data['SellSignal'], np.nan)
formatted_df['1_scatter_strategy_sell'] = strategy_sell_points
# Actual executed trades from backtest results (larger, more prominent)
if 'trades' in backtest_results and backtest_results['trades']:
# Create series for buy and sell points
buy_points = pd.Series(np.nan, index=strategy_data.index)
sell_points = pd.Series(np.nan, index=strategy_data.index)
for trade in backtest_results['trades']:
entry_time = trade.get('entry_time')
exit_time = trade.get('exit_time')
entry_price = trade.get('entry')
exit_price = trade.get('exit')
# Find closest index for entry time
if entry_time is not None and entry_price is not None:
try:
if isinstance(entry_time, str):
entry_time = pd.to_datetime(entry_time)
# Find the closest index to entry_time
closest_entry_idx = strategy_data.index.get_indexer([entry_time], method='nearest')[0]
if closest_entry_idx >= 0:
buy_points.iloc[closest_entry_idx] = entry_price
except (ValueError, IndexError, TypeError):
pass # Skip if can't find matching time
# Find closest index for exit time
if exit_time is not None and exit_price is not None:
try:
if isinstance(exit_time, str):
exit_time = pd.to_datetime(exit_time)
# Find the closest index to exit_time
closest_exit_idx = strategy_data.index.get_indexer([exit_time], method='nearest')[0]
if closest_exit_idx >= 0:
sell_points.iloc[closest_exit_idx] = exit_price
except (ValueError, IndexError, TypeError):
pass # Skip if can't find matching time
formatted_df['1_scatter_actual_buy'] = buy_points
formatted_df['1_scatter_actual_sell'] = sell_points
# Stop Loss and Take Profit levels
if 'StopLoss' in strategy_data.columns:
formatted_df['1_line_stop_loss'] = strategy_data['StopLoss']
if 'TakeProfit' in strategy_data.columns:
formatted_df['1_line_take_profit'] = strategy_data['TakeProfit']
# Plot 2: RSI
rsi_col = None
if 'RSI' in strategy_data.columns:
rsi_col = 'RSI'
elif 'RSI_15m' in strategy_data.columns:
rsi_col = 'RSI_15m'
if rsi_col:
formatted_df['2_line_rsi'] = strategy_data[rsi_col]
# Add RSI overbought/oversold levels
formatted_df['2_line_rsi_overbought'] = 70
formatted_df['2_line_rsi_oversold'] = 30
# Plot 3: Volume (if available)
if 'volume' in strategy_data.columns:
formatted_df['3_bar_volume'] = strategy_data['volume']
# Add volume moving average if available
if 'VolumeMA_15m' in strategy_data.columns:
formatted_df['3_line_volume_ma'] = strategy_data['VolumeMA_15m']
return formatted_df
@staticmethod
def format_strategy_data(strategy_data):
"""
Format strategy data for universal plotting (without trade signals).
Converts strategy output into the expected column format: "x_type_name"
Args:
strategy_data (DataFrame): Output from strategy with columns like 'close', 'UpperBand', 'LowerBand', 'RSI'
Returns:
DataFrame: Formatted data ready for plot_data function
"""
formatted_df = pd.DataFrame(index=strategy_data.index)
# Plot 1: Price data with Bollinger Bands
if 'close' in strategy_data.columns:
formatted_df['1_line_close'] = strategy_data['close']
# Bollinger Bands area (prefer standard names, fallback to timeframe-specific)
upper_band_col = None
lower_band_col = None
sma_col = None
# Check for standard BB columns first
if 'UpperBand' in strategy_data.columns and 'LowerBand' in strategy_data.columns:
upper_band_col = 'UpperBand'
lower_band_col = 'LowerBand'
# Check for 15m BB columns
elif 'UpperBand_15m' in strategy_data.columns and 'LowerBand_15m' in strategy_data.columns:
upper_band_col = 'UpperBand_15m'
lower_band_col = 'LowerBand_15m'
if upper_band_col and lower_band_col:
formatted_df['1_area_bb_upper'] = strategy_data[upper_band_col]
formatted_df['1_area_bb_lower'] = strategy_data[lower_band_col]
# SMA/Moving Average line
if 'SMA' in strategy_data.columns:
sma_col = 'SMA'
elif 'SMA_15m' in strategy_data.columns:
sma_col = 'SMA_15m'
if sma_col:
formatted_df['1_line_sma'] = strategy_data[sma_col]
# Stop Loss and Take Profit levels
if 'StopLoss' in strategy_data.columns:
formatted_df['1_line_stop_loss'] = strategy_data['StopLoss']
if 'TakeProfit' in strategy_data.columns:
formatted_df['1_line_take_profit'] = strategy_data['TakeProfit']
# Plot 2: RSI
rsi_col = None
if 'RSI' in strategy_data.columns:
rsi_col = 'RSI'
elif 'RSI_15m' in strategy_data.columns:
rsi_col = 'RSI_15m'
if rsi_col:
formatted_df['2_line_rsi'] = strategy_data[rsi_col]
# Add RSI overbought/oversold levels
formatted_df['2_line_rsi_overbought'] = 70
formatted_df['2_line_rsi_oversold'] = 30
# Plot 3: Volume (if available)
if 'volume' in strategy_data.columns:
formatted_df['3_bar_volume'] = strategy_data['volume']
# Add volume moving average if available
if 'VolumeMA_15m' in strategy_data.columns:
formatted_df['3_line_volume_ma'] = strategy_data['VolumeMA_15m']
return formatted_df
@staticmethod
def plot_data(df):
"""
Universal plot function for any formatted data.
- df: DataFrame with column names in format "x_type_name" where:
x = plot number (subplot)
type = plot type (line, area, scatter, bar, etc.)
name = descriptive name for the data series
"""
if df.empty:
print("No data to plot")
return
# Parse all columns
plot_info = []
for column in df.columns:
parts = column.split('_', 2) # Split into max 3 parts
if len(parts) < 3:
print(f"Warning: Skipping column '{column}' - invalid format. Expected 'x_type_name'")
continue
try:
plot_number = int(parts[0])
plot_type = parts[1].lower()
plot_name = parts[2]
plot_info.append((plot_number, plot_type, plot_name, column))
except ValueError:
print(f"Warning: Skipping column '{column}' - invalid plot number")
continue
if not plot_info:
print("No valid columns found for plotting")
return
# Group by plot number
plots = {}
for plot_num, plot_type, plot_name, column in plot_info:
if plot_num not in plots:
plots[plot_num] = []
plots[plot_num].append((plot_type, plot_name, column))
# Sort plot numbers
plot_numbers = sorted(plots.keys())
n_plots = len(plot_numbers)
# Create subplots
fig, axs = plt.subplots(n_plots, 1, figsize=(16, 6 * n_plots), sharex=True)
if n_plots == 1:
axs = [axs] # Ensure axs is always a list
# Plot each subplot
for i, plot_num in enumerate(plot_numbers):
ax = axs[i]
plot_items = plots[plot_num]
# Handle Bollinger Bands area first (needs special handling)
bb_upper = None
bb_lower = None
for plot_type, plot_name, column in plot_items:
if plot_type == 'area' and 'bb_upper' in plot_name:
bb_upper = df[column]
elif plot_type == 'area' and 'bb_lower' in plot_name:
bb_lower = df[column]
# Plot Bollinger Bands area if both bounds exist
if bb_upper is not None and bb_lower is not None:
ax.fill_between(df.index, bb_upper, bb_lower, alpha=0.2, color='gray', label='Bollinger Bands')
# Plot other items
for plot_type, plot_name, column in plot_items:
if plot_type == 'area' and ('bb_upper' in plot_name or 'bb_lower' in plot_name):
continue # Already handled above
data = df[column].dropna() # Remove NaN values for cleaner plots
if plot_type == 'line':
color = None
linestyle = '-'
alpha = 1.0
# Special styling for different line types
if 'overbought' in plot_name:
color = 'red'
linestyle = '--'
alpha = 0.7
elif 'oversold' in plot_name:
color = 'green'
linestyle = '--'
alpha = 0.7
elif 'stop_loss' in plot_name:
color = 'red'
linestyle = ':'
alpha = 0.8
elif 'take_profit' in plot_name:
color = 'green'
linestyle = ':'
alpha = 0.8
elif 'sma' in plot_name:
color = 'orange'
alpha = 0.8
elif 'volume_ma' in plot_name:
color = 'purple'
alpha = 0.7
ax.plot(data.index, data, label=plot_name.replace('_', ' ').title(),
color=color, linestyle=linestyle, alpha=alpha)
elif plot_type == 'scatter':
color = 'green' if 'buy' in plot_name else 'red' if 'sell' in plot_name else 'blue'
marker = '^' if 'buy' in plot_name else 'v' if 'sell' in plot_name else 'o'
size = 100 if 'buy' in plot_name or 'sell' in plot_name else 50
alpha = 0.8
zorder = 5
label_name = plot_name.replace('_', ' ').title()
# Special styling for different signal types
if 'actual_buy' in plot_name:
color = 'darkgreen'
marker = '^'
size = 120
alpha = 1.0
zorder = 10 # Higher z-order to appear on top
label_name = 'Actual Buy Trades'
elif 'actual_sell' in plot_name:
color = 'darkred'
marker = 'v'
size = 120
alpha = 1.0
zorder = 10 # Higher z-order to appear on top
label_name = 'Actual Sell Trades'
elif 'strategy_buy' in plot_name:
color = 'lightgreen'
marker = '^'
size = 60
alpha = 0.6
zorder = 3 # Lower z-order to appear behind actual trades
label_name = 'Strategy Buy Signals'
elif 'strategy_sell' in plot_name:
color = 'lightcoral'
marker = 'v'
size = 60
alpha = 0.6
zorder = 3 # Lower z-order to appear behind actual trades
label_name = 'Strategy Sell Signals'
ax.scatter(data.index, data, label=label_name,
color=color, marker=marker, s=size, alpha=alpha, zorder=zorder)
elif plot_type == 'area':
ax.fill_between(data.index, data, alpha=0.5, label=plot_name.replace('_', ' ').title())
elif plot_type == 'bar':
ax.bar(data.index, data, alpha=0.7, label=plot_name.replace('_', ' ').title())
else:
print(f"Warning: Plot type '{plot_type}' not supported for column '{column}'")
# Customize subplot
ax.grid(True, alpha=0.3)
ax.legend()
# Set titles and labels
if plot_num == 1:
ax.set_title('Price Chart with Bollinger Bands and Signals')
ax.set_ylabel('Price')
elif plot_num == 2:
ax.set_title('RSI Indicator')
ax.set_ylabel('RSI')
ax.set_ylim(0, 100)
elif plot_num == 3:
ax.set_title('Volume')
ax.set_ylabel('Volume')
else:
ax.set_title(f'Plot {plot_num}')
# Set x-axis label only on the bottom subplot
axs[-1].set_xlabel('Time')
plt.xlabel("Stop Loss (%)")
plt.ylabel("Profit Ratio")
plt.title("Profit Ratio vs Stop Loss (%) per Timeframe")
plt.legend(title="Timeframe")
plt.grid(True, linestyle="--", alpha=0.5)
plt.tight_layout()
plt.show()
output_path = os.path.join(self.charts_dir, filename)
plt.savefig(output_path)
plt.close()
def plot_average_trade_vs_stop_loss(self, results, filename="average_trade_vs_stop_loss.png"):
"""
Plots average trade vs stop loss percentage for each timeframe.
Parameters:
- results: list of dicts, each with keys: 'timeframe', 'stop_loss_pct', 'average_trade'
- filename: output filename (will be saved in charts_dir)
"""
from collections import defaultdict
data = defaultdict(lambda: {"stop_loss_pct": [], "average_trade": []})
for row in results:
tf = row["timeframe"]
if "average_trade" not in row:
continue # Skip rows without average_trade
data[tf]["stop_loss_pct"].append(row["stop_loss_pct"])
data[tf]["average_trade"].append(row["average_trade"])
plt.figure(figsize=(10, 6))
for tf, vals in data.items():
# Sort by stop_loss_pct for smooth lines
sorted_pairs = sorted(zip(vals["stop_loss_pct"], vals["average_trade"]))
stop_loss, average_trade = zip(*sorted_pairs)
plt.plot(
[s * 100 for s in stop_loss], # Convert to percent
average_trade,
marker="o",
label=tf
)
plt.xlabel("Stop Loss (%)")
plt.ylabel("Average Trade")
plt.title("Average Trade vs Stop Loss (%) per Timeframe")
plt.legend(title="Timeframe")
plt.grid(True, linestyle="--", alpha=0.5)
plt.tight_layout()
output_path = os.path.join(self.charts_dir, filename)
plt.savefig(output_path)
plt.close()
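
Both new methods take a flat list of result dicts with the keys named in their docstrings; a sketch with made-up numbers (the class's import path is not visible in this diff):

# from <charts module not shown in this diff> import BacktestCharts

results = [
    {"timeframe": "1min", "stop_loss_pct": 0.03, "profit_ratio": 1.10, "average_trade": 12.5},
    {"timeframe": "1min", "stop_loss_pct": 0.05, "profit_ratio": 1.25, "average_trade": 18.1},
    {"timeframe": "15min", "stop_loss_pct": 0.03, "profit_ratio": 0.95, "average_trade": -4.2},
    {"timeframe": "15min", "stop_loss_pct": 0.05, "profit_ratio": 1.05, "average_trade": 6.7},
]

charts = BacktestCharts(charts_dir="charts")
charts.plot_profit_ratio_vs_stop_loss(results)   # saves charts/profit_ratio_vs_stop_loss.png
charts.plot_average_trade_vs_stop_loss(results)  # saves charts/average_trade_vs_stop_loss.png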

View File

@@ -2,6 +2,6 @@ import pandas as pd
class MarketFees:
@staticmethod
def calculate_okx_taker_maker_fee(amount, is_maker=True) -> float:
def calculate_okx_taker_maker_fee(amount, is_maker=True):
fee_rate = 0.0008 if is_maker else 0.0010
return amount * fee_rate
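
At the rates shown (0.08% maker, 0.10% taker), a quick check of the helper on an illustrative notional (the module's import path is not visible here):

# from <fees module not shown in this diff> import MarketFees

notional = 10_000  # USD, illustrative
print(MarketFees.calculate_okx_taker_maker_fee(notional, is_maker=True))   # 8.0
print(MarketFees.calculate_okx_taker_maker_fee(notional, is_maker=False))  # 10.0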

View File

@@ -1,42 +0,0 @@
"""
Strategies Module
This module contains the strategy management system for trading strategies.
It provides a flexible framework for implementing, combining, and managing multiple trading strategies.
Components:
- StrategyBase: Abstract base class for all strategies
- DefaultStrategy: Meta-trend based strategy
- BBRSStrategy: Bollinger Bands + RSI strategy
- StrategyManager: Orchestrates multiple strategies
- StrategySignal: Represents trading signals with confidence levels
Usage:
from cycles.strategies import StrategyManager, create_strategy_manager
# Create strategy manager from config
strategy_manager = create_strategy_manager(config)
# Or create individual strategies
from cycles.strategies import DefaultStrategy, BBRSStrategy
default_strategy = DefaultStrategy(weight=1.0, params={})
"""
from .base import StrategyBase, StrategySignal
from .default_strategy import DefaultStrategy
from .bbrs_strategy import BBRSStrategy
from .random_strategy import RandomStrategy
from .manager import StrategyManager, create_strategy_manager
__all__ = [
'StrategyBase',
'StrategySignal',
'DefaultStrategy',
'BBRSStrategy',
'RandomStrategy',
'StrategyManager',
'create_strategy_manager'
]
__version__ = '1.0.0'
__author__ = 'TCP Cycles Team'
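To make the module docstring above concrete, here is a minimal, hypothetical configuration for `create_strategy_manager`; it assumes the deleted `cycles.strategies` package is importable, and the weights and params are illustrative only:

```python
# Sketch only: assumes the cycles.strategies package shown above is installed and importable.
from cycles.strategies import create_strategy_manager

config = {
    "strategies": [
        {"name": "default", "weight": 0.6, "params": {"stop_loss_pct": 0.03, "timeframe": "15min"}},
        {"name": "bbrs", "weight": 0.4, "params": {"bb_width": 0.05}},
    ],
    "combination_rules": {"entry": "weighted_consensus", "exit": "any", "min_confidence": 0.6},
}

manager = create_strategy_manager(config)
print(manager.get_strategy_summary()["all_timeframes"])
```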

View File

@ -1,250 +0,0 @@
"""
Base classes for the strategy management system.
This module contains the fundamental building blocks for all trading strategies:
- StrategySignal: Represents trading signals with confidence and metadata
- StrategyBase: Abstract base class that all strategies must inherit from
"""
import pandas as pd
from abc import ABC, abstractmethod
from typing import Dict, Optional, List, Union
class StrategySignal:
"""
Represents a trading signal from a strategy.
A signal encapsulates the strategy's recommendation along with confidence
level, optional price target, and additional metadata.
Attributes:
signal_type (str): Type of signal - "ENTRY", "EXIT", or "HOLD"
confidence (float): Confidence level from 0.0 to 1.0
price (Optional[float]): Optional specific price for the signal
metadata (Dict): Additional signal data and context
Example:
# Entry signal with high confidence
signal = StrategySignal("ENTRY", confidence=0.8)
# Exit signal with stop loss price
signal = StrategySignal("EXIT", confidence=1.0, price=50000,
metadata={"type": "STOP_LOSS"})
"""
def __init__(self, signal_type: str, confidence: float = 1.0,
price: Optional[float] = None, metadata: Optional[Dict] = None):
"""
Initialize a strategy signal.
Args:
signal_type: Type of signal ("ENTRY", "EXIT", "HOLD")
confidence: Confidence level (0.0 to 1.0)
price: Optional specific price for the signal
metadata: Additional signal data and context
"""
self.signal_type = signal_type
self.confidence = max(0.0, min(1.0, confidence)) # Clamp to [0,1]
self.price = price
self.metadata = metadata or {}
def __repr__(self) -> str:
"""String representation of the signal."""
return (f"StrategySignal(type={self.signal_type}, "
f"confidence={self.confidence:.2f}, "
f"price={self.price}, metadata={self.metadata})")
class StrategyBase(ABC):
"""
Abstract base class for all trading strategies.
This class defines the interface that all strategies must implement:
- get_timeframes(): Specify required timeframes for the strategy
- initialize(): Setup strategy with backtester data
- get_entry_signal(): Generate entry signals
- get_exit_signal(): Generate exit signals
- get_confidence(): Optional confidence calculation
Attributes:
name (str): Strategy name
weight (float): Strategy weight for combination
params (Dict): Strategy parameters
initialized (bool): Whether strategy has been initialized
timeframes_data (Dict): Resampled data for different timeframes
Example:
class MyStrategy(StrategyBase):
def get_timeframes(self):
return ["15min"] # This strategy works on 15-minute data
def initialize(self, backtester):
# Setup strategy indicators using self.timeframes_data["15min"]
self.initialized = True
def get_entry_signal(self, backtester, df_index):
# Return StrategySignal based on analysis
if should_enter:
return StrategySignal("ENTRY", confidence=0.7)
return StrategySignal("HOLD", confidence=0.0)
"""
def __init__(self, name: str, weight: float = 1.0, params: Optional[Dict] = None):
"""
Initialize the strategy base.
Args:
name: Strategy name/identifier
weight: Strategy weight for combination (default: 1.0)
params: Strategy-specific parameters
"""
self.name = name
self.weight = weight
self.params = params or {}
self.initialized = False
self.timeframes_data = {} # Will store resampled data for each timeframe
def get_timeframes(self) -> List[str]:
"""
Get the list of timeframes required by this strategy.
Override this method to specify which timeframes your strategy needs.
The base class will automatically resample the 1-minute data to these timeframes
and make them available in self.timeframes_data.
Returns:
List[str]: List of timeframe strings (e.g., ["1min", "15min", "1h"])
Example:
def get_timeframes(self):
return ["15min"] # Strategy needs 15-minute data
def get_timeframes(self):
return ["5min", "15min", "1h"] # Multi-timeframe strategy
"""
return ["1min"] # Default to 1-minute data
def _resample_data(self, original_data: pd.DataFrame) -> None:
"""
Resample the original 1-minute data to all required timeframes.
This method is called automatically during initialization to create
resampled versions of the data for each timeframe the strategy needs.
Args:
original_data: Original 1-minute OHLCV data with DatetimeIndex
"""
self.timeframes_data = {}
for timeframe in self.get_timeframes():
if timeframe == "1min":
# For 1-minute data, just use the original
self.timeframes_data[timeframe] = original_data.copy()
else:
# Resample to the specified timeframe
resampled = original_data.resample(timeframe).agg({
'open': 'first',
'high': 'max',
'low': 'min',
'close': 'last',
'volume': 'sum'
}).dropna()
self.timeframes_data[timeframe] = resampled
def get_data_for_timeframe(self, timeframe: str) -> Optional[pd.DataFrame]:
"""
Get resampled data for a specific timeframe.
Args:
timeframe: Timeframe string (e.g., "15min", "1h")
Returns:
pd.DataFrame: Resampled OHLCV data or None if timeframe not available
"""
return self.timeframes_data.get(timeframe)
def get_primary_timeframe_data(self) -> pd.DataFrame:
"""
Get data for the primary (first) timeframe.
Returns:
pd.DataFrame: Data for the first timeframe in get_timeframes() list
"""
primary_timeframe = self.get_timeframes()[0]
return self.timeframes_data[primary_timeframe]
@abstractmethod
def initialize(self, backtester) -> None:
"""
Initialize strategy with backtester data.
This method is called once before backtesting begins.
The original 1-minute data will already be resampled to all required timeframes
and available in self.timeframes_data.
Strategies should setup indicators, validate data, and
set self.initialized = True when complete.
Args:
backtester: Backtest instance with data and configuration
"""
pass
@abstractmethod
def get_entry_signal(self, backtester, df_index: int) -> StrategySignal:
"""
Generate entry signal for the given data index.
The df_index refers to the index in the backtester's working dataframe,
which corresponds to the primary timeframe data.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
StrategySignal: Entry signal with confidence level
"""
pass
@abstractmethod
def get_exit_signal(self, backtester, df_index: int) -> StrategySignal:
"""
Generate exit signal for the given data index.
The df_index refers to the index in the backtester's working dataframe,
which corresponds to the primary timeframe data.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
StrategySignal: Exit signal with confidence level
"""
pass
def get_confidence(self, backtester, df_index: int) -> float:
"""
Get strategy confidence for the current market state.
Default implementation returns 1.0. Strategies can override
this to provide dynamic confidence based on market conditions.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
float: Confidence level (0.0 to 1.0)
"""
return 1.0
def __repr__(self) -> str:
"""String representation of the strategy."""
timeframes = self.get_timeframes()
return (f"{self.__class__.__name__}(name={self.name}, "
f"weight={self.weight}, timeframes={timeframes}, "
f"initialized={self.initialized})")

View File

@ -1,344 +0,0 @@
"""
Bollinger Bands + RSI Strategy (BBRS)
This module implements a sophisticated trading strategy that combines Bollinger Bands
and RSI indicators with market regime detection. The strategy adapts its parameters
based on whether the market is trending or moving sideways.
Key Features:
- Dynamic parameter adjustment based on market regime
- Bollinger Band squeeze detection
- RSI overbought/oversold conditions
- Market regime-specific thresholds
- Multi-timeframe analysis support
"""
import pandas as pd
import numpy as np
import logging
from typing import Tuple, Optional, List
from .base import StrategyBase, StrategySignal
class BBRSStrategy(StrategyBase):
"""
Bollinger Bands + RSI Strategy implementation.
This strategy uses Bollinger Bands and RSI indicators with market regime detection
to generate trading signals. It adapts its parameters based on whether the market
is in a trending or sideways regime.
The strategy works with 1-minute data as input and lets the underlying Strategy class
handle internal resampling to the timeframes it needs (typically 15min and 1h).
Stop-loss execution uses 1-minute precision.
Parameters:
bb_width (float): Bollinger Band width threshold (default: 0.05)
bb_period (int): Bollinger Band period (default: 20)
rsi_period (int): RSI calculation period (default: 14)
trending_rsi_threshold (list): RSI thresholds for trending market [low, high]
trending_bb_multiplier (float): BB multiplier for trending market
sideways_rsi_threshold (list): RSI thresholds for sideways market [low, high]
sideways_bb_multiplier (float): BB multiplier for sideways market
strategy_name (str): Strategy implementation name ("MarketRegimeStrategy" or "CryptoTradingStrategy")
SqueezeStrategy (bool): Enable squeeze strategy
stop_loss_pct (float): Stop loss percentage (default: 0.05)
Example:
params = {
"bb_width": 0.05,
"bb_period": 20,
"rsi_period": 14,
"strategy_name": "MarketRegimeStrategy",
"SqueezeStrategy": true
}
strategy = BBRSStrategy(weight=1.0, params=params)
"""
def __init__(self, weight: float = 1.0, params: Optional[dict] = None):
"""
Initialize the BBRS strategy.
Args:
weight: Strategy weight for combination (default: 1.0)
params: Strategy parameters for Bollinger Bands and RSI
"""
super().__init__("bbrs", weight, params)
def get_timeframes(self) -> List[str]:
"""
Get the timeframes required by the BBRS strategy.
BBRS strategy uses 1-minute data as input and lets the Strategy class
handle internal resampling to the timeframes it needs (15min, 1h, etc.).
We still include 1min for stop-loss precision.
Returns:
List[str]: List of timeframes needed for the strategy
"""
# BBRS strategy works with 1-minute data and lets Strategy class handle resampling
return ["1min"]
def initialize(self, backtester) -> None:
"""
Initialize BBRS strategy with signal processing.
Sets up the strategy by:
1. Using 1-minute data directly (Strategy class handles internal resampling)
2. Running the BBRS strategy processing on 1-minute data
3. Creating signals aligned with backtester expectations
Args:
backtester: Backtest instance with OHLCV data
"""
# Resample to get 1-minute data (which should be the original data)
self._resample_data(backtester.original_df)
# Get 1-minute data for strategy processing - Strategy class will handle internal resampling
min1_data = self.get_data_for_timeframe("1min")
# Initialize empty signal series for backtester compatibility
# Note: These will be populated after strategy processing
backtester.strategies["buy_signals"] = pd.Series(False, index=range(len(min1_data)))
backtester.strategies["sell_signals"] = pd.Series(False, index=range(len(min1_data)))
backtester.strategies["stop_loss_pct"] = self.params.get("stop_loss_pct", 0.05)
backtester.strategies["primary_timeframe"] = "1min"
# Run strategy processing on 1-minute data
self._run_strategy_processing(backtester)
self.initialized = True
def _run_strategy_processing(self, backtester) -> None:
"""
Run the actual BBRS strategy processing.
Uses the Strategy class from cycles.Analysis.strategies to process
the 1-minute data. The Strategy class will handle internal resampling
to the timeframes it needs (15min, 1h, etc.) and generate buy/sell signals.
Args:
backtester: Backtest instance with timeframes_data available
"""
from cycles.Analysis.bb_rsi import BollingerBandsStrategy
# Get 1-minute data for strategy processing - let Strategy class handle resampling
strategy_data = self.get_data_for_timeframe("1min")
# Configure strategy parameters with defaults
config_strategy = {
"bb_width": self.params.get("bb_width", 0.05),
"bb_period": self.params.get("bb_period", 20),
"rsi_period": self.params.get("rsi_period", 14),
"trending": {
"rsi_threshold": self.params.get("trending_rsi_threshold", [30, 70]),
"bb_std_dev_multiplier": self.params.get("trending_bb_multiplier", 2.5),
},
"sideways": {
"rsi_threshold": self.params.get("sideways_rsi_threshold", [40, 60]),
"bb_std_dev_multiplier": self.params.get("sideways_bb_multiplier", 1.8),
},
"strategy_name": self.params.get("strategy_name", "MarketRegimeStrategy"),
"SqueezeStrategy": self.params.get("SqueezeStrategy", True)
}
# Run strategy processing on 1-minute data - Strategy class handles internal resampling
strategy = BollingerBandsStrategy(config=config_strategy, logging=logging)
processed_data = strategy.run(strategy_data, config_strategy["strategy_name"])
# Store processed data for plotting and analysis
backtester.processed_data = processed_data
if processed_data.empty:
# If strategy processing failed, keep empty signals
return
# Extract signals from processed data
buy_signals_raw = processed_data.get('BuySignal', pd.Series(False, index=processed_data.index)).astype(bool)
sell_signals_raw = processed_data.get('SellSignal', pd.Series(False, index=processed_data.index)).astype(bool)
# The processed_data will be on whatever timeframe the Strategy class outputs
# We need to map these signals back to 1-minute resolution for backtesting
original_1min_data = self.get_data_for_timeframe("1min")
# Reindex signals to 1-minute resolution using forward-fill
buy_signals_1min = buy_signals_raw.reindex(original_1min_data.index, method='ffill').fillna(False)
sell_signals_1min = sell_signals_raw.reindex(original_1min_data.index, method='ffill').fillna(False)
# Convert to integer index to match backtester expectations
backtester.strategies["buy_signals"] = pd.Series(buy_signals_1min.values, index=range(len(buy_signals_1min)))
backtester.strategies["sell_signals"] = pd.Series(sell_signals_1min.values, index=range(len(sell_signals_1min)))
def get_entry_signal(self, backtester, df_index: int) -> StrategySignal:
"""
Generate entry signal based on BBRS buy signals.
Entry occurs when the BBRS strategy processing has generated
a buy signal based on Bollinger Bands and RSI conditions on
the primary timeframe.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
StrategySignal: Entry signal if buy condition met, hold otherwise
"""
if not self.initialized:
return StrategySignal("HOLD", confidence=0.0)
if df_index >= len(backtester.strategies["buy_signals"]):
return StrategySignal("HOLD", confidence=0.0)
if backtester.strategies["buy_signals"].iloc[df_index]:
# High confidence for BBRS buy signals
confidence = self._calculate_signal_confidence(backtester, df_index, "entry")
return StrategySignal("ENTRY", confidence=confidence)
return StrategySignal("HOLD", confidence=0.0)
def get_exit_signal(self, backtester, df_index: int) -> StrategySignal:
"""
Generate exit signal based on BBRS sell signals or stop loss.
Exit occurs when:
1. BBRS strategy generates a sell signal
2. Stop loss is triggered based on price movement
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
StrategySignal: Exit signal with type and price, or hold signal
"""
if not self.initialized:
return StrategySignal("HOLD", confidence=0.0)
if df_index >= len(backtester.strategies["sell_signals"]):
return StrategySignal("HOLD", confidence=0.0)
# Check for sell signal
if backtester.strategies["sell_signals"].iloc[df_index]:
confidence = self._calculate_signal_confidence(backtester, df_index, "exit")
return StrategySignal("EXIT", confidence=confidence,
metadata={"type": "SELL_SIGNAL"})
# Check for stop loss using 1-minute data for precision
stop_loss_result, sell_price = self._check_stop_loss(backtester)
if stop_loss_result:
return StrategySignal("EXIT", confidence=1.0, price=sell_price,
metadata={"type": "STOP_LOSS"})
return StrategySignal("HOLD", confidence=0.0)
def get_confidence(self, backtester, df_index: int) -> float:
"""
Get strategy confidence based on signal strength and market conditions.
Confidence can be enhanced by analyzing multiple timeframes and
market regime consistency.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
float: Confidence level (0.0 to 1.0)
"""
if not self.initialized:
return 0.0
# Check for active signals
has_buy_signal = (df_index < len(backtester.strategies["buy_signals"]) and
backtester.strategies["buy_signals"].iloc[df_index])
has_sell_signal = (df_index < len(backtester.strategies["sell_signals"]) and
backtester.strategies["sell_signals"].iloc[df_index])
if has_buy_signal or has_sell_signal:
signal_type = "entry" if has_buy_signal else "exit"
return self._calculate_signal_confidence(backtester, df_index, signal_type)
# Moderate confidence during neutral periods
return 0.5
def _calculate_signal_confidence(self, backtester, df_index: int, signal_type: str) -> float:
"""
Calculate confidence level for a signal based on multiple factors.
Can consider multiple timeframes, market regime, volatility, etc.
Args:
backtester: Backtest instance
df_index: Current index
signal_type: "entry" or "exit"
Returns:
float: Confidence level (0.0 to 1.0)
"""
base_confidence = 1.0
# TODO: Implement multi-timeframe confirmation
# For now, return high confidence for primary signals
# Future enhancements could include:
# - Checking confirmation from additional timeframes
# - Analyzing market regime consistency
# - Considering volatility levels
# - RSI and BB position analysis
return base_confidence
def _check_stop_loss(self, backtester) -> Tuple[bool, Optional[float]]:
"""
Check if stop loss is triggered using 1-minute data for precision.
Uses 1-minute data regardless of primary timeframe to ensure
accurate stop loss execution.
Args:
backtester: Backtest instance with current trade state
Returns:
Tuple[bool, Optional[float]]: (stop_loss_triggered, sell_price)
"""
# Calculate stop loss price
stop_price = backtester.entry_price * (1 - backtester.strategies["stop_loss_pct"])
# Use 1-minute data for precise stop loss checking
min1_data = self.get_data_for_timeframe("1min")
if min1_data is None:
# Fallback to original_df if 1min timeframe not available
min1_data = backtester.original_df if hasattr(backtester, 'original_df') else backtester.min1_df
min1_index = min1_data.index
# Find data range from entry to current time
start_candidates = min1_index[min1_index >= backtester.entry_time]
if len(start_candidates) == 0:
return False, None
backtester.current_trade_min1_start_idx = start_candidates[0]
end_candidates = min1_index[min1_index <= backtester.current_date]
if len(end_candidates) == 0:
return False, None
backtester.current_min1_end_idx = end_candidates[-1]
# Check if any candle in the range triggered stop loss
min1_slice = min1_data.loc[backtester.current_trade_min1_start_idx:backtester.current_min1_end_idx]
if (min1_slice['low'] <= stop_price).any():
# Find the first candle that triggered stop loss
stop_candle = min1_slice[min1_slice['low'] <= stop_price].iloc[0]
# Use open price if it gapped below stop, otherwise use stop price
if stop_candle['open'] < stop_price:
sell_price = stop_candle['open']
else:
sell_price = stop_price
return True, sell_price
return False, None
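The signal mapping in `_run_strategy_processing` relies on `reindex(..., method='ffill')` to project coarser-timeframe signals onto the 1-minute index; a small standalone sketch of that step, with hypothetical indexes chosen only for illustration:

```python
import pandas as pd

min1_index = pd.date_range("2024-01-01 00:00", periods=60, freq="1min")
min15_index = pd.date_range("2024-01-01 00:00", periods=4, freq="15min")

# A buy signal fires on the 15-minute bar starting at 00:30
buy_15min = pd.Series([False, False, True, False], index=min15_index)

# Forward-fill onto 1-minute resolution, as the strategy does before backtesting
buy_1min = buy_15min.reindex(min1_index, method="ffill").fillna(False)

print(buy_1min["2024-01-01 00:30":"2024-01-01 00:44"].all())  # True: signal held for the whole 15-min bar
print(buy_1min["2024-01-01 00:45":].any())                    # False: next bar has no signal
```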

View File

@ -1,254 +0,0 @@
"""
Default Meta-Trend Strategy
This module implements the default trading strategy based on meta-trend analysis
using multiple Supertrend indicators. The strategy enters when trends align
and exits on trend reversal or stop loss.
The meta-trend is calculated by comparing three Supertrend indicators:
- Entry: When meta-trend changes from != 1 to == 1
- Exit: When meta-trend changes to -1 or stop loss is triggered
"""
import numpy as np
from typing import Tuple, Optional, List
from .base import StrategyBase, StrategySignal
class DefaultStrategy(StrategyBase):
"""
Default meta-trend strategy implementation.
This strategy uses multiple Supertrend indicators to determine market direction.
It generates entry signals when all three Supertrend indicators align in an
upward direction, and exit signals when they reverse or stop loss is triggered.
The strategy works best on 15-minute timeframes but can be configured for other timeframes.
Parameters:
stop_loss_pct (float): Stop loss percentage (default: 0.03)
timeframe (str): Preferred timeframe for analysis (default: "15min")
Example:
strategy = DefaultStrategy(weight=1.0, params={"stop_loss_pct": 0.05, "timeframe": "15min"})
"""
def __init__(self, weight: float = 1.0, params: Optional[dict] = None):
"""
Initialize the default strategy.
Args:
weight: Strategy weight for combination (default: 1.0)
params: Strategy parameters including stop_loss_pct and timeframe
"""
super().__init__("default", weight, params)
def get_timeframes(self) -> List[str]:
"""
Get the timeframes required by the default strategy.
The default strategy works on a single timeframe (typically 15min)
but also needs 1min data for precise stop-loss execution.
Returns:
List[str]: List containing primary timeframe and 1min for stop-loss
"""
primary_timeframe = self.params.get("timeframe", "15min")
# Always include 1min for stop-loss precision, avoid duplicates
timeframes = [primary_timeframe]
if primary_timeframe != "1min":
timeframes.append("1min")
return timeframes
def initialize(self, backtester) -> None:
"""
Initialize meta trend calculation using Supertrend indicators.
Calculates the meta-trend by comparing three Supertrend indicators.
When all three agree on direction, meta-trend follows that direction.
Otherwise, meta-trend is neutral (0).
Args:
backtester: Backtest instance with OHLCV data
"""
from cycles.Analysis.supertrend import Supertrends
# First, resample the original 1-minute data to required timeframes
self._resample_data(backtester.original_df)
# Get the primary timeframe data for strategy calculations
primary_timeframe = self.get_timeframes()[0]
strategy_data = self.get_data_for_timeframe(primary_timeframe)
# Calculate Supertrend indicators on the primary timeframe
supertrends = Supertrends(strategy_data, verbose=False)
supertrend_results_list = supertrends.calculate_supertrend_indicators()
# Extract trend arrays from each Supertrend
trends = [st['results']['trend'] for st in supertrend_results_list]
trends_arr = np.stack(trends, axis=1)
# Calculate meta-trend: all three must agree for direction signal
meta_trend = np.where(
(trends_arr[:,0] == trends_arr[:,1]) & (trends_arr[:,1] == trends_arr[:,2]),
trends_arr[:,0],
0 # Neutral when trends don't agree
)
# Store in backtester for access during trading
# Note: backtester.df should now be using our primary timeframe
backtester.strategies["meta_trend"] = meta_trend
backtester.strategies["stop_loss_pct"] = self.params.get("stop_loss_pct", 0.03)
backtester.strategies["primary_timeframe"] = primary_timeframe
self.initialized = True
def get_entry_signal(self, backtester, df_index: int) -> StrategySignal:
"""
Generate entry signal based on meta-trend direction change.
Entry occurs when meta-trend changes from != 1 to == 1, indicating
all Supertrend indicators now agree on upward direction.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
StrategySignal: Entry signal if trend aligns, hold signal otherwise
"""
if not self.initialized:
return StrategySignal("HOLD", 0.0)
if df_index < 2: # shifting one index to prevent lookahead bias
return StrategySignal("HOLD", 0.0)
# Check for meta-trend entry condition
prev_trend = backtester.strategies["meta_trend"][df_index - 2]
curr_trend = backtester.strategies["meta_trend"][df_index - 1]
if prev_trend != 1 and curr_trend == 1:
# Strong confidence when all indicators align for entry
return StrategySignal("ENTRY", confidence=1.0)
return StrategySignal("HOLD", confidence=0.0)
def get_exit_signal(self, backtester, df_index: int) -> StrategySignal:
"""
Generate exit signal based on meta-trend reversal or stop loss.
Exit occurs when:
1. Meta-trend changes to -1 (trend reversal)
2. Stop loss is triggered based on price movement
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
StrategySignal: Exit signal with type and price, or hold signal
"""
if not self.initialized:
return StrategySignal("HOLD", 0.0)
if df_index < 1:
return StrategySignal("HOLD", 0.0)
# Check for meta-trend exit signal
prev_trend = backtester.strategies["meta_trend"][df_index - 1]
curr_trend = backtester.strategies["meta_trend"][df_index]
if prev_trend != 1 and curr_trend == -1:
return StrategySignal("EXIT", confidence=1.0,
metadata={"type": "META_TREND_EXIT_SIGNAL"})
# Check for stop loss using 1-minute data for precision
stop_loss_result, sell_price = self._check_stop_loss(backtester)
if stop_loss_result:
return StrategySignal("EXIT", confidence=1.0, price=sell_price,
metadata={"type": "STOP_LOSS"})
return StrategySignal("HOLD", confidence=0.0)
def get_confidence(self, backtester, df_index: int) -> float:
"""
Get strategy confidence based on meta-trend strength.
Higher confidence when meta-trend is strongly directional,
lower confidence during neutral periods.
Args:
backtester: Backtest instance with current state
df_index: Current index in the primary timeframe dataframe
Returns:
float: Confidence level (0.0 to 1.0)
"""
if not self.initialized or df_index >= len(backtester.strategies["meta_trend"]):
return 0.0
curr_trend = backtester.strategies["meta_trend"][df_index]
# High confidence for strong directional signals
if curr_trend == 1 or curr_trend == -1:
return 1.0
# Low confidence for neutral trend
return 0.3
def _check_stop_loss(self, backtester) -> Tuple[bool, Optional[float]]:
"""
Check if stop loss is triggered based on price movement.
Uses 1-minute data for precise stop loss checking regardless of
the primary timeframe used for strategy signals.
Args:
backtester: Backtest instance with current trade state
Returns:
Tuple[bool, Optional[float]]: (stop_loss_triggered, sell_price)
"""
# Calculate stop loss price
stop_price = backtester.entry_price * (1 - backtester.strategies["stop_loss_pct"])
# Use 1-minute data for precise stop loss checking
min1_data = self.get_data_for_timeframe("1min")
if min1_data is None:
# Fallback to original_df if 1min timeframe not available
min1_data = backtester.original_df if hasattr(backtester, 'original_df') else backtester.min1_df
min1_index = min1_data.index
# Find data range from entry to current time
start_candidates = min1_index[min1_index >= backtester.entry_time]
if len(start_candidates) == 0:
return False, None
backtester.current_trade_min1_start_idx = start_candidates[0]
end_candidates = min1_index[min1_index <= backtester.current_date]
if len(end_candidates) == 0:
return False, None
backtester.current_min1_end_idx = end_candidates[-1]
# Check if any candle in the range triggered stop loss
min1_slice = min1_data.loc[backtester.current_trade_min1_start_idx:backtester.current_min1_end_idx]
if (min1_slice['low'] <= stop_price).any():
# Find the first candle that triggered stop loss
stop_candle = min1_slice[min1_slice['low'] <= stop_price].iloc[0]
# Use open price if it gapped below stop, otherwise use stop price
if stop_candle['open'] < stop_price:
sell_price = stop_candle['open']
else:
sell_price = stop_price
return True, sell_price
return False, None
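The meta-trend rule in `initialize` only emits a directional value when all three Supertrend directions agree; a tiny numpy illustration with made-up trend arrays:

```python
import numpy as np

# Hypothetical per-bar directions from three Supertrend indicators (+1 up, -1 down)
trends = [
    np.array([ 1,  1, -1,  1, -1]),
    np.array([ 1, -1, -1,  1, -1]),
    np.array([ 1,  1, -1, -1, -1]),
]
trends_arr = np.stack(trends, axis=1)

meta_trend = np.where(
    (trends_arr[:, 0] == trends_arr[:, 1]) & (trends_arr[:, 1] == trends_arr[:, 2]),
    trends_arr[:, 0],
    0,  # neutral when the indicators disagree
)
print(meta_trend)  # [ 1  0 -1  0 -1]
```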

View File

@ -1,397 +0,0 @@
"""
Strategy Manager
This module contains the StrategyManager class that orchestrates multiple trading strategies
and combines their signals using configurable aggregation rules.
The StrategyManager supports various combination methods for entry and exit signals:
- Entry: any, all, majority, weighted_consensus
- Exit: any, all, priority (with stop loss prioritization)
"""
from typing import Dict, List, Tuple, Optional
import logging
from .base import StrategyBase, StrategySignal
from .default_strategy import DefaultStrategy
from .bbrs_strategy import BBRSStrategy
from .random_strategy import RandomStrategy
class StrategyManager:
"""
Manages multiple strategies and combines their signals.
The StrategyManager loads multiple strategies from configuration,
initializes them with backtester data, and combines their signals
using configurable aggregation rules.
Attributes:
strategies (List[StrategyBase]): List of loaded strategies
combination_rules (Dict): Rules for combining signals
initialized (bool): Whether manager has been initialized
Example:
config = {
"strategies": [
{"name": "default", "weight": 0.6, "params": {}},
{"name": "bbrs", "weight": 0.4, "params": {"bb_width": 0.05}}
],
"combination_rules": {
"entry": "weighted_consensus",
"exit": "any",
"min_confidence": 0.6
}
}
manager = StrategyManager(config["strategies"], config["combination_rules"])
"""
def __init__(self, strategies_config: List[Dict], combination_rules: Optional[Dict] = None):
"""
Initialize the strategy manager.
Args:
strategies_config: List of strategy configurations
combination_rules: Rules for combining signals
"""
self.strategies = self._load_strategies(strategies_config)
self.combination_rules = combination_rules or {
"entry": "weighted_consensus",
"exit": "any",
"min_confidence": 0.5
}
self.initialized = False
def _load_strategies(self, strategies_config: List[Dict]) -> List[StrategyBase]:
"""
Load strategies from configuration.
Creates strategy instances based on configuration and registers
them with the manager. Supports extensible strategy registration.
Args:
strategies_config: List of strategy configurations
Returns:
List[StrategyBase]: List of instantiated strategies
Raises:
ValueError: If unknown strategy name is specified
"""
strategies = []
for config in strategies_config:
name = config.get("name", "").lower()
weight = config.get("weight", 1.0)
params = config.get("params", {})
if name == "default":
strategies.append(DefaultStrategy(weight, params))
elif name == "bbrs":
strategies.append(BBRSStrategy(weight, params))
elif name == "random":
strategies.append(RandomStrategy(weight, params))
else:
raise ValueError(f"Unknown strategy: {name}. "
f"Available strategies: default, bbrs, random")
return strategies
def initialize(self, backtester) -> None:
"""
Initialize all strategies with backtester data.
Calls the initialize method on each strategy, allowing them
to set up indicators, validate data, and prepare for trading.
Each strategy will handle its own timeframe resampling.
Args:
backtester: Backtest instance with OHLCV data
"""
for strategy in self.strategies:
try:
strategy.initialize(backtester)
# Log strategy timeframe information
timeframes = strategy.get_timeframes()
logging.info(f"Initialized strategy: {strategy.name} with timeframes: {timeframes}")
except Exception as e:
logging.error(f"Failed to initialize strategy {strategy.name}: {e}")
raise
self.initialized = True
logging.info(f"Strategy manager initialized with {len(self.strategies)} strategies")
# Log summary of all timeframes being used
all_timeframes = set()
for strategy in self.strategies:
all_timeframes.update(strategy.get_timeframes())
logging.info(f"Total unique timeframes in use: {sorted(all_timeframes)}")
def get_entry_signal(self, backtester, df_index: int) -> bool:
"""
Get combined entry signal from all strategies.
Collects entry signals from all strategies and combines them
according to the configured combination rules.
Args:
backtester: Backtest instance with current state
df_index: Current index in the dataframe
Returns:
bool: True if combined signal suggests entry, False otherwise
"""
if not self.initialized:
return False
# Collect signals from all strategies
signals = {}
for strategy in self.strategies:
try:
signal = strategy.get_entry_signal(backtester, df_index)
signals[strategy.name] = {
"signal": signal,
"weight": strategy.weight,
"confidence": signal.confidence
}
except Exception as e:
logging.warning(f"Strategy {strategy.name} entry signal failed: {e}")
signals[strategy.name] = {
"signal": StrategySignal("HOLD", 0.0),
"weight": strategy.weight,
"confidence": 0.0
}
return self._combine_entry_signals(signals)
def get_exit_signal(self, backtester, df_index: int) -> Tuple[Optional[str], Optional[float]]:
"""
Get combined exit signal from all strategies.
Collects exit signals from all strategies and combines them
according to the configured combination rules.
Args:
backtester: Backtest instance with current state
df_index: Current index in the dataframe
Returns:
Tuple[Optional[str], Optional[float]]: (exit_type, exit_price) or (None, None)
"""
if not self.initialized:
return None, None
# Collect signals from all strategies
signals = {}
for strategy in self.strategies:
try:
signal = strategy.get_exit_signal(backtester, df_index)
signals[strategy.name] = {
"signal": signal,
"weight": strategy.weight,
"confidence": signal.confidence
}
except Exception as e:
logging.warning(f"Strategy {strategy.name} exit signal failed: {e}")
signals[strategy.name] = {
"signal": StrategySignal("HOLD", 0.0),
"weight": strategy.weight,
"confidence": 0.0
}
return self._combine_exit_signals(signals)
def _combine_entry_signals(self, signals: Dict) -> bool:
"""
Combine entry signals based on combination rules.
Supports multiple combination methods:
- any: Enter if ANY strategy signals entry
- all: Enter only if ALL strategies signal entry
- majority: Enter if majority of strategies signal entry
- weighted_consensus: Enter based on weighted average confidence
Args:
signals: Dictionary of strategy signals with weights and confidence
Returns:
bool: Combined entry decision
"""
method = self.combination_rules.get("entry", "weighted_consensus")
min_confidence = self.combination_rules.get("min_confidence", 0.5)
# Filter for entry signals above minimum confidence
entry_signals = [
s for s in signals.values()
if s["signal"].signal_type == "ENTRY" and s["signal"].confidence >= min_confidence
]
if not entry_signals:
return False
if method == "any":
# Enter if any strategy signals entry
return len(entry_signals) > 0
elif method == "all":
# Enter only if all strategies signal entry
return len(entry_signals) == len(self.strategies)
elif method == "majority":
# Enter if majority of strategies signal entry
return len(entry_signals) > len(self.strategies) / 2
elif method == "weighted_consensus":
# Enter based on weighted average confidence
total_weight = sum(s["weight"] for s in entry_signals)
if total_weight == 0:
return False
weighted_confidence = sum(
s["signal"].confidence * s["weight"]
for s in entry_signals
) / total_weight
return weighted_confidence >= min_confidence
else:
logging.warning(f"Unknown entry combination method: {method}, using 'any'")
return len(entry_signals) > 0
def _combine_exit_signals(self, signals: Dict) -> Tuple[Optional[str], Optional[float]]:
"""
Combine exit signals based on combination rules.
Supports multiple combination methods:
- any: Exit if ANY strategy signals exit (recommended for risk management)
- all: Exit only if ALL strategies agree on exit
- priority: Exit based on priority order (STOP_LOSS > SELL_SIGNAL > others)
Args:
signals: Dictionary of strategy signals with weights and confidence
Returns:
Tuple[Optional[str], Optional[float]]: (exit_type, exit_price) or (None, None)
"""
method = self.combination_rules.get("exit", "any")
# Filter for exit signals
exit_signals = [
s for s in signals.values()
if s["signal"].signal_type == "EXIT"
]
if not exit_signals:
return None, None
if method == "any":
# Exit if any strategy signals exit (first one found)
for signal_data in exit_signals:
signal = signal_data["signal"]
exit_type = signal.metadata.get("type", "EXIT")
return exit_type, signal.price
elif method == "all":
# Exit only if all strategies agree on exit
if len(exit_signals) == len(self.strategies):
signal = exit_signals[0]["signal"]
exit_type = signal.metadata.get("type", "EXIT")
return exit_type, signal.price
elif method == "priority":
# Priority order: STOP_LOSS > SELL_SIGNAL > others
stop_loss_signals = [
s for s in exit_signals
if s["signal"].metadata.get("type") == "STOP_LOSS"
]
if stop_loss_signals:
signal = stop_loss_signals[0]["signal"]
return "STOP_LOSS", signal.price
sell_signals = [
s for s in exit_signals
if s["signal"].metadata.get("type") == "SELL_SIGNAL"
]
if sell_signals:
signal = sell_signals[0]["signal"]
return "SELL_SIGNAL", signal.price
# Return first available exit signal
signal = exit_signals[0]["signal"]
exit_type = signal.metadata.get("type", "EXIT")
return exit_type, signal.price
else:
logging.warning(f"Unknown exit combination method: {method}, using 'any'")
# Fallback to 'any' method
signal = exit_signals[0]["signal"]
exit_type = signal.metadata.get("type", "EXIT")
return exit_type, signal.price
return None, None
def get_strategy_summary(self) -> Dict:
"""
Get summary of loaded strategies and their configuration.
Returns:
Dict: Summary of strategies, weights, combination rules, and timeframes
"""
return {
"strategies": [
{
"name": strategy.name,
"weight": strategy.weight,
"params": strategy.params,
"timeframes": strategy.get_timeframes(),
"initialized": strategy.initialized
}
for strategy in self.strategies
],
"combination_rules": self.combination_rules,
"total_strategies": len(self.strategies),
"initialized": self.initialized,
"all_timeframes": list(set().union(*[strategy.get_timeframes() for strategy in self.strategies]))
}
def __repr__(self) -> str:
"""String representation of the strategy manager."""
strategy_names = [s.name for s in self.strategies]
return (f"StrategyManager(strategies={strategy_names}, "
f"initialized={self.initialized})")
def create_strategy_manager(config: Dict) -> StrategyManager:
"""
Factory function to create StrategyManager from configuration.
Provides a convenient way to create a StrategyManager instance
from a configuration dictionary.
Args:
config: Configuration dictionary with strategies and combination_rules
Returns:
StrategyManager: Configured strategy manager instance
Example:
config = {
"strategies": [
{"name": "default", "weight": 1.0, "params": {}}
],
"combination_rules": {
"entry": "any",
"exit": "any"
}
}
manager = create_strategy_manager(config)
"""
strategies_config = config.get("strategies", [])
combination_rules = config.get("combination_rules", {})
if not strategies_config:
raise ValueError("No strategies specified in configuration")
return StrategyManager(strategies_config, combination_rules)
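The `weighted_consensus` rule above is just a weight-normalised average of entry confidences compared against `min_confidence`; a worked example with two hypothetical strategies (weights and confidences invented for illustration):

```python
# Two ENTRY signals: e.g. default (weight 0.6, confidence 0.9) and bbrs (weight 0.4, confidence 0.5)
entry_signals = [
    {"weight": 0.6, "confidence": 0.9},
    {"weight": 0.4, "confidence": 0.5},
]
min_confidence = 0.6

total_weight = sum(s["weight"] for s in entry_signals)
weighted_confidence = sum(s["confidence"] * s["weight"] for s in entry_signals) / total_weight

print(round(weighted_confidence, 2))          # 0.74
print(weighted_confidence >= min_confidence)  # True -> combined entry signal
```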

View File

@ -1,218 +0,0 @@
"""
Random Strategy for Testing
This strategy generates random entry and exit signals for testing the strategy system.
It's useful for verifying that the strategy framework is working correctly.
"""
import random
import logging
from typing import Dict, List, Optional
import pandas as pd
from .base import StrategyBase, StrategySignal
logger = logging.getLogger(__name__)
class RandomStrategy(StrategyBase):
"""
Random signal generator strategy for testing.
This strategy generates random entry and exit signals with configurable
probability and confidence levels. It's designed to test the strategy
framework and signal processing system.
Parameters:
entry_probability: Probability of generating an entry signal (0.0-1.0)
exit_probability: Probability of generating an exit signal (0.0-1.0)
min_confidence: Minimum confidence level for signals
max_confidence: Maximum confidence level for signals
timeframe: Timeframe to operate on (default: "1min")
signal_frequency: How often to generate signals (every N bars)
"""
def __init__(self, weight: float = 1.0, params: Optional[Dict] = None):
"""Initialize the random strategy."""
super().__init__("random", weight, params)
# Strategy parameters with defaults
self.entry_probability = self.params.get("entry_probability", 0.05) # 5% chance per bar
self.exit_probability = self.params.get("exit_probability", 0.1) # 10% chance per bar
self.min_confidence = self.params.get("min_confidence", 0.6)
self.max_confidence = self.params.get("max_confidence", 0.9)
self.timeframe = self.params.get("timeframe", "1min")
self.signal_frequency = self.params.get("signal_frequency", 1) # Every bar
# Internal state
self.bar_count = 0
self.last_signal_bar = -1
self.last_processed_timestamp = None # Track last processed timestamp to avoid duplicates
logger.info(f"RandomStrategy initialized with entry_prob={self.entry_probability}, "
f"exit_prob={self.exit_probability}, timeframe={self.timeframe}")
def get_timeframes(self) -> List[str]:
"""Return required timeframes for this strategy."""
return [self.timeframe, "1min"] # Always include 1min for precision
def initialize(self, backtester) -> None:
"""Initialize strategy with backtester data."""
try:
logger.info(f"RandomStrategy: Starting initialization...")
# Resample data to required timeframes
self._resample_data(backtester.original_df)
# Get primary timeframe data
self.df = self.get_primary_timeframe_data()
if self.df is None or self.df.empty:
raise ValueError(f"No data available for timeframe {self.timeframe}")
# Reset internal state
self.bar_count = 0
self.last_signal_bar = -1
self.initialized = True
logger.info(f"RandomStrategy initialized with {len(self.df)} bars on {self.timeframe}")
logger.info(f"RandomStrategy: Data range from {self.df.index[0]} to {self.df.index[-1]}")
except Exception as e:
logger.error(f"Failed to initialize RandomStrategy: {e}")
logger.error(f"RandomStrategy: backtester.original_df shape: {backtester.original_df.shape if hasattr(backtester, 'original_df') else 'No original_df'}")
raise
def get_entry_signal(self, backtester, df_index: int) -> StrategySignal:
"""Generate random entry signals."""
if not self.initialized:
logger.warning(f"RandomStrategy: get_entry_signal called but not initialized")
return StrategySignal("HOLD", 0.0)
try:
# Get current timestamp to avoid duplicate signals
current_timestamp = None
if hasattr(backtester, 'original_df') and not backtester.original_df.empty:
current_timestamp = backtester.original_df.index[-1]
# Skip if we already processed this timestamp
if current_timestamp and self.last_processed_timestamp == current_timestamp:
return StrategySignal("HOLD", 0.0)
self.bar_count += 1
# Debug logging every 10 bars
if self.bar_count % 10 == 0:
logger.info(f"RandomStrategy: Processing bar {self.bar_count}, df_index={df_index}, timestamp={current_timestamp}")
# Check if we should generate a signal based on frequency
if (self.bar_count - self.last_signal_bar) < self.signal_frequency:
return StrategySignal("HOLD", 0.0)
# Generate random entry signal
random_value = random.random()
if random_value < self.entry_probability:
confidence = random.uniform(self.min_confidence, self.max_confidence)
self.last_signal_bar = self.bar_count
self.last_processed_timestamp = current_timestamp # Update last processed timestamp
# Get current price from backtester's original data (more reliable)
try:
if hasattr(backtester, 'original_df') and not backtester.original_df.empty:
# Use the last available price from the original data
current_price = backtester.original_df['close'].iloc[-1]
elif hasattr(backtester, 'df') and not backtester.df.empty:
# Fallback to backtester's main dataframe
current_price = backtester.df['close'].iloc[min(df_index, len(backtester.df)-1)]
else:
# Last resort: use our internal dataframe
current_price = self.df.iloc[min(df_index, len(self.df)-1)]['close']
except (IndexError, KeyError) as e:
logger.warning(f"RandomStrategy: Error getting current price: {e}, using fallback")
current_price = self.df.iloc[-1]['close'] if not self.df.empty else 50000.0
logger.info(f"RandomStrategy: Generated ENTRY signal at bar {self.bar_count}, "
f"price=${current_price:.2f}, confidence={confidence:.2f}, random_value={random_value:.3f}")
return StrategySignal(
"ENTRY",
confidence=confidence,
price=current_price,
metadata={
"strategy": "random",
"bar_count": self.bar_count,
"timeframe": self.timeframe
}
)
# Update timestamp even if no signal generated
if current_timestamp:
self.last_processed_timestamp = current_timestamp
return StrategySignal("HOLD", 0.0)
except Exception as e:
logger.error(f"RandomStrategy entry signal error: {e}")
return StrategySignal("HOLD", 0.0)
def get_exit_signal(self, backtester, df_index: int) -> StrategySignal:
"""Generate random exit signals."""
if not self.initialized:
return StrategySignal("HOLD", 0.0)
try:
# Only generate exit signals if we have an open position
# This is handled by the strategy trader, but we can add logic here
# Generate random exit signal
if random.random() < self.exit_probability:
confidence = random.uniform(self.min_confidence, self.max_confidence)
# Get current price from backtester's original data (more reliable)
try:
if hasattr(backtester, 'original_df') and not backtester.original_df.empty:
# Use the last available price from the original data
current_price = backtester.original_df['close'].iloc[-1]
elif hasattr(backtester, 'df') and not backtester.df.empty:
# Fallback to backtester's main dataframe
current_price = backtester.df['close'].iloc[min(df_index, len(backtester.df)-1)]
else:
# Last resort: use our internal dataframe
current_price = self.df.iloc[min(df_index, len(self.df)-1)]['close']
except (IndexError, KeyError) as e:
logger.warning(f"RandomStrategy: Error getting current price for exit: {e}, using fallback")
current_price = self.df.iloc[-1]['close'] if not self.df.empty else 50000.0
# Randomly choose exit type
exit_types = ["SELL_SIGNAL", "TAKE_PROFIT", "STOP_LOSS"]
exit_type = random.choice(exit_types)
logger.info(f"RandomStrategy: Generated EXIT signal at bar {self.bar_count}, "
f"price=${current_price:.2f}, confidence={confidence:.2f}, type={exit_type}")
return StrategySignal(
"EXIT",
confidence=confidence,
price=current_price,
metadata={
"type": exit_type,
"strategy": "random",
"bar_count": self.bar_count,
"timeframe": self.timeframe
}
)
return StrategySignal("HOLD", 0.0)
except Exception as e:
logger.error(f"RandomStrategy exit signal error: {e}")
return StrategySignal("HOLD", 0.0)
def get_confidence(self, backtester, df_index: int) -> float:
"""Return random confidence level."""
return random.uniform(self.min_confidence, self.max_confidence)
def __repr__(self) -> str:
"""String representation of the strategy."""
return (f"RandomStrategy(entry_prob={self.entry_probability}, "
f"exit_prob={self.exit_probability}, timeframe={self.timeframe})")

View File

@ -1,80 +1,5 @@
import pandas as pd
def check_data(data_df: pd.DataFrame):
"""
Checks that the input DataFrame has a DatetimeIndex and builds OHLCV aggregation rules.
Args:
data_df (pd.DataFrame): DataFrame to check.
Returns:
dict | bool: A dict of aggregation rules for the OHLCV columns that are present,
or False if the index is not a DatetimeIndex or no standard OHLCV columns exist.
"""
if not isinstance(data_df.index, pd.DatetimeIndex):
print("Warning: Input DataFrame must have a DatetimeIndex.")
return False
agg_rules = {}
# Define aggregation rules based on available columns
if 'open' in data_df.columns:
agg_rules['open'] = 'first'
if 'high' in data_df.columns:
agg_rules['high'] = 'max'
if 'low' in data_df.columns:
agg_rules['low'] = 'min'
if 'close' in data_df.columns:
agg_rules['close'] = 'last'
if 'volume' in data_df.columns:
agg_rules['volume'] = 'sum'
if not agg_rules:
print("Warning: No standard OHLCV columns (open, high, low, close, volume) found for daily aggregation.")
return False
return agg_rules
def aggregate_to_weekly(data_df: pd.DataFrame, weeks: int = 1) -> pd.DataFrame:
"""
Aggregates time-series financial data to weekly OHLCV format.
The input DataFrame is expected to have a DatetimeIndex.
'open' will be the first 'open' price of the week.
'close' will be the last 'close' price of the week.
'high' will be the maximum 'high' price of the week.
'low' will be the minimum 'low' price of the week.
'volume' (if present) will be the sum of volumes for the week.
Args:
data_df (pd.DataFrame): DataFrame with a DatetimeIndex and columns
like 'open', 'high', 'low', 'close', and optionally 'volume'.
weeks (int): The number of weeks to aggregate to. Default is 1.
Returns:
pd.DataFrame: DataFrame aggregated to weekly OHLCV data.
The index will be a DatetimeIndex with the time set to the start of the week.
Returns an empty DataFrame if no relevant OHLCV columns are found.
"""
agg_rules = check_data(data_df)
if not agg_rules:
print("Warning: No standard OHLCV columns (open, high, low, close, volume) found for weekly aggregation.")
return pd.DataFrame(index=pd.to_datetime([]))
# Resample to weekly frequency and apply aggregation rules
weekly_data = data_df.resample(f'{weeks}W').agg(agg_rules)
weekly_data.dropna(how='all', inplace=True)
# Adjust timestamps to the start of the week ('W' is a non-fixed frequency, so floor() would raise)
if not weekly_data.empty and isinstance(weekly_data.index, pd.DatetimeIndex):
weekly_data.index = weekly_data.index.to_period('W').start_time
return weekly_data
def aggregate_to_daily(data_df: pd.DataFrame) -> pd.DataFrame:
"""
Aggregates time-series financial data to daily OHLCV format.
@ -99,9 +24,23 @@ def aggregate_to_daily(data_df: pd.DataFrame) -> pd.DataFrame:
Raises:
ValueError: If the input DataFrame does not have a DatetimeIndex.
"""
agg_rules = check_data(data_df)
if not isinstance(data_df.index, pd.DatetimeIndex):
raise ValueError("Input DataFrame must have a DatetimeIndex.")
agg_rules = {}
# Define aggregation rules based on available columns
if 'open' in data_df.columns:
agg_rules['open'] = 'first'
if 'high' in data_df.columns:
agg_rules['high'] = 'max'
if 'low' in data_df.columns:
agg_rules['low'] = 'min'
if 'close' in data_df.columns:
agg_rules['close'] = 'last'
if 'volume' in data_df.columns:
agg_rules['volume'] = 'sum'
if not agg_rules:
# Log a warning or raise an error if no relevant columns are found
# For now, returning an empty DataFrame with a message might be suitable for some cases
@ -119,81 +58,3 @@ def aggregate_to_daily(data_df: pd.DataFrame) -> pd.DataFrame:
daily_data.dropna(how='all', inplace=True)
return daily_data
def aggregate_to_hourly(data_df: pd.DataFrame, hours: int = 1) -> pd.DataFrame:
"""
Aggregates time-series financial data to hourly OHLCV format.
The input DataFrame is expected to have a DatetimeIndex.
'open' will be the first 'open' price of the hour.
'close' will be the last 'close' price of the hour.
'high' will be the maximum 'high' price of the hour.
'low' will be the minimum 'low' price of the hour.
'volume' (if present) will be the sum of volumes for the hour.
Args:
data_df (pd.DataFrame): DataFrame with a DatetimeIndex and columns
like 'open', 'high', 'low', 'close', and optionally 'volume'.
hours (int): The number of hours to aggregate to. Default is 1.
Returns:
pd.DataFrame: DataFrame aggregated to hourly OHLCV data.
The index will be a DatetimeIndex with the time set to the start of the hour.
Returns an empty DataFrame if no relevant OHLCV columns are found.
"""
agg_rules = check_data(data_df)
if not agg_rules:
print("Warning: No standard OHLCV columns (open, high, low, close, volume) found for hourly aggregation.")
return pd.DataFrame(index=pd.to_datetime([]))
# Resample to hourly frequency and apply aggregation rules
hourly_data = data_df.resample(f'{hours}h').agg(agg_rules)
hourly_data.dropna(how='all', inplace=True)
# Adjust timestamps to the start of the hour
if not hourly_data.empty and isinstance(hourly_data.index, pd.DatetimeIndex):
hourly_data.index = hourly_data.index.floor('h')
return hourly_data
def aggregate_to_minutes(data_df: pd.DataFrame, minutes: int) -> pd.DataFrame:
"""
Aggregates time-series financial data to N-minute OHLCV format.
The input DataFrame is expected to have a DatetimeIndex.
'open' will be the first 'open' price of the N-minute interval.
'close' will be the last 'close' price of the N-minute interval.
'high' will be the maximum 'high' price of the N-minute interval.
'low' will be the minimum 'low' price of the N-minute interval.
'volume' (if present) will be the sum of volumes for the N-minute interval.
Args:
data_df (pd.DataFrame): DataFrame with a DatetimeIndex and columns
like 'open', 'high', 'low', 'close', and optionally 'volume'.
minutes (int): The number of minutes to aggregate to.
Returns:
pd.DataFrame: DataFrame aggregated to N-minute OHLCV data.
The index will be a DatetimeIndex.
Returns an empty DataFrame if no relevant OHLCV columns are found or
if the input DataFrame does not have a DatetimeIndex.
"""
agg_rules_obj = check_data(data_df) # check_data returns rules or False
if not agg_rules_obj:
# check_data already prints a warning if index is not DatetimeIndex or no OHLCV columns
# Ensure an empty DataFrame with a DatetimeIndex is returned for consistency
return pd.DataFrame(index=pd.to_datetime([]))
# Resample to N-minute frequency and apply aggregation rules
# Using .agg(agg_rules_obj) where agg_rules_obj is the dict from check_data
resampled_data = data_df.resample(f'{minutes}min').agg(agg_rules_obj)
resampled_data.dropna(how='all', inplace=True)
return resampled_data
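`check_data` above returns either an aggregation-rule dict or `False`; a short standalone sketch of the rule-building it performs, using a frame that only carries `close` and `volume` columns (synthetic data, illustrative only):

```python
import pandas as pd

idx = pd.date_range("2024-01-01", periods=10, freq="1min")
df = pd.DataFrame({"close": range(10), "volume": [1] * 10}, index=idx)

# Mirror of the rule building in check_data: only the columns that exist get a rule
agg_rules = {}
for col, rule in [("open", "first"), ("high", "max"), ("low", "min"),
                  ("close", "last"), ("volume", "sum")]:
    if col in df.columns:
        agg_rules[col] = rule

print(agg_rules)                      # {'close': 'last', 'volume': 'sum'}
print(df.resample("5min").agg(agg_rules))
```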

View File

@ -8,7 +8,6 @@ The `Analysis` module includes classes for calculating common technical indicato
- **Relative Strength Index (RSI)**: Implemented in `cycles/Analysis/rsi.py`.
- **Bollinger Bands**: Implemented in `cycles/Analysis/boillinger_band.py`.
- Note: Trading strategies are detailed in `strategies.md`.
## Class: `RSI`
@ -16,91 +15,64 @@ Found in `cycles/Analysis/rsi.py`.
Calculates the Relative Strength Index.
### Mathematical Model
The standard RSI calculation typically involves Wilder's smoothing for average gains and losses.
1. **Price Change (Delta)**: Difference between consecutive closing prices.
2. **Gain and Loss**: Separate positive (gain) and negative (loss, expressed as positive) price changes.
3. **Average Gain (AvgU)** and **Average Loss (AvgD)**: Smoothed averages of gains and losses over the RSI period. Wilder's smoothing is a specific type of exponential moving average (EMA):
- Initial AvgU/AvgD: Simple Moving Average (SMA) over the first `period` values.
- Subsequent AvgU: `(Previous AvgU * (period - 1) + Current Gain) / period`
- Subsequent AvgD: `(Previous AvgD * (period - 1) + Current Loss) / period`
4. **Relative Strength (RS)**:
$$
RS = \frac{\text{AvgU}}{\text{AvgD}}
$$
5. **RSI**:
$$
RSI = 100 - \frac{100}{1 + RS}
$$
Special conditions:
- If AvgD is 0: RSI is 100 if AvgU > 0, or 50 if AvgU is also 0 (neutral).
1. **Average Gain (AvgU)** and **Average Loss (AvgD)** over 14 periods:
$$
\text{AvgU} = \frac{\sum \text{Upward Price Changes}}{14}, \quad \text{AvgD} = \frac{\sum \text{Downward Price Changes}}{14}
$$
2. **Relative Strength (RS)**:
$$
RS = \frac{\text{AvgU}}{\text{AvgD}}
$$
3. **RSI**:
$$
RSI = 100 - \frac{100}{1 + RS}
$$
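For the Wilder-smoothed variant described above, a minimal pandas sketch (not the repository's `RSI` class, just the formula, using `ewm(alpha=1/period)` as the smoothing; the seeding differs slightly from the SMA-initialised definition):

```python
import pandas as pd

def wilder_rsi(close: pd.Series, period: int = 14) -> pd.Series:
    delta = close.diff()
    gain = delta.clip(lower=0)
    loss = -delta.clip(upper=0)
    # Wilder's smoothing is a recursive EMA with alpha = 1 / period
    avg_gain = gain.ewm(alpha=1 / period, min_periods=period, adjust=False).mean()
    avg_loss = loss.ewm(alpha=1 / period, min_periods=period, adjust=False).mean()
    rs = avg_gain / avg_loss
    return 100 - 100 / (1 + rs)

prices = pd.Series([44.0, 44.3, 44.1, 44.5, 44.8, 45.0, 44.7, 44.9, 45.2, 45.4,
                    45.1, 45.3, 45.6, 45.8, 45.5, 45.9])
print(wilder_rsi(prices).round(2).tail())
```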
### `__init__(self, config: dict)`
### `__init__(self, period: int = 14)`
- **Description**: Initializes the RSI calculator.
- **Parameters**:
  - `config` (dict): Configuration dictionary. Must contain an `'rsi_period'` key with a positive integer value (e.g., `{'rsi_period': 14}`).
- **Parameters**:
- `period` (int, optional): The period for RSI calculation. Defaults to 14. Must be a positive integer.
### `calculate(self, data_df: pd.DataFrame, price_column: str = 'close') -> pd.DataFrame`
- **Description**: Calculates the RSI (using Wilder's smoothing by default) and adds it as an 'RSI' column to the input DataFrame. This method utilizes `calculate_custom_rsi` internally with `smoothing='EMA'`.
- **Parameters**:
  - `data_df` (pd.DataFrame): DataFrame with historical price data. Must contain the `price_column`.
  - `price_column` (str, optional): The name of the column containing price data. Defaults to 'close'.
- **Returns**: `pd.DataFrame` - A copy of the input DataFrame with an added 'RSI' column. If data length is insufficient for the period, the 'RSI' column will contain `np.nan`.
### `calculate_custom_rsi(price_series: pd.Series, window: int = 14, smoothing: str = 'SMA') -> pd.Series` (Static Method)
- **Description**: Calculates RSI with a specified window and smoothing method (SMA or EMA). This is the core calculation engine.
- **Description**: Calculates the RSI and adds it as an 'RSI' column to the input DataFrame. Handles cases where data length is less than the period by returning the original DataFrame with a warning.
- **Parameters**:
- `price_series` (pd.Series): Series of prices.
- `window` (int, optional): The period for RSI calculation. Defaults to 14. Must be a positive integer.
- `smoothing` (str, optional): Smoothing method, can be 'SMA' (Simple Moving Average) or 'EMA' (Exponential Moving Average, specifically Wilder's smoothing when `alpha = 1/window`). Defaults to 'SMA'.
- **Returns**: `pd.Series` - Series containing the RSI values. Returns a series of NaNs if data length is insufficient.
- `data_df` (pd.DataFrame): DataFrame with historical price data. Must contain the `price_column`.
- `price_column` (str, optional): The name of the column containing price data. Defaults to 'close'.
- **Returns**: `pd.DataFrame` - The input DataFrame with an added 'RSI' column (containing `np.nan` for initial periods where RSI cannot be calculated). Returns a copy of the original DataFrame if the period is larger than the number of data points.
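As a reference, here is a minimal pandas sketch of the SMA/EMA RSI calculation described above; the function name and the exact NaN handling are illustrative rather than copied from `cycles/Analysis/rsi.py`.
```python
import numpy as np
import pandas as pd

def custom_rsi_sketch(price_series: pd.Series, window: int = 14, smoothing: str = "SMA") -> pd.Series:
    """Illustrative RSI: SMA smoothing or Wilder's EMA (alpha = 1/window)."""
    if len(price_series) < window + 1:
        return pd.Series(np.nan, index=price_series.index)

    delta = price_series.diff()
    gains = delta.clip(lower=0.0)
    losses = -delta.clip(upper=0.0)

    if smoothing == "EMA":
        # Wilder's smoothing is an EMA with alpha = 1/window
        avg_gain = gains.ewm(alpha=1.0 / window, min_periods=window, adjust=False).mean()
        avg_loss = losses.ewm(alpha=1.0 / window, min_periods=window, adjust=False).mean()
    else:
        avg_gain = gains.rolling(window=window, min_periods=window).mean()
        avg_loss = losses.rolling(window=window, min_periods=window).mean()

    rs = avg_gain / avg_loss
    rsi = 100.0 - 100.0 / (1.0 + rs)
    # Special cases: AvgD == 0 -> 100 if AvgU > 0, else 50 (neutral)
    rsi = rsi.mask((avg_loss == 0) & (avg_gain > 0), 100.0)
    rsi = rsi.mask((avg_loss == 0) & (avg_gain == 0), 50.0)
    return rsi
```
Calling it with `smoothing='EMA'` reproduces the Wilder-style behaviour that `calculate` relies on.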
## Class: `BollingerBands`
Found in `cycles/Analysis/boillinger_band.py`.
Calculates Bollinger Bands.
### Mathematical Model
1. **Middle Band**: Simple Moving Average (SMA) of the price over `period` (20 by default):
$$
\text{Middle Band} = \text{SMA}(\text{price}, \text{period}) = \frac{1}{\text{period}} \sum_{i=0}^{\text{period}-1} \text{Close}_{t-i}
$$
2. **Standard Deviation (σ)**: Standard deviation of the price over the same `period`.
3. **Upper Band**: Middle Band + `num_std` × σ (2.0 by default):
$$
\text{Upper Band} = \text{Middle Band} + \text{num\_std} \times \sigma_{\text{period}}
$$
4. **Lower Band**: Middle Band − `num_std` × σ:
$$
\text{Lower Band} = \text{Middle Band} - \text{num\_std} \times \sigma_{\text{period}}
$$
For the adaptive calculation in the `calculate` method (when `squeeze=False`):
- **BBWidth**: `(Reference Upper Band - Reference Lower Band) / SMA`, where reference bands are typically calculated using a 2.0 standard deviation multiplier.
- **MarketRegime**: Determined by comparing `BBWidth` to a threshold from the configuration. `1` for sideways, `0` for trending.
- The `num_std` used for the final Upper and Lower Bands then varies based on this `MarketRegime` and the `bb_std_dev_multiplier` values for "trending" and "sideways" markets from the configuration, applied row-wise. A sketch of this row-wise selection follows below.
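A rough pandas sketch of this adaptive calculation is shown below. The column names follow the documentation, while the helper name, the config keys used here, and the direction of the `BBWidth` threshold comparison (narrow bands treated as sideways) are assumptions.
```python
import pandas as pd

def adaptive_bands_sketch(df: pd.DataFrame, config: dict, price_column: str = "close") -> pd.DataFrame:
    out = df.copy()
    period = config["bb_period"]

    sma = out[price_column].rolling(period).mean()
    std = out[price_column].rolling(period).std()

    # Reference bands with a fixed 2.0 multiplier, used only to measure band width
    ref_upper = sma + 2.0 * std
    ref_lower = sma - 2.0 * std
    out["SMA"] = sma
    out["BBWidth"] = (ref_upper - ref_lower) / sma

    # 1 = sideways (narrow bands), 0 = trending (wide bands); threshold direction assumed
    out["MarketRegime"] = (out["BBWidth"] < config["bb_width"]).astype(int)

    # Row-wise multiplier depending on the detected regime
    num_std = out["MarketRegime"].map({
        1: config["sideways"]["bb_std_dev_multiplier"],
        0: config["trending"]["bb_std_dev_multiplier"],
    })
    out["UpperBand"] = sma + num_std * std
    out["LowerBand"] = sma - num_std * std
    return out
```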
### `__init__(self, config: dict)`
- **Description**: Initializes the BollingerBands calculator.
- **Parameters**:
  - `config` (dict): Configuration dictionary. It must contain:
    - `'bb_period'` (int): Positive integer for the moving average and standard deviation period.
    - `'trending'` (dict): Contains `'bb_std_dev_multiplier'` (float, positive) for trending markets.
    - `'sideways'` (dict): Contains `'bb_std_dev_multiplier'` (float, positive) for sideways markets.
    - `'bb_width'` (float): Positive float threshold for determining the market regime.
### `calculate(self, data_df: pd.DataFrame, price_column: str = 'close', squeeze: bool = False) -> pd.DataFrame`
- **Description**: Calculates Bollinger Bands and adds relevant columns to the DataFrame.
- If `squeeze` is `False` (default): Calculates adaptive Bollinger Bands. It determines the market regime (trending/sideways) based on `BBWidth` and applies different standard deviation multipliers (from the `config`) on a row-by-row basis. Adds 'SMA', 'UpperBand', 'LowerBand', 'BBWidth', and 'MarketRegime' columns.
- If `squeeze` is `True`: Calculates simpler Bollinger Bands with a fixed window of 14 and a standard deviation multiplier of 1.5 by calling `calculate_custom_bands`. Adds 'SMA', 'UpperBand', 'LowerBand' columns; 'BBWidth' and 'MarketRegime' will be `NaN`.
- **Parameters**:
  - `data_df` (pd.DataFrame): DataFrame with price data. Must include the `price_column`.
  - `price_column` (str, optional): The name of the column containing the price data. Defaults to 'close'.
  - `squeeze` (bool, optional): If `True`, calculates bands with fixed parameters (window 14, std 1.5). Defaults to `False`.
- **Returns**: `pd.DataFrame` - A copy of the original DataFrame with added Bollinger Band related columns.
### `calculate_custom_bands(price_series: pd.Series, window: int = 20, num_std: float = 2.0, min_periods: int = None) -> tuple[pd.Series, pd.Series, pd.Series]` (Static Method)
- **Description**: Calculates Bollinger Bands with a specified window, standard deviation multiplier, and minimum periods.
- **Parameters**:
- `price_series` (pd.Series): Series of prices.
- `window` (int, optional): The period for the moving average and standard deviation. Defaults to 20.
- `num_std` (float, optional): The number of standard deviations for the upper and lower bands. Defaults to 2.0.
- `min_periods` (int, optional): Minimum number of observations in window required to have a value. Defaults to `window` if `None`.
- **Returns**: `tuple[pd.Series, pd.Series, pd.Series]` - A tuple containing the Upper band, SMA, and Lower band series.
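For example, the static helper can be called directly on a price series; the tuple unpacking below follows the documented ordering (Upper band, SMA, Lower band):
```python
import pandas as pd
from cycles.Analysis.boillinger_band import BollingerBands

prices = pd.Series([100, 101, 99, 102, 103, 101, 104, 105, 103, 106], dtype=float)

# Short window for this tiny example series; real use would keep window=20
upper, sma, lower = BollingerBands.calculate_custom_bands(prices, window=5, num_std=2.0)

print(pd.DataFrame({"upper": upper, "sma": sma, "lower": lower}).tail())
```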
### `__init__(self, period: int = 20, std_dev_multiplier: float = 2.0)`
- **Description**: Simplified constructor that takes the band parameters directly.
- **Parameters**:
  - `period` (int, optional): The period for the moving average and standard deviation. Defaults to 20. Must be a positive integer.
  - `std_dev_multiplier` (float, optional): The number of standard deviations for the upper and lower bands. Defaults to 2.0. Must be positive.
### `calculate(self, data_df: pd.DataFrame, price_column: str = 'close') -> pd.DataFrame`
- **Description**: Calculates fixed-parameter Bollinger Bands and adds 'SMA' (Simple Moving Average), 'UpperBand', and 'LowerBand' columns to the DataFrame.
- **Parameters**:
  - `data_df` (pd.DataFrame): DataFrame with price data. Must include the `price_column`.
  - `price_column` (str, optional): The name of the column containing the price data (e.g., 'close'). Defaults to 'close'.
- **Returns**: `pd.DataFrame` - The original DataFrame with added columns: 'SMA', 'UpperBand', 'LowerBand'.

View File

@ -1,405 +0,0 @@
# Strategies Documentation
## Overview
The Cycles framework implements advanced trading strategies with sophisticated timeframe management, signal processing, and multi-strategy combination capabilities. Each strategy can operate on its preferred timeframes while maintaining precise execution control.
## Architecture
### Strategy System Components
1. **StrategyBase**: Abstract base class with timeframe management
2. **Individual Strategies**: DefaultStrategy, BBRSStrategy implementations
3. **StrategyManager**: Multi-strategy orchestration and signal combination
4. **Timeframe System**: Automatic data resampling and signal mapping
### New Timeframe Management
Each strategy now controls its own timeframe requirements:
```python
class MyStrategy(StrategyBase):
def get_timeframes(self):
return ["15min", "1h"] # Strategy specifies needed timeframes
def initialize(self, backtester):
# Framework automatically resamples data
self._resample_data(backtester.original_df)
# Access resampled data
data_15m = self.get_data_for_timeframe("15min")
data_1h = self.get_data_for_timeframe("1h")
```
## Available Strategies
### 1. Default Strategy (Meta-Trend Analysis)
**Purpose**: Meta-trend analysis using multiple Supertrend indicators
**Timeframe Behavior**:
- **Configurable Primary Timeframe**: Set via `params["timeframe"]` (default: "15min")
- **1-Minute Precision**: Always includes 1min data for precise stop-loss execution
- **Example Timeframes**: `["15min", "1min"]` or `["5min", "1min"]`
**Configuration**:
```json
{
"name": "default",
"weight": 1.0,
"params": {
"timeframe": "15min", // Configurable: "5min", "15min", "1h", etc.
"stop_loss_pct": 0.03 // Stop loss percentage
}
}
```
**Algorithm**:
1. Calculate 3 Supertrend indicators with different parameters on primary timeframe
2. Determine meta-trend: all three must agree for directional signal
3. **Entry**: Meta-trend changes from != 1 to == 1 (all trends align upward)
4. **Exit**: Meta-trend changes to -1 (trend reversal) or stop-loss triggered
5. **Stop-Loss**: 1-minute precision using percentage-based threshold
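To make the meta-trend rule above concrete, here is a minimal sketch of the agreement and flip detection; the three Supertrend direction series (values +1/-1) are assumed to exist already, and the names are illustrative:
```python
import pandas as pd

def meta_trend_sketch(st1: pd.Series, st2: pd.Series, st3: pd.Series) -> pd.DataFrame:
    """+1 when all three Supertrend directions agree up, -1 when all agree down, 0 otherwise."""
    all_up = (st1 == 1) & (st2 == 1) & (st3 == 1)
    all_down = (st1 == -1) & (st2 == -1) & (st3 == -1)

    meta = pd.Series(0, index=st1.index)
    meta[all_up] = 1
    meta[all_down] = -1

    # Entry when the meta-trend flips to +1, exit when it flips to -1 (steps 3 and 4 above)
    entry = (meta == 1) & (meta.shift(1) != 1)
    exit_ = (meta == -1) & (meta.shift(1) != -1)
    return pd.DataFrame({"meta_trend": meta, "entry": entry, "exit": exit_})
```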
**Strengths**:
- Robust trend following with multiple confirmations
- Configurable for different market timeframes
- Precise risk management
- Low false signals in trending markets
**Best Use Cases**:
- Medium to long-term trend following
- Markets with clear directional movements
- Risk-conscious trading with defined exits
### 2. BBRS Strategy (Bollinger Bands + RSI)
**Purpose**: Market regime-adaptive strategy combining Bollinger Bands and RSI
**Timeframe Behavior**:
- **1-Minute Input**: Strategy receives 1-minute data
- **Internal Resampling**: Underlying Strategy class handles resampling to 15min/1h
- **No Double-Resampling**: Avoids conflicts with existing resampling logic
- **Signal Mapping**: Results mapped back to 1-minute resolution
**Configuration**:
```json
{
"name": "bbrs",
"weight": 1.0,
"params": {
"bb_width": 0.05, // Bollinger Band width threshold
"bb_period": 20, // Bollinger Band period
"rsi_period": 14, // RSI calculation period
"trending_rsi_threshold": [30, 70], // RSI thresholds for trending market
"trending_bb_multiplier": 2.5, // BB multiplier for trending market
"sideways_rsi_threshold": [40, 60], // RSI thresholds for sideways market
"sideways_bb_multiplier": 1.8, // BB multiplier for sideways market
"strategy_name": "MarketRegimeStrategy", // Implementation variant
"SqueezeStrategy": true, // Enable squeeze detection
"stop_loss_pct": 0.05 // Stop loss percentage
}
}
```
**Algorithm**:
**MarketRegimeStrategy** (Primary Implementation):
1. **Market Regime Detection**: Determines if market is trending or sideways
2. **Adaptive Parameters**: Adjusts BB/RSI thresholds based on market regime
3. **Trending Market Entry**: Price < Lower Band ∧ RSI < 50 ∧ Volume Spike
4. **Sideways Market Entry**: Price ≤ Lower Band ∧ RSI ≤ 40
5. **Exit Conditions**: Opposite band touch, RSI reversal, or stop-loss
6. **Volume Confirmation**: Requires 1.5× average volume for trending signals
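A boolean-mask sketch of these entry rules might look like the following; the column names follow the indicator documentation, while the helper name and the 20-bar volume average are assumptions:
```python
import pandas as pd

def regime_entry_mask_sketch(df: pd.DataFrame) -> pd.Series:
    """Entry mask in the spirit of MarketRegimeStrategy; df needs close, LowerBand, RSI, volume, MarketRegime."""
    volume_spike = df["volume"] > 1.5 * df["volume"].rolling(20).mean()  # 20-bar average is an assumption

    trending_entry = (df["MarketRegime"] == 0) & (df["close"] < df["LowerBand"]) & (df["RSI"] < 50) & volume_spike
    sideways_entry = (df["MarketRegime"] == 1) & (df["close"] <= df["LowerBand"]) & (df["RSI"] <= 40)
    return trending_entry | sideways_entry
```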
**CryptoTradingStrategy** (Alternative Implementation):
1. **Multi-Timeframe Analysis**: Combines 15-minute and 1-hour Bollinger Bands
2. **Entry**: Price ≤ both 15m & 1h lower bands + RSI < 35 + Volume surge
3. **Exit**: 2:1 risk-reward ratio with ATR-based stops
4. **Adaptive Volatility**: Uses ATR for dynamic stop-loss/take-profit
**Strengths**:
- Adapts to different market regimes
- Multiple timeframe confirmation (internal)
- Volume analysis for signal quality
- Sophisticated entry/exit conditions
**Best Use Cases**:
- Volatile cryptocurrency markets
- Markets with alternating trending/sideways periods
- Short to medium-term trading
## Strategy Combination
### Multi-Strategy Architecture
The StrategyManager allows combining multiple strategies with configurable rules:
```json
{
"strategies": [
{
"name": "default",
"weight": 0.6,
"params": {"timeframe": "15min"}
},
{
"name": "bbrs",
"weight": 0.4,
"params": {"strategy_name": "MarketRegimeStrategy"}
}
],
"combination_rules": {
"entry": "weighted_consensus",
"exit": "any",
"min_confidence": 0.6
}
}
```
### Signal Combination Methods
**Entry Combinations**:
- **`any`**: Enter if ANY strategy signals entry
- **`all`**: Enter only if ALL strategies signal entry
- **`majority`**: Enter if majority of strategies signal entry
- **`weighted_consensus`**: Enter based on weighted confidence average
**Exit Combinations**:
- **`any`**: Exit if ANY strategy signals exit (recommended for risk management)
- **`all`**: Exit only if ALL strategies agree
- **`priority`**: Prioritized exit (STOP_LOSS > SELL_SIGNAL > others)
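As an illustration (not the exact `StrategyManager` implementation), a weighted-consensus entry decision could be computed like this, where each strategy contributes its confidence scaled by its weight:
```python
from typing import List, Tuple

def weighted_consensus_entry(signals: List[Tuple[str, float, float]], min_confidence: float = 0.6) -> bool:
    """signals: (signal_type, confidence, weight) per strategy, e.g. ("ENTRY", 0.8, 0.6)."""
    total_weight = sum(weight for _, _, weight in signals)
    if total_weight == 0:
        return False
    # Strategies that do not signal ENTRY contribute zero confidence
    score = sum(conf * weight for sig, conf, weight in signals if sig == "ENTRY") / total_weight
    return score >= min_confidence

# Example: 0.8 * 0.6 / 1.0 = 0.48 < 0.6, so no combined entry
print(weighted_consensus_entry([("ENTRY", 0.8, 0.6), ("HOLD", 0.0, 0.4)]))
```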
## Performance Characteristics
### Default Strategy Performance
**Strengths**:
- **Trend Accuracy**: High accuracy in strong trending markets
- **Risk Management**: Defined stop-losses with 1-minute precision
- **Low Noise**: Multiple Supertrend confirmation reduces false signals
- **Adaptable**: Works across different timeframes
**Weaknesses**:
- **Sideways Markets**: May generate false signals in ranging markets
- **Lag**: Multiple confirmations can delay entry/exit signals
- **Whipsaws**: Vulnerable to rapid trend reversals
**Optimal Conditions**:
- Clear trending markets
- Medium to low volatility trending
- Sufficient data history for Supertrend calculation
### BBRS Strategy Performance
**Strengths**:
- **Market Adaptation**: Automatically adjusts to market regime
- **Volume Confirmation**: Reduces false signals with volume analysis
- **Multi-Timeframe**: Internal analysis across multiple timeframes
- **Volatility Handling**: Designed for cryptocurrency volatility
**Weaknesses**:
- **Complexity**: More parameters to optimize
- **Market Noise**: Can be sensitive to short-term noise
- **Volume Dependency**: Requires reliable volume data
**Optimal Conditions**:
- High-volume cryptocurrency markets
- Markets with clear regime shifts
- Sufficient data for regime detection
## Usage Examples
### Single Strategy Backtests
```bash
# Default strategy on 15-minute timeframe
uv run .\main.py .\configs\config_default.json
# Default strategy on 5-minute timeframe
uv run .\main.py .\configs\config_default_5min.json
# BBRS strategy with market regime detection
uv run .\main.py .\configs\config_bbrs.json
```
### Multi-Strategy Backtests
```bash
# Combined strategies with weighted consensus
uv run .\main.py .\configs\config_combined.json
```
### Custom Configurations
**Aggressive Default Strategy**:
```json
{
"name": "default",
"params": {
"timeframe": "5min", // Faster signals
"stop_loss_pct": 0.02 // Tighter stop-loss
}
}
```
**Conservative BBRS Strategy**:
```json
{
"name": "bbrs",
"params": {
"bb_width": 0.03, // Tighter BB width
"stop_loss_pct": 0.07, // Wider stop-loss
"SqueezeStrategy": false // Disable squeeze for simplicity
}
}
```
## Development Guidelines
### Creating New Strategies
1. **Inherit from StrategyBase**:
```python
from cycles.strategies.base import StrategyBase, StrategySignal
class NewStrategy(StrategyBase):
def __init__(self, weight=1.0, params=None):
super().__init__("new_strategy", weight, params)
```
2. **Specify Timeframes**:
```python
def get_timeframes(self):
return ["1h"] # Specify required timeframes
```
3. **Implement Core Methods**:
```python
def initialize(self, backtester):
self._resample_data(backtester.original_df)
# Calculate indicators...
self.initialized = True
def get_entry_signal(self, backtester, df_index):
# Entry logic...
return StrategySignal("ENTRY", confidence=0.8)
def get_exit_signal(self, backtester, df_index):
# Exit logic...
return StrategySignal("EXIT", confidence=1.0)
```
4. **Register Strategy**:
```python
# In StrategyManager._load_strategies()
elif name == "new_strategy":
strategies.append(NewStrategy(weight, params))
```
### Timeframe Best Practices
1. **Minimize Timeframe Requirements**:
```python
def get_timeframes(self):
return ["15min"] # Only what's needed
```
2. **Include 1min for Stop-Loss**:
```python
def get_timeframes(self):
primary_tf = self.params.get("timeframe", "15min")
timeframes = [primary_tf]
if "1min" not in timeframes:
timeframes.append("1min")
return timeframes
```
3. **Handle Multi-Timeframe Synchronization**:
```python
def get_entry_signal(self, backtester, df_index):
# Get current timestamp from primary timeframe
primary_data = self.get_primary_timeframe_data()
current_time = primary_data.index[df_index]
# Map to other timeframes
hourly_data = self.get_data_for_timeframe("1h")
h1_idx = hourly_data.index.get_indexer([current_time], method='ffill')[0]
```
## Testing and Validation
### Strategy Testing Workflow
1. **Individual Strategy Testing**:
- Test each strategy independently
- Validate on different timeframes
- Check edge cases and data sufficiency
2. **Multi-Strategy Testing**:
- Test strategy combinations
- Validate combination rules
- Monitor for signal conflicts
3. **Timeframe Validation**:
- Ensure consistent behavior across timeframes
- Validate data alignment
- Check memory usage with large datasets
### Performance Monitoring
```python
# Get strategy summary
summary = strategy_manager.get_strategy_summary()
print(f"Strategies: {[s['name'] for s in summary['strategies']]}")
print(f"Timeframes: {summary['all_timeframes']}")
# Monitor individual strategy performance
for strategy in strategy_manager.strategies:
print(f"{strategy.name}: {strategy.get_timeframes()}")
```
## Advanced Topics
### Multi-Timeframe Strategy Development
For strategies requiring multiple timeframes:
```python
class MultiTimeframeStrategy(StrategyBase):
def get_timeframes(self):
return ["5min", "15min", "1h"]
def get_entry_signal(self, backtester, df_index):
# Analyze multiple timeframes
data_5m = self.get_data_for_timeframe("5min")
data_15m = self.get_data_for_timeframe("15min")
data_1h = self.get_data_for_timeframe("1h")
# Synchronize across timeframes
current_time = data_5m.index[df_index]
idx_15m = data_15m.index.get_indexer([current_time], method='ffill')[0]
idx_1h = data_1h.index.get_indexer([current_time], method='ffill')[0]
# Multi-timeframe logic
short_signal = self._analyze_5min(data_5m, df_index)
medium_signal = self._analyze_15min(data_15m, idx_15m)
long_signal = self._analyze_1h(data_1h, idx_1h)
# Combine signals with appropriate confidence
if short_signal and medium_signal and long_signal:
return StrategySignal("ENTRY", confidence=0.9)
elif short_signal and medium_signal:
return StrategySignal("ENTRY", confidence=0.7)
else:
return StrategySignal("HOLD", confidence=0.0)
```
### Strategy Optimization
1. **Parameter Optimization**: Systematic testing of strategy parameters
2. **Timeframe Optimization**: Finding optimal timeframes for each strategy
3. **Combination Optimization**: Optimizing weights and combination rules
4. **Market Regime Adaptation**: Adapting strategies to different market conditions
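A minimal sketch of parameter optimization, assuming a hypothetical `run_backtest(config)` helper, is simply a grid search over the strategy parameters:
```python
from itertools import product

# Hypothetical grid search over DefaultStrategy parameters.
# run_backtest(config) is an assumed helper that returns a performance metric.
timeframes = ["5min", "15min", "1h"]
stop_losses = [0.02, 0.03, 0.05]

configs = [
    {"name": "default", "weight": 1.0,
     "params": {"timeframe": tf, "stop_loss_pct": sl}}
    for tf, sl in product(timeframes, stop_losses)
]
# results = {(c["params"]["timeframe"], c["params"]["stop_loss_pct"]): run_backtest(c) for c in configs}
```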
For detailed timeframe system documentation, see [Timeframe System](./timeframe_system.md).

View File

@ -1,390 +0,0 @@
# Strategy Manager Documentation
## Overview
The Strategy Manager is a sophisticated orchestration system that enables the combination of multiple trading strategies with configurable signal aggregation rules. It supports multi-timeframe analysis, weighted consensus voting, and flexible signal combination methods.
## Architecture
### Core Components
1. **StrategyBase**: Abstract base class defining the strategy interface
2. **StrategySignal**: Encapsulates trading signals with confidence levels
3. **StrategyManager**: Orchestrates multiple strategies and combines signals
4. **Strategy Implementations**: DefaultStrategy, BBRSStrategy, etc.
### New Timeframe System
The framework now supports strategy-level timeframe management:
- **Strategy-Controlled Timeframes**: Each strategy specifies its required timeframes
- **Automatic Data Resampling**: Framework automatically resamples 1-minute data to strategy needs
- **Multi-Timeframe Support**: Strategies can use multiple timeframes simultaneously
- **Precision Stop-Loss**: All strategies maintain 1-minute data for precise execution
```python
class MyStrategy(StrategyBase):
def get_timeframes(self):
return ["15min", "1h"] # Strategy needs both timeframes
def initialize(self, backtester):
# Access resampled data
data_15m = self.get_data_for_timeframe("15min")
data_1h = self.get_data_for_timeframe("1h")
# Setup indicators...
```
## Strategy Interface
### StrategyBase Class
All strategies must inherit from `StrategyBase` and implement:
```python
from cycles.strategies.base import StrategyBase, StrategySignal
class MyStrategy(StrategyBase):
def get_timeframes(self) -> List[str]:
"""Specify required timeframes"""
return ["15min"]
def initialize(self, backtester) -> None:
"""Setup strategy with data"""
self._resample_data(backtester.original_df)
# Calculate indicators...
self.initialized = True
def get_entry_signal(self, backtester, df_index: int) -> StrategySignal:
"""Generate entry signals"""
if condition_met:
return StrategySignal("ENTRY", confidence=0.8)
return StrategySignal("HOLD", confidence=0.0)
def get_exit_signal(self, backtester, df_index: int) -> StrategySignal:
"""Generate exit signals"""
if exit_condition:
return StrategySignal("EXIT", confidence=1.0,
metadata={"type": "SELL_SIGNAL"})
return StrategySignal("HOLD", confidence=0.0)
```
### StrategySignal Class
Encapsulates trading signals with metadata:
```python
# Entry signal with high confidence
entry_signal = StrategySignal("ENTRY", confidence=0.9)
# Exit signal with specific price
exit_signal = StrategySignal("EXIT", confidence=1.0, price=50000,
metadata={"type": "STOP_LOSS"})
# Hold signal
hold_signal = StrategySignal("HOLD", confidence=0.0)
```
## Available Strategies
### 1. Default Strategy
Meta-trend analysis using multiple Supertrend indicators.
**Features:**
- Uses 3 Supertrend indicators with different parameters
- Configurable timeframe (default: 15min)
- Entry when all trends align upward
- Exit on trend reversal or stop-loss
**Configuration:**
```json
{
"name": "default",
"weight": 1.0,
"params": {
"timeframe": "15min",
"stop_loss_pct": 0.03
}
}
```
**Timeframes:**
- Primary: Configurable (default 15min)
- Stop-loss: Always includes 1min for precision
### 2. BBRS Strategy
Bollinger Bands + RSI with market regime detection.
**Features:**
- Market regime detection (trending vs sideways)
- Adaptive parameters based on market conditions
- Volume analysis and confirmation
- Multi-timeframe internal analysis (1min → 15min/1h)
**Configuration:**
```json
{
"name": "bbrs",
"weight": 1.0,
"params": {
"bb_width": 0.05,
"bb_period": 20,
"rsi_period": 14,
"strategy_name": "MarketRegimeStrategy",
"stop_loss_pct": 0.05
}
}
```
**Timeframes:**
- Input: 1min (Strategy class handles internal resampling)
- Internal: 15min, 1h (handled by underlying Strategy class)
- Output: Mapped back to 1min for backtesting
## Signal Combination
### Entry Signal Combination
```python
combination_rules = {
"entry": "weighted_consensus", # or "any", "all", "majority"
"min_confidence": 0.6
}
```
**Methods:**
- **`any`**: Enter if ANY strategy signals entry
- **`all`**: Enter only if ALL strategies signal entry
- **`majority`**: Enter if majority of strategies signal entry
- **`weighted_consensus`**: Enter based on weighted average confidence
### Exit Signal Combination
```python
combination_rules = {
"exit": "priority" # or "any", "all"
}
```
**Methods:**
- **`any`**: Exit if ANY strategy signals exit (recommended for risk management)
- **`all`**: Exit only if ALL strategies agree
- **`priority`**: Prioritized exit (STOP_LOSS > SELL_SIGNAL > others)
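A small sketch of the `priority` rule, assuming the signal objects expose `signal_type` and `metadata` attributes as in the `StrategySignal` examples above (the attribute names are assumptions):
```python
EXIT_PRIORITY = {"STOP_LOSS": 0, "SELL_SIGNAL": 1}  # lower value = higher priority

def pick_priority_exit(exit_signals):
    """Return the highest-priority EXIT signal, or None if nothing signals an exit."""
    exits = [s for s in exit_signals if s.signal_type == "EXIT"]
    if not exits:
        return None
    return min(exits, key=lambda s: EXIT_PRIORITY.get((s.metadata or {}).get("type"), 99))
```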
## Configuration
### Basic Strategy Manager Setup
```json
{
"strategies": [
{
"name": "default",
"weight": 0.6,
"params": {
"timeframe": "15min",
"stop_loss_pct": 0.03
}
},
{
"name": "bbrs",
"weight": 0.4,
"params": {
"bb_width": 0.05,
"strategy_name": "MarketRegimeStrategy"
}
}
],
"combination_rules": {
"entry": "weighted_consensus",
"exit": "any",
"min_confidence": 0.5
}
}
```
### Timeframe Examples
**Single Timeframe Strategy:**
```json
{
"name": "default",
"params": {
"timeframe": "5min" # Strategy works on 5-minute data
}
}
```
**Multi-Timeframe Strategy (Future Enhancement):**
```json
{
"name": "multi_tf_strategy",
"params": {
"timeframes": ["5min", "15min", "1h"], # Multiple timeframes
"primary_timeframe": "15min"
}
}
```
## Usage Examples
### Create Strategy Manager
```python
from cycles.strategies import create_strategy_manager
config = {
"strategies": [
{"name": "default", "weight": 1.0, "params": {"timeframe": "15min"}}
],
"combination_rules": {
"entry": "any",
"exit": "any"
}
}
strategy_manager = create_strategy_manager(config)
```
### Initialize and Use
```python
# Initialize with backtester
strategy_manager.initialize(backtester)
# Get signals during backtesting
entry_signal = strategy_manager.get_entry_signal(backtester, df_index)
exit_signal, exit_price = strategy_manager.get_exit_signal(backtester, df_index)
# Get strategy summary
summary = strategy_manager.get_strategy_summary()
print(f"Loaded strategies: {[s['name'] for s in summary['strategies']]}")
print(f"All timeframes: {summary['all_timeframes']}")
```
## Extending the System
### Adding New Strategies
1. **Create Strategy Class:**
```python
class NewStrategy(StrategyBase):
def get_timeframes(self):
return ["1h"] # Specify required timeframes
def initialize(self, backtester):
self._resample_data(backtester.original_df)
# Setup indicators...
self.initialized = True
def get_entry_signal(self, backtester, df_index):
# Implement entry logic
pass
def get_exit_signal(self, backtester, df_index):
# Implement exit logic
pass
```
2. **Register in StrategyManager:**
```python
# In StrategyManager._load_strategies()
elif name == "new_strategy":
strategies.append(NewStrategy(weight, params))
```
### Multi-Timeframe Strategy Development
For strategies requiring multiple timeframes:
```python
class MultiTimeframeStrategy(StrategyBase):
def get_timeframes(self):
return ["5min", "15min", "1h"]
def initialize(self, backtester):
self._resample_data(backtester.original_df)
# Access different timeframes
data_5m = self.get_data_for_timeframe("5min")
data_15m = self.get_data_for_timeframe("15min")
data_1h = self.get_data_for_timeframe("1h")
# Calculate indicators on each timeframe
# ...
def _calculate_signal_confidence(self, backtester, df_index):
# Analyze multiple timeframes for confidence
primary_signal = self._get_primary_signal(df_index)
confirmation = self._get_timeframe_confirmation(df_index)
return primary_signal * confirmation
```
## Performance Considerations
### Timeframe Management
- **Efficient Resampling**: Each strategy resamples data once during initialization
- **Memory Usage**: Only required timeframes are kept in memory
- **Signal Mapping**: Efficient mapping between timeframes using pandas reindex
### Strategy Combination
- **Lazy Evaluation**: Signals calculated only when needed
- **Error Handling**: Individual strategy failures don't crash the system
- **Logging**: Comprehensive logging for debugging and monitoring
## Best Practices
1. **Strategy Design:**
- Specify minimal required timeframes
- Include 1min for stop-loss precision
- Use confidence levels effectively
2. **Signal Combination:**
- Use `any` for exits (risk management)
- Use `weighted_consensus` for entries
- Set appropriate minimum confidence levels
3. **Error Handling:**
- Implement robust initialization checks
- Handle missing data gracefully
- Log strategy-specific warnings
4. **Testing:**
- Test strategies individually before combining
- Validate timeframe requirements
- Monitor memory usage with large datasets
## Troubleshooting
### Common Issues
1. **Timeframe Mismatches:**
- Ensure strategy specifies correct timeframes
- Check data availability for all timeframes
2. **Signal Conflicts:**
- Review combination rules
- Adjust confidence thresholds
- Monitor strategy weights
3. **Performance Issues:**
- Minimize timeframe requirements
- Optimize indicator calculations
- Use efficient pandas operations
### Debugging Tips
- Enable detailed logging: `logging.basicConfig(level=logging.DEBUG)`
- Use strategy summary: `manager.get_strategy_summary()`
- Test individual strategies before combining
- Monitor signal confidence levels
---
**Version**: 1.0.0
**Last Updated**: January 2025
**TCP Cycles Project**

View File

@ -1,488 +0,0 @@
# Timeframe System Documentation
## Overview
The Cycles framework features a sophisticated timeframe management system that allows strategies to operate on their preferred timeframes while maintaining precise execution control. This system supports both single-timeframe and multi-timeframe strategies with automatic data resampling and intelligent signal mapping.
## Architecture
### Core Concepts
1. **Strategy-Controlled Timeframes**: Each strategy specifies its required timeframes
2. **Automatic Resampling**: Framework resamples 1-minute data to strategy needs
3. **Precision Execution**: All strategies maintain 1-minute data for accurate stop-loss execution
4. **Signal Mapping**: Intelligent mapping between different timeframe resolutions
### Data Flow
```
Original 1min Data
Strategy.get_timeframes() → ["15min", "1h"]
Automatic Resampling
Strategy Logic (15min + 1h analysis)
Signal Generation
Map to Working Timeframe
Backtesting Engine
```
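The resampling step in this flow is standard pandas OHLCV aggregation; a minimal sketch, assuming a `DatetimeIndex` and open/high/low/close/volume columns, could look like this:
```python
import pandas as pd

def resample_ohlcv_sketch(df_1min: pd.DataFrame, rule: str = "15min") -> pd.DataFrame:
    """Aggregate 1-minute OHLCV bars to a coarser timeframe such as '15min' or '1h'."""
    agg = {
        "open": "first",
        "high": "max",
        "low": "min",
        "close": "last",
        "volume": "sum",
    }
    return df_1min.resample(rule).agg(agg).dropna(subset=["open"])
```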
## Strategy Timeframe Interface
### StrategyBase Methods
All strategies inherit timeframe capabilities from `StrategyBase`:
```python
class MyStrategy(StrategyBase):
def get_timeframes(self) -> List[str]:
"""Specify required timeframes for this strategy"""
return ["15min", "1h"] # Strategy needs both timeframes
def initialize(self, backtester) -> None:
# Automatic resampling happens here
self._resample_data(backtester.original_df)
# Access resampled data
data_15m = self.get_data_for_timeframe("15min")
data_1h = self.get_data_for_timeframe("1h")
# Calculate indicators on each timeframe
self.indicators_15m = self._calculate_indicators(data_15m)
self.indicators_1h = self._calculate_indicators(data_1h)
self.initialized = True
```
### Data Access Methods
```python
# Get data for specific timeframe
data_15m = strategy.get_data_for_timeframe("15min")
# Get primary timeframe data (first in list)
primary_data = strategy.get_primary_timeframe_data()
# Check available timeframes
timeframes = strategy.get_timeframes()
```
## Supported Timeframes
### Standard Timeframes
- **`"1min"`**: 1-minute bars (original resolution)
- **`"5min"`**: 5-minute bars
- **`"15min"`**: 15-minute bars
- **`"30min"`**: 30-minute bars
- **`"1h"`**: 1-hour bars
- **`"4h"`**: 4-hour bars
- **`"1d"`**: Daily bars
### Custom Timeframes
Any pandas-compatible frequency string is supported:
- **`"2min"`**: 2-minute bars
- **`"10min"`**: 10-minute bars
- **`"2h"`**: 2-hour bars
- **`"12h"`**: 12-hour bars
## Strategy Examples
### Single Timeframe Strategy
```python
class SingleTimeframeStrategy(StrategyBase):
def get_timeframes(self):
return ["15min"] # Only needs 15-minute data
def initialize(self, backtester):
self._resample_data(backtester.original_df)
# Work with 15-minute data
data = self.get_primary_timeframe_data()
self.indicators = self._calculate_indicators(data)
self.initialized = True
def get_entry_signal(self, backtester, df_index):
# df_index refers to 15-minute data
if self.indicators['signal'][df_index]:
return StrategySignal("ENTRY", confidence=0.8)
return StrategySignal("HOLD", confidence=0.0)
```
### Multi-Timeframe Strategy
```python
class MultiTimeframeStrategy(StrategyBase):
def get_timeframes(self):
return ["15min", "1h", "4h"] # Multiple timeframes
def initialize(self, backtester):
self._resample_data(backtester.original_df)
# Access different timeframes
self.data_15m = self.get_data_for_timeframe("15min")
self.data_1h = self.get_data_for_timeframe("1h")
self.data_4h = self.get_data_for_timeframe("4h")
# Calculate indicators on each timeframe
self.trend_4h = self._calculate_trend(self.data_4h)
self.momentum_1h = self._calculate_momentum(self.data_1h)
self.entry_signals_15m = self._calculate_entries(self.data_15m)
self.initialized = True
def get_entry_signal(self, backtester, df_index):
# Primary timeframe is 15min (first in list)
# Map df_index to other timeframes for confirmation
# Get current 15min timestamp
current_time = self.data_15m.index[df_index]
# Find corresponding indices in other timeframes
h1_idx = self.data_1h.index.get_indexer([current_time], method='ffill')[0]
h4_idx = self.data_4h.index.get_indexer([current_time], method='ffill')[0]
# Multi-timeframe confirmation
trend_ok = self.trend_4h[h4_idx] > 0
momentum_ok = self.momentum_1h[h1_idx] > 0.5
entry_signal = self.entry_signals_15m[df_index]
if trend_ok and momentum_ok and entry_signal:
confidence = 0.9 # High confidence with all timeframes aligned
return StrategySignal("ENTRY", confidence=confidence)
return StrategySignal("HOLD", confidence=0.0)
```
### Configurable Timeframe Strategy
```python
class ConfigurableStrategy(StrategyBase):
def get_timeframes(self):
# Strategy timeframe configurable via parameters
primary_tf = self.params.get("timeframe", "15min")
return [primary_tf, "1min"] # Primary + 1min for stop-loss
def initialize(self, backtester):
self._resample_data(backtester.original_df)
primary_tf = self.get_timeframes()[0]
self.data = self.get_data_for_timeframe(primary_tf)
# Indicator parameters can also be timeframe-dependent
if primary_tf == "5min":
self.ma_period = 20
elif primary_tf == "15min":
self.ma_period = 14
else:
self.ma_period = 10
self.indicators = self._calculate_indicators(self.data)
self.initialized = True
```
## Built-in Strategy Timeframe Behavior
### Default Strategy
**Timeframes**: Configurable primary + 1min for stop-loss
```python
# Configuration
{
"name": "default",
"params": {
"timeframe": "5min" # Configurable timeframe
}
}
# Resulting timeframes: ["5min", "1min"]
```
**Features**:
- Supertrend analysis on configured timeframe
- 1-minute precision for stop-loss execution
- Optimized for 15-minute default, but works on any timeframe
### BBRS Strategy
**Timeframes**: 1min input (internal resampling)
```python
# Configuration
{
"name": "bbrs",
"params": {
"strategy_name": "MarketRegimeStrategy"
}
}
# Resulting timeframes: ["1min"]
```
**Features**:
- Uses 1-minute data as input
- Internal resampling to 15min/1h by Strategy class
- Signals mapped back to 1-minute resolution
- No double-resampling issues
## Advanced Features
### Timeframe Synchronization
When working with multiple timeframes, synchronization is crucial:
```python
def _get_synchronized_signals(self, df_index, primary_timeframe="15min"):
"""Get signals synchronized across timeframes"""
# Get timestamp from primary timeframe
primary_data = self.get_data_for_timeframe(primary_timeframe)
current_time = primary_data.index[df_index]
signals = {}
for tf in self.get_timeframes():
if tf == primary_timeframe:
signals[tf] = df_index
else:
# Find corresponding index in other timeframe
tf_data = self.get_data_for_timeframe(tf)
tf_idx = tf_data.index.get_indexer([current_time], method='ffill')[0]
signals[tf] = tf_idx
return signals
```
### Dynamic Timeframe Selection
Strategies can adapt timeframes based on market conditions:
```python
class AdaptiveStrategy(StrategyBase):
def get_timeframes(self):
# Fixed set of timeframes strategy might need
return ["5min", "15min", "1h"]
def _select_active_timeframe(self, market_volatility):
"""Select timeframe based on market conditions"""
if market_volatility > 0.8:
return "5min" # High volatility -> shorter timeframe
elif market_volatility > 0.4:
return "15min" # Medium volatility -> medium timeframe
else:
return "1h" # Low volatility -> longer timeframe
def get_entry_signal(self, backtester, df_index):
# Calculate market volatility
volatility = self._calculate_volatility(df_index)
# Select appropriate timeframe
active_tf = self._select_active_timeframe(volatility)
# Generate signal on selected timeframe
return self._generate_signal_for_timeframe(active_tf, df_index)
```
## Configuration Examples
### Single Timeframe Configuration
```json
{
"strategies": [
{
"name": "default",
"weight": 1.0,
"params": {
"timeframe": "15min",
"stop_loss_pct": 0.03
}
}
]
}
```
### Multi-Timeframe Configuration
```json
{
"strategies": [
{
"name": "multi_timeframe_strategy",
"weight": 1.0,
"params": {
"primary_timeframe": "15min",
"confirmation_timeframes": ["1h", "4h"],
"signal_timeframe": "5min"
}
}
]
}
```
### Mixed Strategy Configuration
```json
{
"strategies": [
{
"name": "default",
"weight": 0.6,
"params": {
"timeframe": "15min"
}
},
{
"name": "bbrs",
"weight": 0.4,
"params": {
"strategy_name": "MarketRegimeStrategy"
}
}
]
}
```
## Performance Considerations
### Memory Usage
- Only required timeframes are resampled and stored
- Original 1-minute data shared across all strategies
- Efficient pandas resampling with minimal memory overhead
### Processing Speed
- Resampling happens once during initialization
- No repeated resampling during backtesting
- Vectorized operations on pre-computed timeframes
### Data Alignment
- All timeframes aligned to original 1-minute timestamps
- Forward-fill resampling ensures data availability
- Intelligent handling of missing data points
## Best Practices
### 1. Minimize Timeframe Requirements
```python
# Good - minimal timeframes
def get_timeframes(self):
return ["15min"]
# Less optimal - unnecessary timeframes
def get_timeframes(self):
return ["1min", "5min", "15min", "1h", "4h", "1d"]
```
### 2. Use Appropriate Timeframes for Strategy Logic
```python
# Good - timeframe matches strategy logic
class TrendStrategy(StrategyBase):
def get_timeframes(self):
return ["1h"] # Trend analysis works well on hourly data
class ScalpingStrategy(StrategyBase):
def get_timeframes(self):
return ["1min", "5min"] # Scalping needs fine-grained data
```
### 3. Include 1min for Stop-Loss Precision
```python
def get_timeframes(self):
primary_tf = self.params.get("timeframe", "15min")
timeframes = [primary_tf]
# Always include 1min for precise stop-loss
if "1min" not in timeframes:
timeframes.append("1min")
return timeframes
```
### 4. Handle Timeframe Edge Cases
```python
def get_entry_signal(self, backtester, df_index):
# Check bounds for all timeframes
if df_index >= len(self.get_primary_timeframe_data()):
return StrategySignal("HOLD", confidence=0.0)
# Robust timeframe indexing
try:
signal = self._calculate_signal(df_index)
return signal
except IndexError:
return StrategySignal("HOLD", confidence=0.0)
```
## Troubleshooting
### Common Issues
1. **Index Out of Bounds**
```python
# Problem: Different timeframes have different lengths
# Solution: Always check bounds
if df_index < len(self.data_1h):
signal = self.data_1h[df_index]
```
2. **Timeframe Misalignment**
```python
# Problem: Assuming same index across timeframes
# Solution: Use timestamp-based alignment
current_time = primary_data.index[df_index]
h1_idx = hourly_data.index.get_indexer([current_time], method='ffill')[0]
```
3. **Memory Issues with Large Datasets**
```python
# Solution: Only include necessary timeframes
def get_timeframes(self):
# Return minimal set
return ["15min"] # Not ["1min", "5min", "15min", "1h"]
```
### Debugging Tips
```python
# Log timeframe information
def initialize(self, backtester):
self._resample_data(backtester.original_df)
for tf in self.get_timeframes():
data = self.get_data_for_timeframe(tf)
print(f"Timeframe {tf}: {len(data)} bars, "
f"from {data.index[0]} to {data.index[-1]}")
self.initialized = True
```
## Future Enhancements
### Planned Features
1. **Dynamic Timeframe Switching**: Strategies adapt timeframes based on market conditions
2. **Timeframe Confidence Weighting**: Different confidence levels per timeframe
3. **Cross-Timeframe Signal Validation**: Automatic signal confirmation across timeframes
4. **Optimized Memory Management**: Lazy loading and caching for large datasets
### Extension Points
The timeframe system is designed for easy extension:
- Custom resampling methods
- Alternative timeframe synchronization strategies
- Market-specific timeframe preferences
- Real-time timeframe adaptation

View File

@ -2,10 +2,11 @@ import logging
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
import datetime
from cycles.utils.storage import Storage
from cycles.Analysis.strategies import Strategy
from cycles.utils.data_utils import aggregate_to_daily
from cycles.Analysis.boillinger_band import BollingerBands
from cycles.Analysis.rsi import RSI
logging.basicConfig(
level=logging.INFO,
@ -16,145 +17,115 @@ logging.basicConfig(
]
)
config = {
"start_date": "2025-03-01",
"stop_date": datetime.datetime.today().strftime('%Y-%m-%d'),
config_minute = {
"start_date": "2022-01-01",
"stop_date": "2023-01-01",
"data_file": "btcusd_1-min_data.csv"
}
config_strategy = {
"bb_width": 0.05,
"bb_period": 20,
"rsi_period": 14,
"trending": {
"rsi_threshold": [30, 70],
"bb_std_dev_multiplier": 2.5,
},
"sideways": {
"rsi_threshold": [40, 60],
"bb_std_dev_multiplier": 1.8,
},
"strategy_name": "MarketRegimeStrategy", # CryptoTradingStrategy
"SqueezeStrategy": True
config_day = {
"start_date": "2022-01-01",
"stop_date": "2023-01-01",
"data_file": "btcusd_1-day_data.csv"
}
IS_DAY = False
IS_DAY = True
def no_strategy(data_bb, data_with_rsi):
buy_condition = pd.Series([False] * len(data_bb), index=data_bb.index)
sell_condition = pd.Series([False] * len(data_bb), index=data_bb.index)
return buy_condition, sell_condition
def strategy_1(data_bb, data_with_rsi):
# Long trade: price move below lower Bollinger band and RSI go below 25
buy_condition = (data_bb['close'] < data_bb['LowerBand']) & (data_bb['RSI'] < 25)
# Short only: price move above top Bollinger band and RSI goes over 75
sell_condition = (data_bb['close'] > data_bb['UpperBand']) & (data_bb['RSI'] > 75)
return buy_condition, sell_condition
if __name__ == "__main__":
# Load data
storage = Storage(logging=logging)
if IS_DAY:
config = config_day
else:
config = config_minute
data = storage.load_data(config["data_file"], config["start_date"], config["stop_date"])
# Run strategy
strategy = Strategy(config=config_strategy, logging=logging)
processed_data = strategy.run(data.copy(), config_strategy["strategy_name"])
if not IS_DAY:
data_daily = aggregate_to_daily(data)
storage.save_data(data, "btcusd_1-day_data.csv")
df_to_plot = data_daily
else:
df_to_plot = data
# Get buy and sell signals
buy_condition = processed_data.get('BuySignal', pd.Series(False, index=processed_data.index)).astype(bool)
sell_condition = processed_data.get('SellSignal', pd.Series(False, index=processed_data.index)).astype(bool)
bb = BollingerBands(period=30, std_dev_multiplier=2.0)
data_bb = bb.calculate(df_to_plot.copy())
buy_signals = processed_data[buy_condition]
sell_signals = processed_data[sell_condition]
rsi_calculator = RSI(period=13)
data_with_rsi = rsi_calculator.calculate(df_to_plot.copy(), price_column='close')
# Plot the data with seaborn library
if processed_data is not None and not processed_data.empty:
# Combine BB and RSI data into a single DataFrame for signal generation
# Ensure indices are aligned; they should be as both are from df_to_plot.copy()
if 'RSI' in data_with_rsi.columns:
data_bb['RSI'] = data_with_rsi['RSI']
else:
# If RSI wasn't calculated (e.g., not enough data), create a dummy column with NaNs
# to prevent errors later, though signals won't be generated.
data_bb['RSI'] = pd.Series(index=data_bb.index, dtype=float)
logging.warning("RSI column not found or not calculated. Signals relying on RSI may not be generated.")
strategy = 1
if strategy == 1:
buy_condition, sell_condition = strategy_1(data_bb, data_with_rsi)
else:
buy_condition, sell_condition = no_strategy(data_bb, data_with_rsi)
buy_signals = data_bb[buy_condition]
sell_signals = data_bb[sell_condition]
# plot the data with seaborn library
if df_to_plot is not None and not df_to_plot.empty:
# Create a figure with two subplots, sharing the x-axis
fig, (ax1, ax2, ax3) = plt.subplots(3, 1, figsize=(16, 8), sharex=True)
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(16, 8), sharex=True)
strategy_name = config_strategy["strategy_name"]
# Plot 1: Close Price and Strategy-Specific Bands/Levels
sns.lineplot(x=processed_data.index, y='close', data=processed_data, label='Close Price', ax=ax1)
# Use standardized column names for bands
if 'UpperBand' in processed_data.columns and 'LowerBand' in processed_data.columns:
# Instead of lines, shade the area between upper and lower bands
ax1.fill_between(processed_data.index,
processed_data['LowerBand'],
processed_data['UpperBand'],
alpha=0.1, color='blue', label='Bollinger Bands')
else:
logging.warning(f"{strategy_name}: UpperBand or LowerBand not found for plotting.")
# Add strategy-specific extra indicators if available
if strategy_name == "CryptoTradingStrategy":
if 'StopLoss' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='StopLoss', data=processed_data, label='Stop Loss', ax=ax1, linestyle='--', color='orange')
if 'TakeProfit' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='TakeProfit', data=processed_data, label='Take Profit', ax=ax1, linestyle='--', color='purple')
# Plot 1: Close Price and Bollinger Bands
sns.lineplot(x=data_bb.index, y='close', data=data_bb, label='Close Price', ax=ax1)
sns.lineplot(x=data_bb.index, y='UpperBand', data=data_bb, label='Upper Band (BB)', ax=ax1)
sns.lineplot(x=data_bb.index, y='LowerBand', data=data_bb, label='Lower Band (BB)', ax=ax1)
# Plot Buy/Sell signals on Price chart
if not buy_signals.empty:
ax1.scatter(buy_signals.index, buy_signals['close'], color='green', marker='o', s=20, label='Buy Signal', zorder=5)
if not sell_signals.empty:
ax1.scatter(sell_signals.index, sell_signals['close'], color='red', marker='o', s=20, label='Sell Signal', zorder=5)
ax1.set_title(f'Price and Signals ({strategy_name})')
ax1.set_title('Price and Bollinger Bands with Signals')
ax1.set_ylabel('Price')
ax1.legend()
ax1.grid(True)
# Plot 2: RSI and Strategy-Specific Thresholds
if 'RSI' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='RSI', data=processed_data, label=f'RSI ({config_strategy.get("rsi_period", 14)})', ax=ax2, color='purple')
if strategy_name == "MarketRegimeStrategy":
# Get threshold values
upper_threshold = config_strategy.get("trending", {}).get("rsi_threshold", [30,70])[1]
lower_threshold = config_strategy.get("trending", {}).get("rsi_threshold", [30,70])[0]
# Shade overbought area (upper)
ax2.fill_between(processed_data.index, upper_threshold, 100,
alpha=0.1, color='red', label=f'Overbought (>{upper_threshold})')
# Shade oversold area (lower)
ax2.fill_between(processed_data.index, 0, lower_threshold,
alpha=0.1, color='green', label=f'Oversold (<{lower_threshold})')
elif strategy_name == "CryptoTradingStrategy":
# Shade overbought area (upper)
ax2.fill_between(processed_data.index, 65, 100,
alpha=0.1, color='red', label='Overbought (>65)')
# Shade oversold area (lower)
ax2.fill_between(processed_data.index, 0, 35,
alpha=0.1, color='green', label='Oversold (<35)')
# Plot 2: RSI
if 'RSI' in data_bb.columns: # Check data_bb now as it should contain RSI
sns.lineplot(x=data_bb.index, y='RSI', data=data_bb, label='RSI (14)', ax=ax2, color='purple')
ax2.axhline(75, color='red', linestyle='--', linewidth=0.8, label='Overbought (75)')
ax2.axhline(25, color='green', linestyle='--', linewidth=0.8, label='Oversold (25)')
# Plot Buy/Sell signals on RSI chart
if not buy_signals.empty and 'RSI' in buy_signals.columns:
if not buy_signals.empty:
ax2.scatter(buy_signals.index, buy_signals['RSI'], color='green', marker='o', s=20, label='Buy Signal (RSI)', zorder=5)
if not sell_signals.empty and 'RSI' in sell_signals.columns:
if not sell_signals.empty:
ax2.scatter(sell_signals.index, sell_signals['RSI'], color='red', marker='o', s=20, label='Sell Signal (RSI)', zorder=5)
ax2.set_title('Relative Strength Index (RSI) with Signals')
ax2.set_ylabel('RSI Value')
ax2.set_ylim(0, 100)  # RSI is bounded between 0 and 100
ax2.legend()
ax2.grid(True)
else:
logging.info("RSI data not available for plotting.")
# Plot 3: Strategy-Specific Indicators
ax3.clear() # Clear previous plot content if any
if 'BBWidth' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='BBWidth', data=processed_data, label='BB Width', ax=ax3)
if strategy_name == "MarketRegimeStrategy":
if 'MarketRegime' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='MarketRegime', data=processed_data, label='Market Regime (Sideways: 1, Trending: 0)', ax=ax3)
ax3.set_title('Bollinger Bands Width & Market Regime')
ax3.set_ylabel('Value')
elif strategy_name == "CryptoTradingStrategy":
if 'VolumeMA' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='VolumeMA', data=processed_data, label='Volume MA', ax=ax3)
if 'volume' in processed_data.columns:
sns.lineplot(x=processed_data.index, y='volume', data=processed_data, label='Volume', ax=ax3, alpha=0.5)
ax3.set_title('Volume Analysis')
ax3.set_ylabel('Volume')
ax3.legend()
ax3.grid(True)
plt.xlabel('Date')  # Common X-axis label
fig.tight_layout()  # Adjust layout to prevent overlapping titles/labels
plt.show()
else:
logging.info("No data to plot.")

View File

@ -1,229 +0,0 @@
import os
import time
import hmac
import hashlib
import base64
import json
import pandas as pd
import threading
from websocket import create_connection, WebSocketTimeoutException
class CryptoComTrader:
ENV_URLS = {
"production": {
"WS_URL": "wss://deriv-stream.crypto.com/v1/market",
"WS_PRIVATE_URL": "wss://deriv-stream.crypto.com/v1/user"
},
"uat": {
"WS_URL": "wss://uat-deriv-stream.3ona.co/v1/market",
"WS_PRIVATE_URL": "wss://uat-deriv-stream.3ona.co/v1/user"
}
}
def __init__(self):
self.env = os.getenv("CRYPTOCOM_ENV", "UAT").lower()
urls = self.ENV_URLS.get(self.env, self.ENV_URLS["production"])
self.WS_URL = urls["WS_URL"]
self.WS_PRIVATE_URL = urls["WS_PRIVATE_URL"]
self.api_key = os.getenv("CRYPTOCOM_API_KEY")
self.api_secret = os.getenv("CRYPTOCOM_API_SECRET")
self.ws = None
self.ws_private = None
self._lock = threading.Lock()
self._private_lock = threading.Lock()
self._connect_ws()
def _connect_ws(self):
if self.ws is None:
self.ws = create_connection(self.WS_URL, timeout=10)
if self.api_key and self.api_secret and self.ws_private is None:
self.ws_private = create_connection(self.WS_PRIVATE_URL, timeout=10)
def _send_ws(self, payload, private=False):
ws = self.ws_private if private else self.ws
lock = self._private_lock if private else self._lock
with lock:
ws.send(json.dumps(payload))
try:
resp = ws.recv()
return json.loads(resp)
except WebSocketTimeoutException:
return None
def _sign(self, params):
t = str(int(time.time() * 1000))
params['id'] = t
params['nonce'] = t
params['api_key'] = self.api_key
param_str = json.dumps(params, separators=(',', ':'), sort_keys=True)
sig = hmac.new(
bytes(self.api_secret, 'utf-8'),
msg=bytes(param_str, 'utf-8'),
digestmod=hashlib.sha256
).hexdigest()
params['sig'] = sig
return params
def get_price(self):
"""
Get the latest ask price for BTC_USDC using WebSocket ticker subscription (one-shot).
"""
payload = {
"id": int(time.time() * 1000),
"method": "subscribe",
"params": {"channels": ["ticker.BTC_USDC"]}
}
resp = self._send_ws(payload)
# Wait for ticker update
while True:
data = self.ws.recv()
msg = json.loads(data)
if msg.get("method") == "ticker.update":
# 'a' is ask price
return msg["params"]["data"][0].get("a")
def get_order_book(self, depth=10):
"""
Fetch the order book for BTC_USDC with the specified depth using WebSocket (one-shot).
Returns a dict with 'bids' and 'asks'.
"""
payload = {
"id": int(time.time() * 1000),
"method": "subscribe",
"params": {"channels": [f"book.BTC_USDC.{depth}"]}
}
resp = self._send_ws(payload)
# Wait for book update
while True:
data = self.ws.recv()
msg = json.loads(data)
if msg.get("method") == "book.update":
book = msg["params"]["data"][0]
return {
"bids": book.get("bids", []),
"asks": book.get("asks", [])
}
def _authenticate(self):
"""
Authenticate the private WebSocket connection. Only needs to be done once per session.
"""
if not self.api_key or not self.api_secret:
raise ValueError("API key and secret must be set in environment variables.")
payload = {
"id": int(time.time() * 1000),
"method": "public/auth",
"api_key": self.api_key,
"nonce": int(time.time() * 1000),
}
# For auth, sig is HMAC_SHA256(method + id + api_key + nonce)
sig_payload = (
payload["method"] + str(payload["id"]) + self.api_key + str(payload["nonce"])
)
payload["sig"] = hmac.new(
bytes(self.api_secret, "utf-8"),
msg=bytes(sig_payload, "utf-8"),
digestmod=hashlib.sha256,
).hexdigest()
resp = self._send_ws(payload, private=True)
if not resp or resp.get("code") != 0:
raise Exception(f"WebSocket authentication failed: {resp}")
def _ensure_private_auth(self):
if self.ws_private is None:
self._connect_ws()
time.sleep(1) # recommended by docs
self._authenticate()
def get_balance(self, currency="USDC"):
"""
Fetch user balance using WebSocket private API.
"""
self._ensure_private_auth()
payload = {
"id": int(time.time() * 1000),
"method": "private/user-balance",
"params": {},
"nonce": int(time.time() * 1000),
}
resp = self._send_ws(payload, private=True)
if resp and resp.get("code") == 0:
balances = resp.get("result", {}).get("data", [])
if currency:
return [b for b in balances if b.get("instrument_name") == currency]
return balances
return []
def place_order(self, side, amount):
"""
Place a market order using WebSocket private API.
side: 'BUY' or 'SELL', amount: in BTC
"""
self._ensure_private_auth()
params = {
"instrument_name": "BTC_USDC",
"side": side,
"type": "MARKET",
"quantity": str(amount),
}
payload = {
"id": int(time.time() * 1000),
"method": "private/create-order",
"params": params,
"nonce": int(time.time() * 1000),
}
resp = self._send_ws(payload, private=True)
return resp
def buy_btc(self, amount):
return self.place_order("BUY", amount)
def sell_btc(self, amount):
return self.place_order("SELL", amount)
def get_candlesticks(self, timeframe='1m', count=100):
"""
Fetch candlestick (OHLCV) data for BTC_USDC using WebSocket.
Args:
timeframe (str): Timeframe for each candle (e.g., '1m', '5m', '15m', '1h', '4h', '1d').
count (int): Number of candles to fetch (max 1000 per API docs).
Returns:
pd.DataFrame: DataFrame with columns ['timestamp', 'open', 'high', 'low', 'close', 'volume']
"""
payload = {
"id": int(time.time() * 1000),
"method": "public/get-candlestick",
"params": {
"instrument_name": "BTC_USDC",
"timeframe": timeframe,
"count": count
}
}
resp = self._send_ws(payload)
candles = resp.get("result", {}).get("data", []) if resp else []
if not candles:
return pd.DataFrame(columns=["timestamp", "open", "high", "low", "close", "volume"])
df = pd.DataFrame(candles)
df['timestamp'] = pd.to_datetime(df['t'], unit='ms')
df = df.rename(columns={
'o': 'open',
'h': 'high',
'l': 'low',
'c': 'close',
'v': 'volume'
})
return df[['timestamp', 'open', 'high', 'low', 'close', 'volume']].sort_values('timestamp')
def get_instruments(self):
"""
Fetch the list of available trading instruments from Crypto.com using WebSocket.
Returns:
list: List of instrument dicts.
"""
payload = {
"id": int(time.time() * 1000),
"method": "public/get-instruments",
"params": {}
}
resp = self._send_ws(payload)
return resp.get("result", {}).get("data", []) if resp else []

View File

@ -1,84 +0,0 @@
import time
import plotly.graph_objs as go
import plotly.io as pio
from cryptocom_trader import CryptoComTrader
def plot_candlesticks(df):
if df.empty:
print("No data to plot.")
return None
# Convert columns to float
for col in ['open', 'high', 'low', 'close', 'volume']:
df[col] = df[col].astype(float)
# Plotly expects datetime for x-axis
fig = go.Figure(data=[go.Candlestick(
x=df['timestamp'],
open=df['open'],
high=df['high'],
low=df['low'],
close=df['close'],
increasing_line_color='#089981',
decreasing_line_color='#F23645'
)])
fig.update_layout(
title='BTC/USDC Realtime Candlestick (1m)',
yaxis_title='Price (USDC)',
xaxis_title='Time',
xaxis_rangeslider_visible=False,
template='plotly_dark'
)
return fig
def main():
trader = CryptoComTrader()
pio.renderers.default = "browser" # Open in browser
# Fetch and print BTC/USDC-related instruments
instruments = trader.get_instruments()
btc_usdc_instruments = [
inst for inst in instruments
if (
('BTC' in inst.get('base_ccy', '') or 'BTC' in inst.get('base_currency', '')) and
('USDC' in inst.get('quote_ccy', '') or 'USDC' in inst.get('quote_currency', ''))
)
]
print("BTC/USDC-related instruments:")
for inst in btc_usdc_instruments:
print(inst)
# Optionally, show balance (private API)
try:
balance = trader.get_balance("USDC")
print("USDC Balance:", balance)
except Exception as e:
print("[WARN] Could not fetch balance (private API):", e)
all_instruments = trader.get_instruments()
for inst in all_instruments:
print(inst)
while True:
try:
df = trader.get_candlesticks(timeframe='1m', count=60)
# fig = plot_candlesticks(df)
# if fig:
# fig.show()
if not df.empty:
print(df[['high', 'low', 'open', 'close', 'volume']])
else:
print("No data to print.")
time.sleep(10)
except KeyboardInterrupt:
print('Exiting...')
break
except Exception as e:
print(f'Error: {e}')
time.sleep(10)
if __name__ == '__main__':
main()