Merge branch 'main' of ssh://dep.sokaris.link:2222/Simon/Cycles

This commit is contained in:
Simon Moisy
2025-05-22 17:05:39 +08:00
7 changed files with 432 additions and 60 deletions

View File

@@ -4,23 +4,25 @@ class BollingerBands:
"""
Calculates Bollinger Bands for given financial data.
"""
def __init__(self, period: int = 20, std_dev_multiplier: float = 2.0):
def __init__(self, config):
"""
Initializes the BollingerBands calculator.
Args:
period (int): The period for the moving average and standard deviation.
std_dev_multiplier (float): The number of standard deviations for the upper and lower bands.
bb_width (float): BBWidth threshold below which the market is classified as sideways.
"""
if period <= 0:
if config['bb_period'] <= 0:
raise ValueError("Period must be a positive integer.")
if std_dev_multiplier <= 0:
if config['trending']['bb_std_dev_multiplier'] <= 0 or config['sideways']['bb_std_dev_multiplier'] <= 0:
raise ValueError("Standard deviation multiplier must be positive.")
if config['bb_width'] <= 0:
raise ValueError("BB width must be positive.")
self.period = period
self.std_dev_multiplier = std_dev_multiplier
self.config = config
def calculate(self, data_df: pd.DataFrame, price_column: str = 'close') -> pd.DataFrame:
def calculate(self, data_df: pd.DataFrame, price_column: str = 'close', squeeze = False) -> pd.DataFrame:
"""
Calculates Bollinger Bands and adds them to the DataFrame.
@@ -37,14 +39,37 @@ class BollingerBands:
if price_column not in data_df.columns:
raise ValueError(f"Price column '{price_column}' not found in DataFrame.")
# Calculate SMA
data_df['SMA'] = data_df[price_column].rolling(window=self.period).mean()
if not squeeze:
# Calculate SMA
data_df['SMA'] = data_df[price_column].rolling(window=self.config['bb_period']).mean()
# Calculate Standard Deviation
std_dev = data_df[price_column].rolling(window=self.period).std()
# Calculate Standard Deviation
std_dev = data_df[price_column].rolling(window=self.config['bb_period']).std()
# Calculate Upper and Lower Bands
data_df['UpperBand'] = data_df['SMA'] + (self.std_dev_multiplier * std_dev)
data_df['LowerBand'] = data_df['SMA'] - (self.std_dev_multiplier * std_dev)
# Calculate Upper and Lower Bands
data_df['UpperBand'] = data_df['SMA'] + (2.0* std_dev)
data_df['LowerBand'] = data_df['SMA'] - (2.0* std_dev)
# Calculate the width of the Bollinger Bands
data_df['BBWidth'] = (data_df['UpperBand'] - data_df['LowerBand']) / data_df['SMA']
# Calculate the market regime
# 1 = sideways, 0 = trending
data_df['MarketRegime'] = (data_df['BBWidth'] < self.config['bb_width']).astype(int)
if data_df['MarketRegime'].sum() > 0:
data_df['UpperBand'] = data_df['SMA'] + (self.config['trending']['bb_std_dev_multiplier'] * std_dev)
data_df['LowerBand'] = data_df['SMA'] - (self.config['trending']['bb_std_dev_multiplier'] * std_dev)
else:
data_df['UpperBand'] = data_df['SMA'] + (self.config['sideways']['bb_std_dev_multiplier'] * std_dev)
data_df['LowerBand'] = data_df['SMA'] - (self.config['sideways']['bb_std_dev_multiplier'] * std_dev)
else:
data_df['SMA'] = data_df[price_column].rolling(window=14).mean()
# Calculate Standard Deviation
std_dev = data_df[price_column].rolling(window=14).std()
# Calculate Upper and Lower Bands
data_df['UpperBand'] = data_df['SMA'] + 1.5* std_dev
data_df['LowerBand'] = data_df['SMA'] - 1.5* std_dev
return data_df

View File

@@ -5,7 +5,7 @@ class RSI:
"""
A class to calculate the Relative Strength Index (RSI).
"""
def __init__(self, period: int = 14):
def __init__(self, config):
"""
Initializes the RSI calculator.
@@ -13,9 +13,9 @@ class RSI:
period (int): The period for RSI calculation. Default is 14.
Must be a positive integer.
"""
if not isinstance(period, int) or period <= 0:
if not isinstance(config['rsi_period'], int) or config['rsi_period'] <= 0:
raise ValueError("Period must be a positive integer.")
self.period = period
self.period = config['rsi_period']
def calculate(self, data_df: pd.DataFrame, price_column: str = 'close') -> pd.DataFrame:
"""

View File

@@ -0,0 +1,131 @@
import pandas as pd
import numpy as np
from cycles.Analysis.boillinger_band import BollingerBands
class Strategy:
    """
    Runs trading strategies that emit aligned boolean buy/sell signals.

    Every strategy method accepts a DataFrame of market data (with indicator
    columns already computed) and returns a tuple
    ``(buy_condition, sell_condition)`` of boolean Series indexed like the
    input DataFrame.
    """

    def __init__(self, config=None, logging=None):
        """
        Initializes the strategy runner.

        Args:
            config (dict): Strategy configuration (e.g. the "SqueezeStrategy"
                flag used by MarketRegimeStrategy). Required.
            logging: Optional logger-like object exposing ``warning``;
                may be None to disable warnings.

        Raises:
            ValueError: If config is None.
        """
        if config is None:
            raise ValueError("Config must be provided.")
        self.config = config
        self.logging = logging

    def run(self, data, strategy_name):
        """
        Dispatches to the strategy named ``strategy_name``.

        Unknown names fall back to ``no_strategy`` (with a warning when a
        logger is available).

        Args:
            data (pd.DataFrame): Market data with indicator columns.
            strategy_name (str): Name of the strategy to execute.

        Returns:
            tuple: (buy_condition, sell_condition) boolean Series.
        """
        if strategy_name == "MarketRegimeStrategy":
            return self.MarketRegimeStrategy(data)
        if self.logging is not None:
            self.logging.warning(f"Strategy {strategy_name} not found. Using no_strategy instead.")
        return self.no_strategy(data)

    def no_strategy(self, data):
        """No strategy: returns False for both buy and sell conditions"""
        buy_condition = pd.Series([False] * len(data), index=data.index)
        sell_condition = pd.Series([False] * len(data), index=data.index)
        return buy_condition, sell_condition

    def rsi_bollinger_confirmation(self, rsi, window=14, std_mult=1.5):
        """Calculate RSI Bollinger Bands for confirmation

        Args:
            rsi (Series): RSI values
            window (int): Rolling window for SMA
            std_mult (float): Standard deviation multiplier

        Returns:
            tuple: (oversold condition, overbought condition) boolean Series
        """
        valid_rsi = ~rsi.isna()
        if not valid_rsi.any():
            # No valid RSI data: neither condition can ever trigger.
            return pd.Series(False, index=rsi.index), pd.Series(False, index=rsi.index)
        rsi_sma = rsi.rolling(window).mean()
        rsi_std = rsi.rolling(window).std()
        upper_rsi_band = rsi_sma + std_mult * rsi_std
        lower_rsi_band = rsi_sma - std_mult * rsi_std
        return (rsi < lower_rsi_band), (rsi > upper_rsi_band)

    def MarketRegimeStrategy(self, data):
        """Optimized Bollinger Bands + RSI Strategy for Crypto Trading (Including Sideways Markets)
        with adaptive Bollinger Bands

        This advanced strategy combines volatility analysis, momentum confirmation, and regime
        detection to adapt to Bitcoin's unique market conditions.

        Entry Conditions:
            - Trending Market (Breakout Mode):
                Buy: Price < Lower Band and RSI < 50 and Volume Spike (>=1.5x 20D Avg)
                Sell: Price > Upper Band and RSI > 50 and Volume Spike
            - Sideways Market (Mean Reversion):
                Buy: Price <= Lower Band and RSI <= 40
                Sell: Price >= Upper Band and RSI >= 60

        Enhanced with RSI Bollinger Squeeze for signal confirmation when the
        "SqueezeStrategy" config flag is enabled. When the 'RSI' or 'volume'
        columns are missing, the conditions that depend on them simply never
        trigger (a warning is emitted if a logger is configured).

        Args:
            data (pd.DataFrame): Must contain 'close', 'LowerBand', 'UpperBand'
                and 'MarketRegime' (1 = sideways, 0 = trending); 'RSI' and
                'volume' are optional.

        Returns:
            tuple: (buy_condition, sell_condition) boolean Series.
        """
        # Initialize conditions as all False
        buy_condition = pd.Series(False, index=data.index)
        sell_condition = pd.Series(False, index=data.index)
        # Create masks for different market regimes (1 = sideways, 0 = trending)
        sideways_mask = data['MarketRegime'] > 0
        trending_mask = data['MarketRegime'] <= 0
        valid_data_mask = ~data['MarketRegime'].isna()  # Handle potential NaN values
        # Volume conditions: spike (>=1.5x 20D avg) for breakouts and
        # contraction (<0.7x 30D avg) as a sideways-market filter.
        if 'volume' in data.columns:
            volume_20d_avg = data['volume'].rolling(window=20).mean()
            volume_spike = data['volume'] >= 1.5 * volume_20d_avg
            volume_30d_avg = data['volume'].rolling(window=30).mean()
            volume_contraction = data['volume'] < 0.7 * volume_30d_avg
        else:
            # If volume data is not available, volume conditions never trigger.
            volume_spike = pd.Series(False, index=data.index)
            volume_contraction = pd.Series(False, index=data.index)
            if self.logging is not None:
                self.logging.warning("Volume data not available. Volume conditions will not be triggered.")
        # RSI conditions.
        # BUG FIX: the original indexed data['RSI'] unconditionally below,
        # raising KeyError whenever the column was missing despite this
        # fallback branch. All RSI-dependent conditions now degrade to False,
        # mirroring the volume handling above.
        all_false = pd.Series(False, index=data.index)
        if 'RSI' in data.columns:
            rsi = data['RSI']
            oversold_rsi, overbought_rsi = self.rsi_bollinger_confirmation(rsi)
            rsi_le_40, rsi_ge_60 = rsi <= 40, rsi >= 60
            rsi_lt_50, rsi_gt_50 = rsi < 50, rsi > 50
        else:
            oversold_rsi = overbought_rsi = all_false
            rsi_le_40 = rsi_ge_60 = rsi_lt_50 = rsi_gt_50 = all_false
            if self.logging is not None:
                self.logging.warning("RSI data not available. RSI Bollinger Squeeze will not be triggered.")
        # Calculate conditions for sideways market (Mean Reversion)
        if sideways_mask.any():
            sideways_buy = (data['close'] <= data['LowerBand']) & rsi_le_40
            sideways_sell = (data['close'] >= data['UpperBand']) & rsi_ge_60
            # Enhanced confirmation: RSI squeeze + volume contraction
            if self.config.get("SqueezeStrategy", False):
                sideways_buy = sideways_buy & oversold_rsi & volume_contraction
                sideways_sell = sideways_sell & overbought_rsi & volume_contraction
            # Apply only where market is sideways and data is valid
            buy_condition = buy_condition | (sideways_buy & sideways_mask & valid_data_mask)
            sell_condition = sell_condition | (sideways_sell & sideways_mask & valid_data_mask)
        # Calculate conditions for trending market (Breakout Mode)
        if trending_mask.any():
            trending_buy = (data['close'] < data['LowerBand']) & rsi_lt_50 & volume_spike
            trending_sell = (data['close'] > data['UpperBand']) & rsi_gt_50 & volume_spike
            # Enhanced confirmation: RSI squeeze only
            if self.config.get("SqueezeStrategy", False):
                trending_buy = trending_buy & oversold_rsi
                trending_sell = trending_sell & overbought_rsi
            # Apply only where market is trending and data is valid
            buy_condition = buy_condition | (trending_buy & trending_mask & valid_data_mask)
            sell_condition = sell_condition | (trending_sell & trending_mask & valid_data_mask)
        return buy_condition, sell_condition

View File

@@ -1,5 +1,80 @@
import pandas as pd
def check_data(data_df: pd.DataFrame) -> dict:
    """
    Validates the input DataFrame and builds resampling aggregation rules.

    Args:
        data_df (pd.DataFrame): DataFrame to check; expected to have a
            DatetimeIndex and some of the standard OHLCV columns.

    Returns:
        dict: Mapping of column name -> aggregation rule ('open' -> 'first',
        'high' -> 'max', 'low' -> 'min', 'close' -> 'last',
        'volume' -> 'sum') for the columns present. An empty (falsy) dict is
        returned when the index is not a DatetimeIndex or no OHLCV column is
        found.
    """
    # FIX: the previous docstring/annotation claimed a bool return, but the
    # function actually returned a rules dict on success; the failure value is
    # now {} so the return type is consistent (still falsy for callers).
    if not isinstance(data_df.index, pd.DatetimeIndex):
        print("Warning: Input DataFrame must have a DatetimeIndex.")
        return {}
    # Define aggregation rules based on available columns.
    ohlcv_rules = {
        'open': 'first',
        'high': 'max',
        'low': 'min',
        'close': 'last',
        'volume': 'sum',
    }
    agg_rules = {col: rule for col, rule in ohlcv_rules.items() if col in data_df.columns}
    if not agg_rules:
        print("Warning: No standard OHLCV columns (open, high, low, close, volume) found for daily aggregation.")
        return {}
    return agg_rules
def aggregate_to_weekly(data_df: pd.DataFrame, weeks: int = 1) -> pd.DataFrame:
    """
    Aggregates time-series financial data to weekly OHLCV format.

    The input DataFrame is expected to have a DatetimeIndex.
    'open' will be the first 'open' price of the week.
    'close' will be the last 'close' price of the week.
    'high' will be the maximum 'high' price of the week.
    'low' will be the minimum 'low' price of the week.
    'volume' (if present) will be the sum of volumes for the week.

    Args:
        data_df (pd.DataFrame): DataFrame with a DatetimeIndex and columns
            like 'open', 'high', 'low', 'close', and optionally 'volume'.
        weeks (int): The number of weeks to aggregate to. Default is 1.

    Returns:
        pd.DataFrame: DataFrame aggregated to weekly OHLCV data.
        The index will be a DatetimeIndex with the time set to the start of the week.
        Returns an empty DataFrame if no relevant OHLCV columns are found.
    """
    agg_rules = check_data(data_df)
    if not agg_rules:
        print("Warning: No standard OHLCV columns (open, high, low, close, volume) found for weekly aggregation.")
        return pd.DataFrame(index=pd.to_datetime([]))
    # Resample to weekly frequency and apply aggregation rules
    weekly_data = data_df.resample(f'{weeks}W').agg(agg_rules)
    weekly_data.dropna(how='all', inplace=True)
    # Adjust timestamps to the start of the week.
    # BUG FIX: DatetimeIndex.floor('W') raises ValueError because weekly
    # offsets are non-fixed frequencies (floor only supports fixed ones such
    # as 'D' or 'H'); converting through a weekly Period and taking its
    # start_time yields the intended start-of-week timestamps.
    if not weekly_data.empty and isinstance(weekly_data.index, pd.DatetimeIndex):
        weekly_data.index = weekly_data.index.to_period('W').start_time
    return weekly_data
def aggregate_to_daily(data_df: pd.DataFrame) -> pd.DataFrame:
"""
Aggregates time-series financial data to daily OHLCV format.
@@ -24,23 +99,9 @@ def aggregate_to_daily(data_df: pd.DataFrame) -> pd.DataFrame:
Raises:
ValueError: If the input DataFrame does not have a DatetimeIndex.
"""
if not isinstance(data_df.index, pd.DatetimeIndex):
raise ValueError("Input DataFrame must have a DatetimeIndex.")
agg_rules = {}
# Define aggregation rules based on available columns
if 'open' in data_df.columns:
agg_rules['open'] = 'first'
if 'high' in data_df.columns:
agg_rules['high'] = 'max'
if 'low' in data_df.columns:
agg_rules['low'] = 'min'
if 'close' in data_df.columns:
agg_rules['close'] = 'last'
if 'volume' in data_df.columns:
agg_rules['volume'] = 'sum'
agg_rules = check_data(data_df)
if not agg_rules:
# Log a warning or raise an error if no relevant columns are found
# For now, returning an empty DataFrame with a message might be suitable for some cases
@@ -58,3 +119,43 @@ def aggregate_to_daily(data_df: pd.DataFrame) -> pd.DataFrame:
daily_data.dropna(how='all', inplace=True)
return daily_data
def aggregate_to_hourly(data_df: pd.DataFrame, hours: int = 1) -> pd.DataFrame:
    """
    Resamples time-series financial data into hourly OHLCV bars.

    The input DataFrame must have a DatetimeIndex. Within each bucket of
    ``hours`` hours, 'open' takes the first value, 'close' the last, 'high'
    the maximum, 'low' the minimum, and 'volume' (if present) the sum.

    Args:
        data_df (pd.DataFrame): DataFrame with a DatetimeIndex and columns
            like 'open', 'high', 'low', 'close', and optionally 'volume'.
        hours (int): Size of each aggregation bucket in hours. Default is 1.

    Returns:
        pd.DataFrame: Hourly OHLCV data whose DatetimeIndex marks the start
        of each hour, or an empty DataFrame when no relevant OHLCV columns
        are found.
    """
    agg_rules = check_data(data_df)
    if not agg_rules:
        print("Warning: No standard OHLCV columns (open, high, low, close, volume) found for hourly aggregation.")
        return pd.DataFrame(index=pd.to_datetime([]))
    # Bucket the rows by the requested number of hours, then drop buckets
    # where every column is NaN (gaps in the source data).
    hourly = data_df.resample(f'{hours}H').agg(agg_rules)
    hourly = hourly.dropna(how='all')
    # Snap each bucket label to the start of its hour.
    if not hourly.empty and isinstance(hourly.index, pd.DatetimeIndex):
        hourly.index = hourly.index.floor('H')
    return hourly