# lowkey_backtest/cryptoquant_client.py

import ssl
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import os
import time
import random
from datetime import datetime, timedelta
from typing import Dict, List, Optional
import logging
import strategy_config as config
# Set up logging
logger = logging.getLogger(__name__)
class RateLimiter:
    """Sliding-window rate limiter for CryptoQuant's API.

    Keeps an in-memory list of recent request timestamps and blocks
    (sleeps) when issuing another request would exceed ``max_requests``
    within ``window_seconds``. Not thread-safe.
    """

    def __init__(self, max_requests: Optional[int] = None, window_seconds: int = 60):
        """
        Args:
            max_requests: Requests allowed per window. Defaults to the
                ``CRYPTOQUANT_RATE_LIMIT`` env var, falling back to 20.
            window_seconds: Length of the sliding window in seconds.
        """
        if max_requests is None:
            max_requests = int(os.getenv('CRYPTOQUANT_RATE_LIMIT', 20))
        self.max_requests = max_requests
        self.window_seconds = window_seconds
        self.requests: List[float] = []  # timestamps of recent requests

    def wait_if_needed(self) -> None:
        """Block until a request may be issued, then record it.

        Fix: the timestamp is captured *after* any sleep, so the stored
        time reflects when the request is actually sent. The original
        appended the pre-sleep timestamp, which was already stale by the
        sleep duration and allowed bursts through early.
        """
        now = time.time()
        # Drop timestamps that have aged out of the window.
        self.requests = [ts for ts in self.requests
                         if now - ts < self.window_seconds]
        if len(self.requests) >= self.max_requests:
            # Sleep until the oldest recorded request leaves the window.
            sleep_time = self.window_seconds - (now - self.requests[0])
            if sleep_time > 0:
                logger.warning(
                    f"Rate limit hit. Sleeping {sleep_time:.1f}s...")
                time.sleep(sleep_time)
                now = time.time()
                # Re-prune: entries may have expired while we slept.
                self.requests = [ts for ts in self.requests
                                 if now - ts < self.window_seconds]
        self.requests.append(now)

    def get_remaining(self) -> int:
        """Return how many requests remain in the current window (never negative)."""
        now = time.time()
        recent_requests = [
            ts for ts in self.requests if now - ts < self.window_seconds]
        return max(0, self.max_requests - len(recent_requests))
class CryptoQuantClient:
    """Thin wrapper around the CryptoQuant v1 REST API.

    Handles authentication headers, a custom TLS context, automatic
    retries with backoff, client-side rate limiting, and chunked
    date-range fetching for the on-chain metric endpoints.
    """

    def __init__(self, api_key: str = None):
        """
        Args:
            api_key: CryptoQuant API key. Falls back to the
                ``CRYPTOQUANT_API_KEY`` environment variable.

        Raises:
            ValueError: If no API key is available.
        """
        self.api_key = api_key or os.getenv('CRYPTOQUANT_API_KEY')
        if not self.api_key:
            raise ValueError("API key required - CRYPTOQUANT_API_KEY env var not set.")
        self.base_url = 'https://api.cryptoquant.com/v1'
        headers = {
            'Authorization': f'Bearer {self.api_key}',
            # NOTE(review): randomized UA version looks like bot-detection
            # evasion; confirm this is acceptable under the API's terms.
            'User-Agent': f'CryptoQuantBot/{random.uniform(1.0, 2.0):.1f}'
        }
        self.logger = logger
        self.rate_limiter = RateLimiter()
        # Robust session: custom SSL context + automatic retries on
        # transient server errors and 429s (idempotent methods only).
        self.session = requests.Session()
        ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        # TODO(review): magic OpenSSL option bit carried over as-is; which
        # SSL_OP_* flag 0x80000000 maps to is OpenSSL-version-dependent —
        # confirm against the deployed OpenSSL build and name it.
        ssl_context.options |= 0x80000000
        retry_strategy = Retry(
            total=5,
            status_forcelist=[429, 500, 502, 503, 504],
            allowed_methods=["HEAD", "GET", "OPTIONS"],
            backoff_factor=1,
        )

        class SSLAdapter(HTTPAdapter):
            """HTTPAdapter that injects the custom SSL context into the pool."""

            def init_poolmanager(self, *args, **kwargs):
                kwargs['ssl_context'] = ssl_context
                return super().init_poolmanager(*args, **kwargs)

        self.session.mount("https://", SSLAdapter(max_retries=retry_strategy))
        self.session.headers.update(headers)

    def _make_request(self, method: str, url: str, **kwargs) -> requests.Response:
        """Issue a GET/POST with a default 30s timeout and raise on HTTP errors.

        Fix: the original passed ``timeout=30`` alongside ``**kwargs``, so a
        caller supplying its own ``timeout`` crashed with a TypeError;
        ``setdefault`` makes it an overridable default instead.

        Raises:
            ValueError: For methods other than GET/POST.
            requests.exceptions.RequestException: After retries are exhausted.
        """
        method = method.upper()
        if method not in ('GET', 'POST'):
            raise ValueError(f"Unsupported method: {method}")
        kwargs.setdefault('timeout', 30)
        try:
            response = self.session.request(method, url, **kwargs)
            response.raise_for_status()
            return response
        except requests.exceptions.RequestException as e:
            self.logger.error(f"Request failed after all retries: {e}")
            raise

    def _fetch_metric_chunked(self, url_suffix: str, params: Dict, days_back: int, chunk_days: int = 90) -> List[Dict]:
        """Fetch a metric in <= ``chunk_days`` date chunks to avoid API range limits.

        Iterates oldest -> newest; a chunk that fails (e.g. history outside
        the plan's allowed range returns HTTP 400) is logged and skipped so
        the most recent valid data is still collected.

        Returns:
            Concatenated ``result.data`` records from all successful chunks.
        """
        all_data = []
        # NOTE(review): naive local time — confirm the API interprets
        # YYYYMMDD bounds in the caller's timezone.
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days_back)
        current_start = start_date
        while current_start < end_date:
            current_end = min(current_start + timedelta(days=chunk_days), end_date)
            chunk_params = params.copy()
            chunk_params.update({
                'from': current_start.strftime('%Y%m%d'),
                'to': current_end.strftime('%Y%m%d'),
                'limit': 10000  # request the max page size just in case
            })
            self.rate_limiter.wait_if_needed()
            url = f"{self.base_url}/{url_suffix}"
            try:
                self.logger.info(f"Fetching chunk {chunk_params['from']} to {chunk_params['to']}")
                response = self._make_request('GET', url, params=chunk_params)
                data = response.json()
                all_data.extend(data.get('result', {}).get('data', []))
            except requests.exceptions.HTTPError as e:
                # 400 usually means "out of allowed request range" for the
                # current plan tier; skip and keep going toward newer data.
                # Guard: e.response can be None for some transport errors.
                if e.response is not None and e.response.status_code == 400:
                    self.logger.warning(f"Chunk failed (likely data limit): {e}. Continuing to next chunk...")
                else:
                    self.logger.error(f"HTTP Error fetching chunk: {e}")
            except Exception as e:
                self.logger.error(f"Error fetching chunk: {e}")
            # 'from'/'to' are inclusive, so the next chunk starts the day after.
            current_start = current_end + timedelta(days=1)
            time.sleep(0.5)  # gentle pacing between chunks
        return all_data

    def get_nupl(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 364) -> List[Dict]:
        """Net Unrealized Profit/Loss."""
        logger.info(f"Fetching {asset} NUPL ({days_back} days)")
        params = {'window': window}
        return self._fetch_metric_chunked(f"{asset.lower()}/network-indicator/nupl", params, days_back)

    def get_mvrv(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 364) -> List[Dict]:
        """Market Value to Realized Value ratio."""
        logger.info(f"Fetching {asset} MVRV ({days_back} days)")
        params = {'window': window}
        return self._fetch_metric_chunked(f"{asset.lower()}/market-indicator/mvrv", params, days_back)

    def get_lth_sopr(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 364) -> List[Dict]:
        """Long-Term-Holder SOPR.

        NOTE(review): this hits the plain ``sopr`` endpoint, not an
        ``lth-sopr`` one — confirm against the CryptoQuant catalog that
        this is the intended metric.
        """
        logger.info(f"Fetching {asset} LTH-SOPR ({days_back} days)")
        params = {'window': window}
        return self._fetch_metric_chunked(f"{asset.lower()}/market-indicator/sopr", params, days_back)

    def get_puell_multiple(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 364) -> List[Dict]:
        """Puell Multiple (miner revenue vs. yearly average)."""
        logger.info(f"Fetching {asset} Puell Multiple ({days_back} days)")
        params = {'window': window}
        return self._fetch_metric_chunked(f"{asset.lower()}/network-indicator/puell-multiple", params, days_back)

    def get_fund_flow_ratio(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 364) -> List[Dict]:
        """Exchange fund flow ratio across all exchanges."""
        logger.info(f"Fetching {asset} Fund Flow Ratio ({days_back} days)")
        params = {'window': window, 'exchange': 'all_exchange'}
        return self._fetch_metric_chunked(f"{asset.lower()}/flow-indicator/fund-flow-ratio", params, days_back)

    def get_funding_rates(self, asset: str = config.ASSET, exchange: str = 'all_exchange', window: str = 'day', days_back: int = 90) -> List[Dict]:
        """Perpetual futures funding rates for the given exchange."""
        self.logger.info(f"Fetching {asset} funding rates ({days_back} days)")
        params = {'window': window, 'exchange': exchange}
        return self._fetch_metric_chunked(f"{asset.lower()}/market-data/funding-rates", params, days_back)

    def get_exchange_net_flow(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 90) -> List[Dict]:
        """Net exchange flow (inflow minus outflow) across all exchanges."""
        self.logger.info(f"Fetching {asset} exchange netflow ({days_back} days)")
        params = {'window': window, 'exchange': 'all_exchange'}
        return self._fetch_metric_chunked(f"{asset.lower()}/exchange-flows/netflow", params, days_back)

    def get_sopr_ratio(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 90) -> List[Dict]:
        """SOPR Ratio (LTH-SOPR / STH-SOPR)."""
        self.logger.info(f"Fetching {asset} SOPR Ratio ({days_back} days)")
        params = {'window': window}
        return self._fetch_metric_chunked(f"{asset.lower()}/market-indicator/sopr-ratio", params, days_back)

    def get_active_addresses(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 90) -> List[Dict]:
        """Active address counts; ETH uses a differently-named endpoint."""
        self.logger.info(f"Fetching {asset} active addresses ({days_back} days)")
        params = {'window': window}
        if asset.lower() == 'eth':
            suffix = f"{asset.lower()}/network-data/addresses-count-all"
        else:
            suffix = f"{asset.lower()}/network-data/addresses-count"
        return self._fetch_metric_chunked(suffix, params, days_back)

    def get_leverage_ratio(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 90) -> List[Dict]:
        """Get Estimated Leverage Ratio."""
        self.logger.info(f"Fetching {asset} Estimated Leverage Ratio ({days_back} days)")
        params = {'window': window, 'exchange': 'all_exchange'}
        return self._fetch_metric_chunked(f"{asset.lower()}/market-indicator/estimated-leverage-ratio", params, days_back)

    def get_exchange_whale_ratio(self, asset: str = config.ASSET, window: str = 'day', days_back: int = 90) -> List[Dict]:
        """Get the ratio of whale-sized deposits to total exchange deposits."""
        self.logger.info(f"Fetching {asset} Exchange Whale Ratio ({days_back} days)")
        params = {'window': window, 'exchange': 'all_exchange'}
        return self._fetch_metric_chunked(f"{asset.lower()}/flow-indicator/exchange-whale-ratio", params, days_back)

    def fetch_all_onchain(self, asset: str = config.ASSET, days_back: int = 364) -> Dict[str, List[Dict]]:
        """Batch-fetch every feature named in ``config.ONCHAIN_FEATURE_NAMES``.

        Fix: the original if/elif chain silently skipped unknown feature
        names; the dispatch table below logs a warning for them instead.

        Returns:
            Mapping of feature name -> list of raw metric records.
        """
        fetchers = {
            'funding_rate': lambda: self.get_funding_rates(asset, 'all_exchange', 'day', days_back),
            'net_exchange_flow': lambda: self.get_exchange_net_flow(asset, 'day', days_back),
            'sopr_ratio': lambda: self.get_sopr_ratio(asset, 'day', days_back),
            'active_addresses': lambda: self.get_active_addresses(asset, 'day', days_back),
            'leverage_ratio': lambda: self.get_leverage_ratio(asset, 'day', days_back),
            'exchange_whale_ratio': lambda: self.get_exchange_whale_ratio(asset, 'day', days_back),
            'nupl': lambda: self.get_nupl(asset, 'day', days_back),
            'mvrv': lambda: self.get_mvrv(asset, 'day', days_back),
            'lth_sopr': lambda: self.get_lth_sopr(asset, 'day', days_back),
            'puell_multiple': lambda: self.get_puell_multiple(asset, 'day', days_back),
            'fund_flow_ratio': lambda: self.get_fund_flow_ratio(asset, 'day', days_back),
        }
        features = {}
        for feat in config.ONCHAIN_FEATURE_NAMES:
            self.rate_limiter.wait_if_needed()
            fetcher = fetchers.get(feat)
            if fetcher is not None:
                features[feat] = fetcher()
            else:
                self.logger.warning(f"Unknown on-chain feature '{feat}' - skipping")
            time.sleep(1)  # pace for API limits
        return features
if __name__ == "__main__":
# Test
try:
client = CryptoQuantClient()
print("Fetching funding rates as test...")
rates = client.get_funding_rates(days_back=7)
print(f"Got {len(rates)} records")
if rates:
print(rates[-1])
except Exception as e:
print(f"Error: {e}")