Enhanced SmartLiquiditySweepOBR Strategy - Production Ready
Timeframe
5m
Direction
Long Only
Stoploss
-5.0%
Trailing Stop
No
ROI
0m: 4.0%, 15m: 2.0%, 30m: 1.0%
Interface Version
3
Startup Candles
200
Indicators
12
Based on freqtrade/freqtrade-strategies — Strategy 003; author: Gerald Lonlas; GitHub: https://github.com/freqtrade/freqtrade-strategies
# pragma pylint: disable=missing-docstring, invalid-name, pointless-string-statement
# flake8: noqa: F401
# isort: skip_file
# --- Do not remove these imports ---
import numpy as np
import pandas as pd
from datetime import datetime, timedelta, timezone
from pandas import DataFrame
from typing import Dict, Optional, Union, Tuple, List
from freqtrade.strategy import (
IStrategy,
Trade,
Order,
PairLocks,
informative, # @informative decorator
# Hyperopt Parameters
BooleanParameter,
CategoricalParameter,
DecimalParameter,
IntParameter,
RealParameter,
# timeframe helpers
timeframe_to_minutes,
timeframe_to_next_date,
timeframe_to_prev_date,
# Strategy helper functions
merge_informative_pair,
stoploss_from_absolute,
stoploss_from_open,
)
# --------------------------------
# Technical Library (Preferred) - Freqtrade optimized
from technical.indicators import (
williams_percent, atr, ema, VIDYA, mmar, madrid_sqz, laguerre,
vfi, vpci, chaikin_money_flow, vwma, sma, dema, tema, hull_moving_average,
bollinger_bands, chopiness, zema, ichimoku, stc, td_sequential
)
# TA-Lib for missing indicators
import talib.abstract as ta
# QTPyLib utilities
from technical import qtpylib
import logging
logger = logging.getLogger(__name__)
class SmartLiquiditySweepOBR_Enhanced(IStrategy):
    """
    Enhanced SmartLiquiditySweepOBR Strategy - Production Ready
    Features:
    - Technical library indicators for optimal performance
    - Advanced pattern detection with vectorized operations
    - Market regime filtering with MMAR
    - FreqAI integration ready
    - Comprehensive hyperopt parameters
    - Production-ready error handling
    - Multi-tier exit strategy
    - Dynamic position sizing
    - Controlled logging and performance monitoring
    Target: 85% Win Rate with Positive PNL
    """
    INTERFACE_VERSION = 3
    # === HYPEROPT PARAMETERS ===
    # Order Block Detection
    # Rolling window (bars) used for order-block high/low ranges.
    order_block_range = IntParameter(1, 5, default=1, space='buy', optimize=True)
    ob_lookback_period = IntParameter(3, 10, default=9, space='buy', optimize=True)
    # Minimum volume-to-average ratio for a candle to qualify as an order block.
    ob_volume_threshold = DecimalParameter(0.5, 1.5, decimals=1, default=1.1, space='buy', optimize=True)
    # Liquidity Sweep Detection
    swing_high_lookback = IntParameter(5, 20, default=11, space='buy', optimize=True)
    swing_low_lookback = IntParameter(5, 20, default=6, space='buy', optimize=True)
    # Fractional reversal beyond the swept level required to confirm a sweep.
    sweep_reversal_threshold = DecimalParameter(0.001, 0.005, decimals=3, default=0.004, space='buy', optimize=True)
    volume_spike_multiplier = DecimalParameter(1.5, 4.0, decimals=1, default=2.5, space='buy', optimize=True)
    # RSI & Momentum
    rsi_buy_threshold = IntParameter(20, 40, default=25, space='buy', optimize=True)
    rsi_sell_threshold = IntParameter(60, 80, default=60, space='sell', optimize=True)
    # Williams %R entry level; negated in the entry check (value 70 -> -70).
    williams_r_threshold = IntParameter(70, 90, default=70, space='buy', optimize=True)
    laguerre_rsi_threshold = IntParameter(20, 40, default=25, space='buy', optimize=True)
    # ATR & Volatility
    atr_sl_multiplier = DecimalParameter(0.5, 2.0, decimals=1, default=1.9, space='buy', optimize=True)
    atr_tp_multiplier = DecimalParameter(1.5, 4.0, decimals=1, default=2.0, space='sell', optimize=True)
    # Advanced Indicators
    vidya_length = IntParameter(5, 15, default=15, space='buy', optimize=True)
    vfi_length = IntParameter(100, 150, default=104, space='buy', optimize=True)
    vpci_length = IntParameter(15, 25, default=23, space='buy', optimize=True)
    cmf_length = IntParameter(15, 25, default=23, space='buy', optimize=True)
    # FreqAI Integration - thresholds applied to the ai_* feature columns
    # (neutral fallbacks are injected when FreqAI is unavailable).
    ai_momentum_threshold = DecimalParameter(0.1, 1.0, decimals=2, default=0.82, space='buy', optimize=True)
    ai_volatility_score = DecimalParameter(0.1, 1.0, decimals=2, default=0.49, space='buy', optimize=True)
    ai_trend_strength = DecimalParameter(0.5, 1.0, decimals=2, default=0.63, space='buy', optimize=True)
    ai_volume_spike_ratio = DecimalParameter(1.2, 3.0, decimals=1, default=2.0, space='buy', optimize=True)
    ai_return_threshold = DecimalParameter(-0.02, 0.02, decimals=3, default=0.015, space='buy', optimize=True)
    # Entry Confirmation
    entry_delay_bars = IntParameter(1, 5, default=3, space='buy', optimize=True)
    bos_confirmation_bars = IntParameter(1, 3, default=2, space='buy', optimize=True)
    volume_confirmation_bars = IntParameter(1, 3, default=1, space='buy', optimize=True)
    # Risk Management
    max_leverage = IntParameter(5, 15, default=7, space='buy', optimize=True)
    position_size_atr_multiplier = DecimalParameter(0.5, 2.0, decimals=1, default=0.5, space='buy', optimize=True)
    max_position_size_pct = DecimalParameter(0.05, 0.20, decimals=2, default=0.15, space='buy', optimize=True)
    volatility_factor_min = DecimalParameter(0.3, 0.7, decimals=1, default=0.4, space='buy', optimize=True)
    volatility_factor_max = DecimalParameter(1.5, 3.0, decimals=1, default=1.5, space='buy', optimize=True)
    # Take Profit & Exit - 4-Tier System (profit ratios, not prices)
    tp1_ratio = DecimalParameter(0.05, 0.15, decimals=2, default=0.10, space='sell', optimize=True)  # 10%
    tp2_ratio = DecimalParameter(0.20, 0.30, decimals=2, default=0.25, space='sell', optimize=True)  # 25%
    tp3_ratio = DecimalParameter(0.60, 0.90, decimals=2, default=0.75, space='sell', optimize=True)  # 75%
    tp4_ratio = DecimalParameter(0.90, 1.20, decimals=2, default=1.00, space='sell', optimize=True)  # 100%
    # Partial Exit Percentages
    tp1_exit_pct = DecimalParameter(0.20, 0.40, decimals=2, default=0.25, space='sell', optimize=True)  # 25% at TP1
    tp2_exit_pct = DecimalParameter(0.20, 0.40, decimals=2, default=0.25, space='sell', optimize=True)  # 25% at TP2
    tp3_exit_pct = DecimalParameter(0.20, 0.40, decimals=2, default=0.25, space='sell', optimize=True)  # 25% at TP3
    # Remaining 25% at TP4 (full exit)
    # Market Regime Filtering - per-regime on/off switches (codes 1-4).
    trade_bullish_regime = BooleanParameter(default=True, space='buy', optimize=True)
    trade_sideways_regime = BooleanParameter(default=True, space='buy', optimize=True)
    trade_bearish_regime = BooleanParameter(default=True, space='buy', optimize=True)
    trade_choppy_regime = BooleanParameter(default=True, space='buy', optimize=True)
    # Volume Analysis
    volume_trend_period = IntParameter(3, 10, default=4, space='buy', optimize=True)
    volume_momentum_period = IntParameter(3, 10, default=5, space='buy', optimize=True)
    low_volume_threshold = DecimalParameter(0.3, 0.7, decimals=1, default=0.7, space='sell', optimize=True)
    # Trailing Stop Parameters - These will be optimized via trailing_space() method
    # Note: These are fallback values, actual optimization happens in HyperOpt class
    # === STRATEGY CONFIG ===
    timeframe = '5m'
    inf_timeframe = '15m'
    startup_candle_count: int = 200  # Increased for advanced indicators (EMA200 etc.)
    # Risk Management
    position_adjustment_enable = True
    minimal_roi = {
        "0": 0.04,
        "15": 0.02,
        "30": 0.01
    }
    stoploss = -0.05
    use_custom_stoploss = True
    use_custom_exit = True
    # Trailing stop will be set dynamically based on hyperopt parameters
    # Wallet constraints
    max_open_trades = 5
    stake_amount = 10
    # Leverage Configuration for Futures Trading
    leverage_num = 10  # Set leverage to 10x for futures
    use_custom_leverage = True  # Enable custom leverage callback
def informative_pairs(self):
return [(f"{pair.split(':')[0]}:USDT", self.inf_timeframe) for pair in self.dp.current_whitelist()]
def detect_order_blocks_vectorized(self, dataframe: pd.DataFrame) -> pd.DataFrame:
"""Vectorized order block detection for optimal performance"""
try:
# Price change detection
dataframe['price_change'] = dataframe['close'].diff()
dataframe['volume_ratio'] = dataframe['volume'] / dataframe['volume'].rolling(window=10).mean()
# Bullish order block conditions
bullish_condition = (
(dataframe['price_change'] > 0) &
(dataframe['volume_ratio'] > self.ob_volume_threshold.value)
)
# Bearish order block conditions
bearish_condition = (
(dataframe['price_change'] < 0) &
(dataframe['volume_ratio'] > self.ob_volume_threshold.value)
)
# Use rolling windows for order block ranges
dataframe['bullish_ob_high'] = np.where(
bullish_condition,
dataframe['high'].rolling(self.order_block_range.value).max(),
0
)
dataframe['bullish_ob_low'] = np.where(
bullish_condition,
dataframe['low'].rolling(self.order_block_range.value).min(),
0
)
dataframe['bearish_ob_high'] = np.where(
bearish_condition,
dataframe['high'].rolling(self.order_block_range.value).max(),
0
)
dataframe['bearish_ob_low'] = np.where(
bearish_condition,
dataframe['low'].rolling(self.order_block_range.value).min(),
0
)
# Order block strength
dataframe['ob_strength'] = np.where(
bullish_condition | bearish_condition,
dataframe['volume_ratio'],
0
)
except Exception as e:
logger.error(f"Error in detect_order_blocks_vectorized: {e}")
# Fallback to zero values
dataframe['bullish_ob_high'] = 0
dataframe['bullish_ob_low'] = 0
dataframe['bearish_ob_high'] = 0
dataframe['bearish_ob_low'] = 0
dataframe['ob_strength'] = 0
return dataframe
def detect_fair_value_gaps_vectorized(self, dataframe: pd.DataFrame) -> pd.DataFrame:
"""Vectorized Fair Value Gap detection"""
try:
# Bullish FVG: current low > previous high
dataframe['fvg_bullish'] = np.where(
dataframe['low'] > dataframe['high'].shift(1),
dataframe['low'] - dataframe['high'].shift(1),
0
)
# Bearish FVG: current high < previous low
dataframe['fvg_bearish'] = np.where(
dataframe['high'] < dataframe['low'].shift(1),
dataframe['low'].shift(1) - dataframe['high'],
0
)
except Exception as e:
logger.error(f"Error in detect_fair_value_gaps_vectorized: {e}")
dataframe['fvg_bullish'] = 0
dataframe['fvg_bearish'] = 0
return dataframe
def detect_break_of_structure_vectorized(self, dataframe: pd.DataFrame) -> pd.DataFrame:
"""Vectorized Break of Structure detection"""
try:
# Bullish BOS: price breaks above recent swing high
dataframe['recent_high'] = dataframe['high'].rolling(self.swing_high_lookback.value).max()
dataframe['bos_bullish'] = dataframe['close'] > dataframe['recent_high'].shift(1)
# Bearish BOS: price breaks below recent swing low
dataframe['recent_low'] = dataframe['low'].rolling(self.swing_low_lookback.value).min()
dataframe['bos_bearish'] = dataframe['close'] < dataframe['recent_low'].shift(1)
# BOS confirmation
dataframe['bos_confirmed'] = (
dataframe['bos_bullish'].rolling(self.bos_confirmation_bars.value).sum() >= 1
)
except Exception as e:
logger.error(f"Error in detect_break_of_structure_vectorized: {e}")
dataframe['bos_bullish'] = False
dataframe['bos_bearish'] = False
dataframe['bos_confirmed'] = False
return dataframe
def detect_liquidity_sweeps_vectorized(self, dataframe: pd.DataFrame) -> pd.DataFrame:
"""Vectorized liquidity sweep detection"""
try:
# High liquidity sweep: price wicks above recent high then reverses
dataframe['recent_high_3'] = dataframe['high'].rolling(window=3).max()
dataframe['liquidity_sweep_high'] = (
(dataframe['high'] > dataframe['recent_high_3'].shift(1)) &
(dataframe['close'] < dataframe['recent_high_3'].shift(1) * (1 - self.sweep_reversal_threshold.value))
)
# Low liquidity sweep: price wicks below recent low then reverses
dataframe['recent_low_3'] = dataframe['low'].rolling(window=3).min()
dataframe['liquidity_sweep_low'] = (
(dataframe['low'] < dataframe['recent_low_3'].shift(1)) &
(dataframe['close'] > dataframe['recent_low_3'].shift(1) * (1 + self.sweep_reversal_threshold.value))
)
except Exception as e:
logger.error(f"Error in detect_liquidity_sweeps_vectorized: {e}")
dataframe['liquidity_sweep_high'] = False
dataframe['liquidity_sweep_low'] = False
return dataframe
def populate_indicators(self, dataframe: pd.DataFrame, metadata: dict) -> pd.DataFrame:
"""Populate indicators with Technical library for optimal performance"""
logger.info(f"=== STARTING populate_indicators for {metadata['pair']} ===")
logger.info(f"Dataframe shape: {dataframe.shape}")
logger.info(f"Dataframe columns: {list(dataframe.columns)}")
try:
# === CORE INDICATORS (Technical Library) ===
logger.info("Calculating RSI variants...")
# RSI variants - Using TA-Lib for RSI (Technical library doesn't have rsi function)
dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14)
logger.info(f"RSI calculated, shape: {dataframe['rsi'].shape}, NaN count: {dataframe['rsi'].isna().sum()}")
# Laguerre RSI - Noise-filtered RSI for better signal quality
dataframe['laguerre_rsi'] = laguerre(dataframe, gamma=0.75, smooth=1)
logger.info(f"Laguerre RSI calculated, shape: {dataframe['laguerre_rsi'].shape}, NaN count: {dataframe['laguerre_rsi'].isna().sum()}")
# Williams %R
logger.info("Calculating Williams %R...")
dataframe['williams_r'] = williams_percent(dataframe, 14)
logger.info(f"Williams %R calculated, shape: {dataframe['williams_r'].shape}, NaN count: {dataframe['williams_r'].isna().sum()}")
# ATR for volatility
logger.info("Calculating ATR...")
dataframe['atr'] = atr(dataframe, 14)
logger.info(f"ATR calculated, shape: {dataframe['atr'].shape}, NaN count: {dataframe['atr'].isna().sum()}")
# Moving averages
logger.info("Calculating moving averages...")
dataframe['ema_20'] = ema(dataframe, 20)
dataframe['ema_50'] = ema(dataframe, 50)
dataframe['ema_200'] = ema(dataframe, 200)
dataframe['vidya'] = VIDYA(dataframe, length=self.vidya_length.value)
logger.info(f"Moving averages calculated, shapes: EMA20={dataframe['ema_20'].shape}, EMA50={dataframe['ema_50'].shape}, EMA200={dataframe['ema_200'].shape}, VIDYA={dataframe['vidya'].shape}")
# === ADVANCED INDICATORS ===
logger.info("Calculating advanced indicators...")
# Market regime analysis - Proper MMAR implementation
try:
# Use proper MMAR from Technical library - MMAR returns tuple of 10 Series
mmar_result = mmar(dataframe)
# Use the leadMA series (first element) as main market regime indicator
dataframe['mmar'] = mmar_result[0]
# Convert color codes to numeric values for easier processing
dataframe['mmar_numeric'] = dataframe['mmar'].map({
'grey': 3, # Sideways/neutral
'green': 1, # Bullish
'red': 2, # Bearish
'yellow': 4 # Choppy/warning
}).fillna(3) # Default to sideways if unknown color
logger.info(f"MMAR calculated, shape: {dataframe['mmar'].shape}, NaN count: {dataframe['mmar'].isna().sum()}")
except Exception as e:
logger.error(f"Error calculating MMAR: {e}")
# Fallback to EMA-based market regime
ema_20 = dataframe['ema_20']
ema_50 = dataframe['ema_50']
ema_200 = dataframe['ema_200']
dataframe['mmar'] = np.where(
(ema_20 > ema_50) & (ema_50 > ema_200), 1, # Bullish
np.where(
(ema_20 < ema_50) & (ema_50 < ema_200), 2, # Bearish
np.where(
abs(ema_20 - ema_50) / ema_50 < 0.01, 3, # Sideways
4 # Choppy
)
)
)
logger.info(f"Fallback market regime calculated, shape: {dataframe['mmar'].shape}, NaN count: {dataframe['mmar'].isna().sum()}")
try:
# Use proper Madrid SQZ from Technical library - handle multiple return values
sqz_result = madrid_sqz(dataframe, length=34, src='close', ref=13, sqzLen=5)
if isinstance(sqz_result, tuple) and len(sqz_result) >= 3:
# Madrid SQZ returns multiple values, use the first one as main squeeze indicator
dataframe['madrid_sqz'] = sqz_result[0]
logger.info(f"Madrid SQZ calculated, shape: {dataframe['madrid_sqz'].shape}, NaN count: {dataframe['madrid_sqz'].isna().sum()}")
else:
# Single value return
dataframe['madrid_sqz'] = sqz_result
logger.info(f"Madrid SQZ calculated, shape: {dataframe['madrid_sqz'].shape}, NaN count: {dataframe['madrid_sqz'].isna().sum()}")
except Exception as e:
logger.error(f"Error calculating Madrid SQZ: {e}")
# Simple fallback squeeze indicator based on price volatility
price_volatility = dataframe['close'].rolling(20).std() / dataframe['close'].rolling(20).mean()
volatility_ma = price_volatility.rolling(20).mean()
dataframe['madrid_sqz'] = np.where(price_volatility < volatility_ma * 0.8, 1, 0) # Squeeze when volatility is low
logger.info(f"Fallback volatility squeeze calculated, shape: {dataframe['madrid_sqz'].shape}, NaN count: {dataframe['madrid_sqz'].isna().sum()}")
# Volume indicators
logger.info("Calculating volume indicators...")
# Replace VFI with VWMA for volume trend analysis
try:
dataframe['vwma'] = vwma(dataframe, 20)
logger.info(f"VWMA calculated, shape: {dataframe['vwma'].shape}, NaN count: {dataframe['vwma'].isna().sum()}")
except Exception as e:
logger.error(f"Error calculating VWMA: {e}")
dataframe['vwma'] = dataframe['close']
# Simple volume trend indicator to replace VFI
dataframe['volume_trend_indicator'] = (
dataframe['volume'].rolling(10).mean() / dataframe['volume'].rolling(50).mean()
)
dataframe['vfi'] = dataframe['volume_trend_indicator'] # Use as VFI replacement
dataframe['vfima'] = dataframe['volume_trend_indicator'].rolling(5).mean()
dataframe['vfi_hist'] = dataframe['volume_trend_indicator'] - dataframe['vfima']
try:
dataframe['vpci'] = vpci(dataframe, self.vpci_length.value)
logger.info(f"VPCI calculated, shape: {dataframe['vpci'].shape}, NaN count: {dataframe['vpci'].isna().sum()}")
except Exception as e:
logger.error(f"Error calculating VPCI: {e}")
dataframe['vpci'] = 0
try:
dataframe['cmf'] = chaikin_money_flow(dataframe, self.cmf_length.value)
logger.info(f"CMF calculated, shape: {dataframe['cmf'].shape}, NaN count: {dataframe['cmf'].isna().sum()}")
except Exception as e:
logger.error(f"Error calculating CMF: {e}")
dataframe['cmf'] = 0
# === ENHANCED VOLUME ANALYSIS ===
logger.info("Calculating enhanced volume analysis...")
# Volume statistics with error handling
dataframe['volume_mean'] = dataframe['volume'].rolling(20).mean()
dataframe['volume_std'] = dataframe['volume'].rolling(20).std()
logger.info(f"Volume stats calculated, mean shape: {dataframe['volume_mean'].shape}, std shape: {dataframe['volume_std'].shape}")
# Safe volume z-score calculation
dataframe['volume_z_score'] = np.where(
dataframe['volume_std'] > 0,
(dataframe['volume'] - dataframe['volume_mean']) / dataframe['volume_std'],
0
)
# Enhanced volume analysis
dataframe['volume_ratio'] = dataframe['volume'] / dataframe['volume_mean']
dataframe['volume_spike'] = dataframe['volume_ratio'] > self.volume_spike_multiplier.value
dataframe['volume_trend'] = (
dataframe['volume'].rolling(self.volume_trend_period.value).mean() >
dataframe['volume'].rolling(20).mean()
)
dataframe['volume_momentum'] = dataframe['volume'].pct_change(self.volume_momentum_period.value)
# Volume confirmation - more lenient
dataframe['volume_confirmed'] = (
(dataframe['volume_ratio'] > 1.2) | # Volume above 120% of average
(dataframe['volume_spike'].rolling(self.volume_confirmation_bars.value).sum() >= 1)
)
# === TREND ANALYSIS ===
dataframe['trend_strength'] = abs(dataframe['ema_20'] - dataframe['ema_50']) / dataframe['atr']
dataframe['trend_bias'] = dataframe['ema_20'] > dataframe['ema_50']
dataframe['vidya_trend'] = dataframe['close'] > dataframe['vidya']
# === PATTERN DETECTION (Vectorized) ===
logger.info("Starting pattern detection...")
logger.info(f"Dataframe shape before pattern detection: {dataframe.shape}")
dataframe = self.detect_order_blocks_vectorized(dataframe)
logger.info(f"Order blocks detected, shape: {dataframe.shape}")
dataframe = self.detect_fair_value_gaps_vectorized(dataframe)
logger.info(f"Fair value gaps detected, shape: {dataframe.shape}")
dataframe = self.detect_break_of_structure_vectorized(dataframe)
logger.info(f"Break of structure detected, shape: {dataframe.shape}")
dataframe = self.detect_liquidity_sweeps_vectorized(dataframe)
logger.info(f"Liquidity sweeps detected, shape: {dataframe.shape}")
# Ensure all pattern columns exist with fallback values
logger.info("Ensuring all pattern columns exist...")
if 'liquidity_sweep_low' not in dataframe.columns:
dataframe['liquidity_sweep_low'] = False
logger.info("Added missing liquidity_sweep_low column")
if 'liquidity_sweep_high' not in dataframe.columns:
dataframe['liquidity_sweep_high'] = False
logger.info("Added missing liquidity_sweep_high column")
if 'bos_bullish' not in dataframe.columns:
dataframe['bos_bullish'] = False
logger.info("Added missing bos_bullish column")
if 'bos_bearish' not in dataframe.columns:
dataframe['bos_bearish'] = False
logger.info("Added missing bos_bearish column")
if 'bos_confirmed' not in dataframe.columns:
dataframe['bos_confirmed'] = False
logger.info("Added missing bos_confirmed column")
if 'bullish_ob_high' not in dataframe.columns:
dataframe['bullish_ob_high'] = 0
logger.info("Added missing bullish_ob_high column")
if 'fvg_bullish' not in dataframe.columns:
dataframe['fvg_bullish'] = 0
logger.info("Added missing fvg_bullish column")
logger.info(f"Final dataframe shape: {dataframe.shape}")
logger.info(f"Final dataframe columns: {list(dataframe.columns)}")
logger.info(f"Pattern columns check: liquidity_sweep_low={dataframe['liquidity_sweep_low'].sum()}, bos_bearish={dataframe['bos_bearish'].sum()}")
# === MULTI-TIMEFRAME ANALYSIS ===
if self.dp:
inf_tf = self.inf_timeframe
informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe=inf_tf)
if len(informative) > 0:
# HTF indicators
informative['rsi_inf'] = ta.RSI(informative, timeperiod=14)
informative['ema_20_inf'] = ema(informative, 20)
informative['ema_50_inf'] = ema(informative, 50)
informative['ema_200_inf'] = ema(informative, 200)
informative['trend_bias'] = informative['ema_20_inf'] > informative['ema_50_inf']
# Use EMA-based market regime for HTF instead of MMAR
informative['mmar_inf'] = np.where(
(informative['ema_20_inf'] > informative['ema_50_inf']) & (informative['ema_50_inf'] > informative['ema_200_inf']), 1, # Bullish
np.where(
(informative['ema_20_inf'] < informative['ema_50_inf']) & (informative['ema_50_inf'] < informative['ema_200_inf']), 2, # Bearish
np.where(
abs(informative['ema_20_inf'] - informative['ema_50_inf']) / informative['ema_50_inf'] < 0.01, 3, # Sideways
4 # Choppy
)
)
)
dataframe = merge_informative_pair(dataframe, informative, self.timeframe, inf_tf, ffill=True)
# Use suffixed column names after merge
trend_col = f'trend_bias_{inf_tf}'
rsi_col = f'rsi_inf_{inf_tf}'
mmar_col = f'mmar_inf_{inf_tf}'
dataframe['htf_bullish_bias'] = (
(dataframe[trend_col] == True) &
(dataframe[rsi_col] > 40) &
(dataframe[rsi_col] < 80)
)
dataframe['htf_mmar'] = dataframe[mmar_col]
else:
dataframe['htf_bullish_bias'] = True
dataframe['htf_mmar'] = 1 # Default to bullish
# === FREQAI SIGNAL INJECTION ===
if 'freqai_predictions' in dataframe:
dataframe['ai_momentum'] = dataframe['freqai_predictions'].get('predicted_momentum', 0)
dataframe['ai_volatility'] = dataframe['freqai_predictions'].get('predicted_volatility_score', 0)
dataframe['ai_trend_strength'] = dataframe['freqai_predictions'].get('predicted_trend_strength', 0)
dataframe['ai_volume_spike_ratio'] = dataframe['freqai_predictions'].get('predicted_volume_spike_ratio', 1.0)
dataframe['ai_return_prediction'] = dataframe['freqai_predictions'].get('predicted_return_5m', 0)
else:
# Fallback values when FreqAI not available
dataframe['ai_momentum'] = 0.5 # Neutral
dataframe['ai_volatility'] = 0.5 # Neutral
dataframe['ai_trend_strength'] = 0.5 # Neutral
dataframe['ai_volume_spike_ratio'] = 1.0 # Normal
dataframe['ai_return_prediction'] = 0 # No prediction
# === MARKET REGIME FILTERING ===
# More lenient market regime filtering - allow trades in most regimes
dataframe['market_regime_allowed'] = (
((dataframe['mmar_numeric'] == 1) & self.trade_bullish_regime.value) |
((dataframe['mmar_numeric'] == 2) & self.trade_bearish_regime.value) |
((dataframe['mmar_numeric'] == 3) & self.trade_sideways_regime.value) |
((dataframe['mmar_numeric'] == 4) & self.trade_choppy_regime.value) |
(dataframe['mmar_numeric'].isna()) # Allow trades when regime is unclear
)
# === ENTRY DELAY ===
dataframe['entry_delay'] = True # Default to allow entry
if self.entry_delay_bars.value > 1:
# Require confirmation over multiple bars
dataframe['entry_delay'] = (
dataframe['volume_spike'].rolling(self.entry_delay_bars.value).sum() >= 1
)
# === DEBUGGING: Log all relevant columns and their stats ===
debug_columns = [
'rsi', 'laguerre_rsi', 'williams_r', 'atr', 'ema_20', 'ema_50', 'ema_200', 'vidya',
'mmar', 'madrid_sqz', 'vfi', 'vpci', 'cmf', 'vwma',
'trend_strength', 'trend_bias', 'vidya_trend',
'volume_mean', 'volume_std', 'volume_z_score', 'volume_ratio', 'volume_spike',
'volume_trend', 'volume_momentum', 'volume_confirmed',
'liquidity_sweep_low', 'bullish_ob_high', 'fvg_bullish', 'bos_bullish', 'bos_confirmed',
'ai_momentum', 'ai_volatility', 'ai_trend_strength', 'ai_volume_spike_ratio', 'ai_return_prediction'
]
logger.info(f"=== DEBUGGING INDICATOR COLUMNS for {metadata['pair']} ===")
for col in debug_columns:
if col in dataframe.columns:
logger.info(f"{col}: NaN count={dataframe[col].isna().sum()}, unique={dataframe[col].unique()[:10]}")
else:
logger.warning(f"{col}: MISSING")
# Initialize entry/exit columns
dataframe['enter_long'] = 0
dataframe['exit_long'] = 0
logger.info("Initialized enter_long and exit_long columns")
except Exception as e:
logger.error(f"Error in populate_indicators for {metadata['pair']}: {e}")
# Return dataframe with basic indicators as fallback
dataframe['rsi'] = 50
dataframe['williams_r'] = -50
dataframe['atr'] = 0.01
dataframe['market_regime_allowed'] = True
return dataframe
def populate_entry_trend(self, dataframe: pd.DataFrame, metadata: dict) -> pd.DataFrame:
"""Multi-condition entry logic with proper validation"""
logger.info(f"=== STARTING populate_entry_trend for {metadata['pair']} ===")
dataframe['enter_long'] = 0
try:
# === PRIMARY CONDITIONS (Need 2/5) ===
primary_conditions = [
dataframe['rsi'] < self.rsi_buy_threshold.value,
dataframe['williams_r'] > -self.williams_r_threshold.value,
dataframe['laguerre_rsi'] < self.laguerre_rsi_threshold.value,
dataframe['liquidity_sweep_low'],
dataframe['bullish_ob_high'] > 0,
dataframe['fvg_bullish'] > 0
]
# === SECONDARY CONDITIONS (Need 3/8) ===
secondary_conditions = [
dataframe['volume_spike'],
dataframe['trend_strength'] > 0.3,
dataframe.get('htf_bullish_bias', True), # Default to True if not available
dataframe['bos_confirmed'],
dataframe['vfi'] > 0,
dataframe['vpci'] > 0,
dataframe['cmf'] > 0,
dataframe['vidya_trend']
]
# === AI CONDITIONS (Need 2/5 if available) ===
ai_conditions = [
dataframe.get('ai_momentum', 0.5) > self.ai_momentum_threshold.value,
dataframe.get('ai_volatility', 0.5) < self.ai_volatility_score.value,
dataframe.get('ai_trend_strength', 0.5) > self.ai_trend_strength.value,
dataframe.get('ai_volume_spike_ratio', 1.0) > self.ai_volume_spike_ratio.value,
dataframe.get('ai_return_prediction', 0.0) > self.ai_return_threshold.value
]
# === CONDITION COUNTING ===
primary_count = sum(primary_conditions)
secondary_count = sum(secondary_conditions)
ai_count = sum(ai_conditions)
# Check if AI data is available (non-zero values indicate availability)
ai_available = any(dataframe.get('ai_momentum', 0.5) != 0.5)
# === VALIDATION ===
primary_met = primary_count >= 1 # Need at least 1 primary condition
secondary_met = secondary_count >= 2 # Need at least 2 secondary conditions
ai_met = ai_count >= 1 if ai_available else True # Skip AI if not available
# === MARKET REGIME FILTER ===
regime_allowed = dataframe.get('market_regime_allowed', True)
# === FINAL ENTRY CONDITION ===
entry_condition = (
primary_met &
secondary_met &
ai_met &
regime_allowed &
dataframe['volume_confirmed']
)
# Apply entry delay
entry_condition = entry_condition & dataframe.get('entry_delay', True)
dataframe.loc[entry_condition, 'enter_long'] = 1
# Detailed debugging
logger.info(f"Entry conditions for {metadata['pair']}:")
logger.info(f" Primary met: {primary_met}")
logger.info(f" Secondary met: {secondary_met}")
logger.info(f" AI met: {ai_met}")
logger.info(f" Regime allowed: {regime_allowed.iloc[-1] if len(regime_allowed) > 0 else 'N/A'}")
logger.info(f" Volume confirmed: {dataframe['volume_confirmed'].iloc[-1] if len(dataframe) > 0 else 'N/A'}")
logger.info(f" Entry signals generated: {dataframe['enter_long'].sum()}")
# Controlled logging - only log when entry signal is generated
if entry_condition.iloc[-1] if len(entry_condition) > 0 else False:
logger.info(f"🎯 ENTRY SIGNAL for {metadata['pair']} - RSI: {dataframe['rsi'].iloc[-1]:.2f}")
except Exception as e:
logger.error(f"Error in populate_entry_trend for {metadata['pair']}: {e}")
# Fallback to simple condition
dataframe.loc[dataframe['rsi'] < 30, 'enter_long'] = 1
return dataframe
def populate_exit_trend(self, dataframe: pd.DataFrame, metadata: dict) -> pd.DataFrame:
"""Multi-tier exit logic"""
logger.info(f"=== STARTING populate_exit_trend for {metadata['pair']} ===")
dataframe['exit_long'] = 0
try:
# === IMMEDIATE EXIT CONDITIONS ===
immediate_exit_conditions = (
(dataframe['rsi'] > self.rsi_sell_threshold.value) |
(dataframe['williams_r'] < -20) |
(dataframe['bos_bearish']) |
(dataframe['liquidity_sweep_high']) |
(dataframe.get('ai_return_prediction', 0.0) < -0.01) |
(dataframe['volume'] < dataframe['volume_mean'] * self.low_volume_threshold.value)
)
dataframe.loc[immediate_exit_conditions, 'exit_long'] = 1
# Debug exit conditions
logger.info(f"Exit conditions for {metadata['pair']}:")
logger.info(f" Exit signals generated: {dataframe['exit_long'].sum()}")
except Exception as e:
logger.error(f"Error in populate_exit_trend for {metadata['pair']}: {e}")
# Fallback exit condition
dataframe.loc[dataframe['rsi'] > 80, 'exit_long'] = 1
return dataframe
def custom_stoploss(self, pair: str, trade: Trade, current_time: 'datetime', current_rate: float,
current_profit: float, **kwargs):
"""Dynamic stop loss with break-even after TP1"""
try:
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
last_candle = dataframe.iloc[-1].squeeze()
# === BREAK-EVEN STOP LOSS AFTER TP1 ===
# If we've reached TP1 (10% profit), move stop loss to break-even
if current_profit >= self.tp1_ratio.value:
return -0.001 # Very tight stop loss near break-even (0.1%)
# === DYNAMIC STOP LOSS BEFORE TP1 ===
# Method 1: ATR-based stop loss
atr = last_candle.get('atr', 0.01)
atr_stop = current_rate - (atr * self.atr_sl_multiplier.value)
# Method 2: Order block stop loss
ob_stop = last_candle.get('bullish_ob_low', 0)
if ob_stop > 0:
ob_stop = ob_stop * 0.995 # Add 0.5% buffer
# Method 3: VIDYA-based stop loss
vidya_stop = last_candle.get('vidya', current_rate) * 0.98 # 2% below VIDYA
# Method 4: Support level stop loss
support_stop = last_candle['low'] * 0.995 # 0.5% below current low
# Use the highest stop loss (most conservative)
stop_price = max(atr_stop, ob_stop, vidya_stop, support_stop)
return (stop_price - current_rate) / current_rate
except Exception as e:
logger.error(f"Error in custom_stoploss for {pair}: {e}")
# Fallback to fixed stop loss
return -0.05
    def custom_exit(self, pair: str, trade: Trade, current_time: 'datetime', current_rate: float,
                    current_profit: float, **kwargs):
        """Enhanced custom exit with 4-tier partial exits and break-even SL

        Returns an exit-reason string to close the trade, or None to hold.
        NOTE(review): in freqtrade, a string returned from custom_exit closes
        the WHOLE position; genuine partial exits require
        adjust_trade_position() - confirm the tier logic behaves as intended.
        """
        try:
            dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
            last_candle = dataframe.iloc[-1].squeeze()
            # === 4-TIER TAKE PROFIT SYSTEM ===
            # TP4: Final exit at 100% profit
            if current_profit >= self.tp4_ratio.value:
                return 'tp4_final_exit'
            # TP3: Third partial exit at 75% profit
            elif current_profit >= self.tp3_ratio.value:
                # Check if we still have enough position to exit
                # NOTE(review): 'trade.initial_amount' may not exist on Trade
                # in all freqtrade versions; an AttributeError here would be
                # swallowed by the outer except (returning None), silently
                # disabling this tier - verify against the installed API.
                remaining_pct = trade.amount / trade.initial_amount
                if remaining_pct > self.tp3_exit_pct.value:
                    return 'tp3_partial'
            # TP2: Second partial exit at 25% profit
            elif current_profit >= self.tp2_ratio.value:
                # Check if we still have enough position to exit
                remaining_pct = trade.amount / trade.initial_amount
                if remaining_pct > self.tp2_exit_pct.value:
                    return 'tp2_partial'
            # TP1: First partial exit at 10% profit
            elif current_profit >= self.tp1_ratio.value:
                # Check if we still have enough position to exit
                remaining_pct = trade.amount / trade.initial_amount
                if remaining_pct > self.tp1_exit_pct.value:
                    return 'tp1_partial'
            # === AI-BASED EXITS ===
            # Pre-emptive exit when the model predicts a negative return.
            if last_candle.get('ai_return_prediction', 0) < -0.01:
                return 'ai_reversal_signal'
            if last_candle.get('ai_trend_strength', 1) < 0.3:
                return 'ai_trend_weakness'
            # === TECHNICAL EXITS ===
            if last_candle.get('rsi', 50) > 80:
                return 'rsi_overbought'
            if last_candle.get('bos_bearish', False):
                return 'structure_broken'
            # === VOLUME-BASED EXITS ===
            if last_candle.get('volume', 0) < last_candle.get('volume_mean', 1) * self.low_volume_threshold.value:
                return 'low_volume_exit'
            # === MARKET REGIME EXITS ===
            if last_candle.get('mmar_numeric', 1) == 2:  # Bearish regime
                return 'bearish_regime_exit'
            return None
        except Exception as e:
            logger.error(f"Error in custom_exit for {pair}: {e}")
            return None
def custom_stake_amount(self, pair: str, current_time: 'datetime', current_rate: float,
                        proposed_stake: float, min_stake: float, max_stake: float,
                        leverage: float, entry_tag: str, side: str, **kwargs) -> float:
    """Dynamic position sizing based on volatility, AI confidence and regime.

    Scales proposed_stake by:
      * inverse relative volatility (ATR / price), clipped to the hyperopt
        range [volatility_factor_min, volatility_factor_max]
      * AI trend strength, clipped to [0.5, 1.5]
      * market regime: bullish 1.2x, sideways 0.8x, bearish 0.6x
      * volume confirmation: 1.1x
    The result is clipped to [min_stake, max_stake]; on any error (or an
    empty dataframe) the unmodified proposed_stake is returned.
    """
    try:
        dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
        # Guard against an empty dataframe (startup / missing data) so
        # iloc[-1] cannot raise and pollute the error log.
        if dataframe is None or dataframe.empty:
            return proposed_stake
        last_candle = dataframe.iloc[-1].squeeze()
        base_stake = proposed_stake
        # Renamed from `atr` to avoid shadowing the technical.indicators import.
        atr_value = last_candle.get('atr', 0.01)
        if atr_value > 0:
            # Inverse-volatility sizing: smaller stake when ATR is high
            # relative to price.
            volatility_factor = 1.0 / (atr_value / current_rate)
            volatility_factor = np.clip(
                volatility_factor,
                self.volatility_factor_min.value,
                self.volatility_factor_max.value
            )
            base_stake *= volatility_factor
        # Adjust based on AI confidence
        ai_confidence = last_candle.get('ai_trend_strength', 0.5)
        ai_factor = np.clip(ai_confidence, 0.5, 1.5)
        base_stake *= ai_factor
        # Renamed from `mmar` to avoid shadowing the technical.indicators import.
        regime = last_candle.get('mmar_numeric', 1)
        if regime == 1:  # Bullish regime
            base_stake *= 1.2
        elif regime == 3:  # Sideways regime
            base_stake *= 0.8
        elif regime == 2:  # Bearish regime
            base_stake *= 0.6
        # Mild boost when volume confirms the move
        if last_candle.get('volume_confirmed', False):
            base_stake *= 1.1
        return np.clip(base_stake, min_stake, max_stake)
    except Exception as e:
        logger.error(f"Error in custom_stake_amount for {pair}: {e}")
        return proposed_stake
def leverage(self, pair: str, current_time: 'datetime', current_rate: float,
             proposed_leverage: float, max_leverage: float, entry_tag: str, side: str,
             **kwargs) -> float:
    """Dynamic leverage scaled by volatility, AI confidence and regime.

    Starts from min(self.max_leverage, max_leverage), multiplies by an
    inverse-volatility factor (clipped to [0.5, 2.0]), AI trend strength
    (clipped to [0.5, 1.5]) and a regime factor (bull 1.1x, bear 0.8x),
    then caps the result at max_leverage. Falls back to the fixed cap on
    error or when no candle data is available.
    """
    try:
        dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
        # Guard against an empty dataframe (startup / missing data).
        if dataframe is None or dataframe.empty:
            return min(self.max_leverage.value, max_leverage)
        last_candle = dataframe.iloc[-1].squeeze()
        base_leverage = min(self.max_leverage.value, max_leverage)
        # Renamed from `atr` to avoid shadowing the technical.indicators import.
        atr_value = last_candle.get('atr', 0.01)
        if atr_value > 0:
            # Lower leverage when relative volatility is high.
            volatility_factor = 1.0 / (atr_value / current_rate)
            volatility_factor = np.clip(volatility_factor, 0.5, 2.0)
            base_leverage *= volatility_factor
        # Adjust based on AI confidence
        ai_confidence = last_candle.get('ai_trend_strength', 0.5)
        ai_factor = np.clip(ai_confidence, 0.5, 1.5)
        base_leverage *= ai_factor
        # Renamed from `mmar` to avoid shadowing the technical.indicators import.
        regime = last_candle.get('mmar_numeric', 1)
        if regime == 1:  # Bullish regime
            base_leverage *= 1.1
        elif regime == 2:  # Bearish regime
            base_leverage *= 0.8
        return min(base_leverage, max_leverage)
    except Exception as e:
        logger.error(f"Error in leverage method for {pair}: {e}")
        return min(self.max_leverage.value, max_leverage)
def confirm_trade_entry(self, pair: str, order_type: str, amount: float, rate: float,
                        time_in_force: str, current_time: 'datetime', entry_tag: str,
                        side: str, **kwargs) -> bool:
    """Final gate before an entry order is placed.

    Rejects the entry when the latest candle shows excessive AI-estimated
    volatility, a choppy market regime, missing volume confirmation, or a
    weak trend. Fails open (allows the trade) if the check itself errors.
    """
    try:
        dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
        candle = dataframe.iloc[-1].squeeze()
        # Each flag mirrors one rejection rule; any tripped flag blocks entry.
        too_volatile = candle.get('ai_volatility', 0.5) > 0.8
        choppy_regime = candle.get('mmar_numeric', 1) == 4
        no_volume = not candle.get('volume_confirmed', False)
        weak_trend = candle.get('trend_strength', 0) < 0.2
        return not (too_volatile or choppy_regime or no_volume or weak_trend)
    except Exception as e:
        logger.error(f"Error in confirm_trade_entry for {pair}: {e}")
        return True  # fail-open: allow the trade when the data is unavailable
def log_performance_metrics(self, dataframe: pd.DataFrame, metadata: dict):
    """Log a snapshot of the latest candle's indicator values for monitoring.

    Refactored from nine copy-pasted NaN-check stanzas into a single
    helper; the emitted log lines (labels, order, precision) are unchanged.
    Does nothing on an empty dataframe; any logging error is caught and
    reported instead of propagating.
    """
    try:
        if len(dataframe) > 0:
            last_candle = dataframe.iloc[-1]
            logger.info(f"=== PERFORMANCE METRICS - {metadata['pair']} ===")

            def _log_metric(label, column, default, spec):
                # Emit "<label>: NaN" rather than formatting a NaN value.
                value = last_candle.get(column, default)
                if pd.isna(value):
                    logger.info(f"{label}: NaN")
                else:
                    logger.info(f"{label}: {value:{spec}}")

            _log_metric("RSI", 'rsi', 50, ".2f")
            _log_metric("Laguerre RSI", 'laguerre_rsi', 50, ".2f")
            _log_metric("Volume Z-Score", 'volume_z_score', 0, ".2f")
            _log_metric("Trend Strength", 'trend_strength', 0, ".3f")
            # MMAR regime is categorical -- logged verbatim, no NaN handling.
            logger.info(f"MMAR Regime: {last_candle.get('mmar_numeric', 'N/A')}")
            _log_metric("VFI", 'vfi', 0, ".3f")
            _log_metric("VPCI", 'vpci', 0, ".3f")
            _log_metric("CMF", 'cmf', 0, ".3f")
            _log_metric("AI Momentum", 'ai_momentum', 0, ".3f")
            _log_metric("AI Trend Strength", 'ai_trend_strength', 0, ".3f")
    except Exception as e:
        logger.error(f"Error in log_performance_metrics: {e}")
def bot_loop_start(self, **kwargs) -> None:
    """Hook invoked once per bot iteration, before any pair is processed.

    Intentionally a no-op: per-loop logging is deliberately suppressed so
    performance metrics are only emitted periodically, not on every tick.
    """
    return None
class HyperOpt:
    """Custom hyperopt search spaces for this strategy."""

    @staticmethod
    def trailing_space():
        """Define the custom trailing-stop optimization space.

        All four dimensions are mandatory; only their types or ranges may
        be modified.
        """
        from freqtrade.optimize.space import Categorical, SKDecimal, Dimension

        # Pinned to True: when optimizing trailing parameters we assume the
        # trailing stop is always enabled.
        always_on = Categorical([True], name='trailing_stop')
        # Profit level at which the positive trailing stop starts.
        positive_stop = SKDecimal(0.01, 0.05, decimals=3, name='trailing_stop_positive')
        # Intermediate parameter: the offset is expressed as the difference
        # added on top of trailing_stop_positive (hence the *_p1 suffix),
        # keeping the offset strictly greater than the positive stop.
        offset_delta = SKDecimal(0.001, 0.03, decimals=3, name='trailing_stop_positive_offset_p1')
        # Whether trailing only begins once the offset has been reached.
        offset_gate = Categorical([True, False], name='trailing_only_offset_is_reached')
        return [always_on, positive_stop, offset_delta, offset_gate]