Lowered signal thresholds far enough that signals started flowing. Added a few debug scripts to see why signals were not firing. Fixed the analyst.py indicator calculations to use TA-Lib.
@@ -121,38 +121,44 @@ analysis_conn.commit()
 # ========== Technical Indicator Functions ==========
 def compute_indicators(df):
-    close = df['close']
+    """Compute indicators using TA-Lib for accuracy"""
+    import talib
+
+    close = df['close'].values
+    high = df['high'].values
+    low = df['low'].values
+    volume = df['volume'].values
 
     # EMA and SMA
-    df['ema_9'] = close.ewm(span=9, adjust=False).mean()
-    df['ema_21'] = close.ewm(span=21, adjust=False).mean()
-    df['sma_50'] = close.rolling(window=50, min_periods=1).mean()
-    df['sma_200'] = close.rolling(window=200, min_periods=1).mean()
+    df['ema_9'] = talib.EMA(close, timeperiod=9)
+    df['ema_21'] = talib.EMA(close, timeperiod=21)
+    df['sma_50'] = talib.SMA(close, timeperiod=50)
+    df['sma_200'] = talib.SMA(close, timeperiod=200)
 
-    # RSI (14): using 14-period gains/losses and RSI formula (100 - 100/(1+RS))
-    delta = close.diff()
-    gain = delta.clip(lower=0)
-    loss = -delta.clip(upper=0)
-    avg_gain = gain.rolling(window=14, min_periods=14).mean()
-    avg_loss = loss.rolling(window=14, min_periods=14).mean()
-    rs = avg_gain / avg_loss.replace(0, pd.NA)
-    df['rsi_14'] = 100 - (100 / (1 + rs))
+    # RSI (14) - Proper calculation
+    df['rsi_14'] = talib.RSI(close, timeperiod=14)
 
     # MACD (12,26,9)
-    ema12 = close.ewm(span=12, adjust=False).mean()
-    ema26 = close.ewm(span=26, adjust=False).mean()
-    macd_line = ema12 - ema26
-    df['macd'] = macd_line
-    df['macd_signal'] = macd_line.ewm(span=9, adjust=False).mean()
-    df['macd_hist'] = df['macd'] - df['macd_signal']
+    macd, macd_signal, macd_hist = talib.MACD(close, fastperiod=12, slowperiod=26, signalperiod=9)
+    df['macd'] = macd
+    df['macd_signal'] = macd_signal
+    df['macd_hist'] = macd_hist
 
     # Bollinger Bands (20,2)
-    df['bb_middle'] = close.rolling(window=20, min_periods=20).mean()
-    bb_std = close.rolling(window=20, min_periods=20).std()
-    df['bb_upper'] = df['bb_middle'] + 2 * bb_std
-    df['bb_lower'] = df['bb_middle'] - 2 * bb_std
+    bb_upper, bb_middle, bb_lower = talib.BBANDS(close, timeperiod=20, nbdevup=2, nbdevdn=2, matype=0)
+    df['bb_upper'] = bb_upper
+    df['bb_middle'] = bb_middle
+    df['bb_lower'] = bb_lower
 
-    # Bollinger Squeeze: detect when BB width is lowest over 20 periods
-    bb_width = df['bb_upper'] - df['bb_lower']
-    rolling_min_width = bb_width.rolling(window=20, min_periods=20).min()
-    df['bb_squeeze'] = (bb_width <= rolling_min_width).astype(int)
+    # Bollinger Squeeze
+    bb_width = bb_upper - bb_lower
+    bb_width_series = pd.Series(bb_width)
+    rolling_min_width = bb_width_series.rolling(window=20, min_periods=20).min()
+    df['bb_squeeze'] = (bb_width_series <= rolling_min_width).fillna(0).astype(int)
 
-    # Volume moving average (20)
-    df['volume_ma_20'] = df['volume'].rolling(window=20, min_periods=1).mean()
+    # Volume MA
+    df['volume_ma_20'] = talib.SMA(volume, timeperiod=20)
 
     return df
 
 # ========== Health Check Server ==========
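A note on why the TA-Lib switch changes the numbers and not just the code path: the old RSI averaged gains and losses with a plain 14-period rolling mean, while talib.RSI uses Wilder's recursive smoothing (an EMA with alpha = 1/14), so the two disagree on identical candles. A minimal sketch of the gap, assuming only pandas, numpy, and TA-Lib (the random-walk series is illustrative, not project data):

    import numpy as np
    import pandas as pd
    import talib

    # Illustrative close series; any real close series shows the same effect.
    close = pd.Series(100 + np.cumsum(np.random.default_rng(0).normal(size=300)))

    delta = close.diff()
    gain, loss = delta.clip(lower=0), -delta.clip(upper=0)

    # Old analyst.py style: simple 14-period rolling means of gains/losses.
    rsi_simple = 100 - 100 / (1 + gain.rolling(14).mean() / loss.rolling(14).mean())

    # TA-Lib style: Wilder smoothing.
    rsi_wilder = pd.Series(talib.RSI(close.values, timeperiod=14), index=close.index)

    print((rsi_simple - rsi_wilder).abs().max())  # typically several RSI points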
analysis/backfill_indicators.py (new executable file, 196 lines)
@@ -0,0 +1,196 @@
#!/usr/bin/env python3
"""
Backfill Missing Indicators
Calculates RSI and Bollinger Bands for existing data
"""

import sqlite3
import pandas as pd
import numpy as np
import talib
from datetime import datetime


def backfill_indicators(candles_db: str, analysis_db: str):
    """Backfill RSI and Bollinger Bands for all timeframes"""

    print("🔧 BACKFILLING MISSING INDICATORS")
    print("=" * 70)

    # Connect to databases
    conn_candles = sqlite3.connect(candles_db)
    conn_analysis = sqlite3.connect(analysis_db)

    # Get all timeframes
    cursor = conn_analysis.cursor()
    cursor.execute("SELECT DISTINCT timeframe FROM analysis ORDER BY timeframe")
    timeframes = [row[0] for row in cursor.fetchall()]

    total_updated = 0

    for timeframe in timeframes:
        print(f"\n📊 Processing {timeframe}...")

        # Fetch candle data
        df_candles = pd.read_sql_query(
            "SELECT timestamp, close, high, low FROM candles WHERE timeframe = ? ORDER BY timestamp",
            conn_candles,
            params=(timeframe,)
        )

        if len(df_candles) < 20:
            print(f"   ⚠️  Skipping - insufficient data ({len(df_candles)} rows)")
            continue

        print(f"   ✓ Loaded {len(df_candles)} candles")

        # Calculate RSI
        df_candles['rsi_14'] = talib.RSI(df_candles['close'].values, timeperiod=14)

        # Calculate Bollinger Bands
        bb_upper, bb_middle, bb_lower = talib.BBANDS(
            df_candles['close'].values,
            timeperiod=20,
            nbdevup=2,
            nbdevdn=2,
            matype=0
        )

        df_candles['bb_upper'] = bb_upper
        df_candles['bb_middle'] = bb_middle
        df_candles['bb_lower'] = bb_lower

        # Calculate BB Squeeze
        # Squeeze = when BB width is in the lowest 20% of recent widths
        df_candles['bb_width'] = df_candles['bb_upper'] - df_candles['bb_lower']
        df_candles['bb_width_rank'] = df_candles['bb_width'].rolling(window=100).apply(
            lambda x: (x.iloc[-1] <= x.quantile(0.2)).astype(int) if len(x) >= 20 else 0,
            raw=False
        )
        df_candles['bb_squeeze'] = df_candles['bb_width_rank'].fillna(0).astype(int)

        # Update analysis database
        cursor_update = conn_analysis.cursor()
        updated = 0

        for _, row in df_candles.iterrows():
            cursor_update.execute("""
                UPDATE analysis
                SET rsi_14 = ?, bb_upper = ?, bb_middle = ?, bb_lower = ?, bb_squeeze = ?
                WHERE timeframe = ? AND timestamp = ?
            """, (
                float(row['rsi_14']) if not pd.isna(row['rsi_14']) else None,
                float(row['bb_upper']) if not pd.isna(row['bb_upper']) else None,
                float(row['bb_middle']) if not pd.isna(row['bb_middle']) else None,
                float(row['bb_lower']) if not pd.isna(row['bb_lower']) else None,
                int(row['bb_squeeze']),
                timeframe,
                int(row['timestamp'])
            ))
            updated += cursor_update.rowcount

        conn_analysis.commit()
        total_updated += updated

        print(f"   ✅ Updated {updated} rows")

        # Show sample
        latest = df_candles.iloc[-1]
        print(f"   Latest RSI: {latest['rsi_14']:.2f}" if not pd.isna(latest['rsi_14']) else "   Latest RSI: NULL")
        print(f"   Latest BB: Upper=${latest['bb_upper']:.2f}, Lower=${latest['bb_lower']:.2f}" if not pd.isna(latest['bb_upper']) else "   Latest BB: NULL")

    conn_candles.close()
    conn_analysis.close()

    print(f"\n{'='*70}")
    print(f"✅ BACKFILL COMPLETE!")
    print(f"   Total rows updated: {total_updated}")
    print(f"{'='*70}")


def verify_backfill(analysis_db: str):
    """Verify the backfill worked"""
    print("\n🔍 VERIFICATION")
    print("=" * 70)

    conn = sqlite3.connect(analysis_db)
    cursor = conn.cursor()

    cursor.execute("SELECT DISTINCT timeframe FROM analysis")
    timeframes = [row[0] for row in cursor.fetchall()]

    for tf in timeframes:
        # Count NULL values
        cursor.execute("""
            SELECT
                COUNT(*) as total,
                SUM(CASE WHEN rsi_14 IS NULL THEN 1 ELSE 0 END) as rsi_null,
                SUM(CASE WHEN bb_upper IS NULL THEN 1 ELSE 0 END) as bb_null
            FROM analysis
            WHERE timeframe = ?
        """, (tf,))

        total, rsi_null, bb_null = cursor.fetchone()

        print(f"\n{tf}:")
        print(f"  Total rows: {total}")
        print(f"  RSI NULL: {rsi_null} ({rsi_null/total*100:.1f}%)" if total > 0 else "  RSI NULL: N/A")
        print(f"  BB NULL: {bb_null} ({bb_null/total*100:.1f}%)" if total > 0 else "  BB NULL: N/A")

        # Get latest values
        cursor.execute("""
            SELECT rsi_14, bb_upper, bb_lower, bb_squeeze
            FROM analysis
            WHERE timeframe = ?
            ORDER BY timestamp DESC
            LIMIT 1
        """, (tf,))

        row = cursor.fetchone()
        if row and row[0] is not None:
            print(f"  ✅ Latest: RSI={row[0]:.2f}, BB_upper=${row[1]:.2f}, BB_squeeze={row[3]}")
        else:
            print(f"  ❌ Latest values still NULL")

    conn.close()


def main():
    import json

    # Load config
    try:
        with open("config.json", "r") as f:
            config = json.load(f)
        candles_db = config.get("candles_db", "../onramp/market_data.db")
        analysis_db = config.get("analysis_db", "../analysis/analysis.db")
    except FileNotFoundError:
        print("❌ config.json not found, using default paths")
        candles_db = "../onramp/market_data.db"
        analysis_db = "../analysis/analysis.db"

    print(f"Candles DB: {candles_db}")
    print(f"Analysis DB: {analysis_db}")

    try:
        backfill_indicators(candles_db, analysis_db)
        verify_backfill(analysis_db)

        print("\n💡 NEXT STEPS:")
        print("=" * 70)
        print("1. Run the signal debugger again:")
        print("   python3 signal_debugger.py")
        print("\n2. Restart the signal generator:")
        print("   pkill -f signals.py")
        print("   ./signals.py")
        print("\n3. Update your analysis pipeline to calculate these indicators")
        print("   going forward so you don't need to backfill again")

    except Exception as e:
        print(f"\n❌ Error: {e}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    main()
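Two things worth flagging in the squeeze backfill above. First, the definition differs from analyst.py: there a squeeze means "width equals its 20-period rolling minimum", here it means "width in the lowest 20% of the trailing 100 widths", so backfilled rows and freshly computed rows will not agree at the margins. Second, the Python-level lambda in rolling().apply() is slow on large tables. A sketch of a vectorized equivalent of the backfill rule, assuming the same df_candles frame as in the loop above:

    # Squeeze when width sits at or below the trailing 100-bar 20th percentile;
    # the same rule as the rolling-apply, computed without a per-window lambda.
    bb_width = df_candles['bb_upper'] - df_candles['bb_lower']
    q20 = bb_width.rolling(window=100).quantile(0.2)
    df_candles['bb_squeeze'] = (bb_width <= q20).astype(int)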
signals/analysis_inspector.py (new executable file, 158 lines)
@@ -0,0 +1,158 @@
#!/usr/bin/env python3
"""
Analysis Database Inspector
Check what indicators are actually populated
"""

import sqlite3
import json


def load_config():
    with open("config.json", "r") as f:
        return json.load(f)


def inspect_database(db_path):
    """Inspect analysis database schema and data"""
    print(f"\n📊 Inspecting: {db_path}")
    print("=" * 70)

    try:
        conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
        cursor = conn.cursor()

        # Get table schema
        cursor.execute("PRAGMA table_info(analysis)")
        columns = cursor.fetchall()

        print("\n📋 TABLE SCHEMA:")
        print(f"{'Column Name':<20} {'Type':<15} {'Not Null':<10}")
        print("-" * 50)
        for col in columns:
            print(f"{col[1]:<20} {col[2]:<15} {'YES' if col[3] else 'NO':<10}")

        # Get row count
        cursor.execute("SELECT COUNT(*) FROM analysis")
        total_rows = cursor.fetchone()[0]
        print(f"\n📊 Total rows: {total_rows}")

        # Check data availability per timeframe
        cursor.execute("SELECT DISTINCT timeframe FROM analysis ORDER BY timeframe")
        timeframes = [row[0] for row in cursor.fetchall()]

        print("\n⏱️  DATA BY TIMEFRAME:")
        for tf in timeframes:
            cursor.execute("SELECT COUNT(*) FROM analysis WHERE timeframe = ?", (tf,))
            count = cursor.fetchone()[0]
            print(f"  {tf}: {count} rows")

        # Check for NULL values in key indicators
        print("\n🔍 NULL VALUE CHECK (latest row per timeframe):")

        indicator_cols = [
            'ema_9', 'ema_21', 'sma_50', 'sma_200',
            'rsi_14', 'macd', 'macd_signal', 'macd_hist',
            'bb_upper', 'bb_middle', 'bb_lower', 'bb_squeeze',
            'volume_ma_20'
        ]

        for tf in timeframes:
            print(f"\n  Timeframe: {tf}")

            # Get latest row
            cursor.execute("""
                SELECT * FROM analysis
                WHERE timeframe = ?
                ORDER BY timestamp DESC
                LIMIT 1
            """, (tf,))

            row = cursor.fetchone()
            col_names = [desc[0] for desc in cursor.description]

            if row:
                row_dict = dict(zip(col_names, row))

                null_indicators = []
                present_indicators = []

                for ind in indicator_cols:
                    if ind in row_dict:
                        if row_dict[ind] is None:
                            null_indicators.append(ind)
                        else:
                            present_indicators.append(ind)
                    else:
                        null_indicators.append(f"{ind} (MISSING COLUMN)")

                if present_indicators:
                    print(f"    ✓ Present: {', '.join(present_indicators[:5])}")
                    if len(present_indicators) > 5:
                        print(f"               {', '.join(present_indicators[5:])}")

                if null_indicators:
                    print(f"    ❌ NULL/Missing: {', '.join(null_indicators)}")

                # Show sample values
                print(f"\n    Sample values from latest row:")
                print(f"    Timestamp: {row_dict.get('timestamp')}")
                for ind in ['ema_9', 'ema_21', 'rsi_14', 'bb_upper']:
                    if ind in row_dict:
                        val = row_dict[ind]
                        if val is not None:
                            print(f"    {ind}: {val}")
                        else:
                            print(f"    {ind}: NULL")
            else:
                print(f"    ❌ No data found")

        # Check if buy_volume exists in candles
        print("\n\n📊 Checking candles table for buy_volume...")
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='candles'")
        if cursor.fetchone():
            cursor.execute("PRAGMA table_info(candles)")
            candles_cols = [col[1] for col in cursor.fetchall()]

            if 'buy_volume' in candles_cols:
                print("  ✓ buy_volume column exists in candles table")

                # Check if it has data
                cursor.execute("SELECT COUNT(*) FROM candles WHERE buy_volume IS NOT NULL")
                count = cursor.fetchone()[0]
                print(f"  ✓ {count} rows with buy_volume data")
            else:
                print("  ❌ buy_volume column MISSING from candles table")
                print("  Available columns:", ', '.join(candles_cols))

        conn.close()

    except sqlite3.OperationalError as e:
        print(f"  ❌ Database error: {e}")
    except Exception as e:
        print(f"  ❌ Error: {e}")


def main():
    config = load_config()

    print("🔍 ANALYSIS DATABASE INSPECTOR")
    print("=" * 70)

    inspect_database(config["analysis_db"])

    print("\n\n💡 NEXT STEPS:")
    print("=" * 70)
    print("If indicators are missing:")
    print("  1. Check your analysis pipeline is running")
    print("  2. Verify the analysis script calculates these indicators:")
    print("     - rsi_14, bb_upper, bb_lower, bb_middle, bb_squeeze")
    print("  3. Re-run analysis on existing candle data")
    print("\nIf buy_volume is missing:")
    print("  1. Update your candles table schema")
    print("  2. Modify your data ingestion to capture buy_volume")
    print("  3. Or set buy_volume = volume/2 as approximation")


if __name__ == "__main__":
    main()
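One detail of the inspector worth noting: it opens the database with a read-only URI, which is what makes it safe to run beside the live analysis pipeline. A minimal sketch of the pattern (the path here is illustrative):

    import sqlite3

    # mode=ro refuses writes and won't create a missing file, so the running
    # analyzer keeps its write access undisturbed while we poke around.
    conn = sqlite3.connect("file:analysis.db?mode=ro", uri=True)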
@@ -10,7 +10,7 @@
     "personality": "scalping",
     "timeframes": ["1m", "5m"],
     "lookback": 200,
-    "min_confidence": 0.45,
+    "min_confidence": 0.40,
     "cooldown_seconds": 30,
     "weights": {
         "scalping": {
@@ -18,12 +18,12 @@
             "stoch": 0.2,
             "rsi": 0.2,
             "volume": 0.2,
-            "macd": 0.15
+            "macd": 0.3
         },
         "swing": {
             "regime": 0.35,
             "bb_squeeze": 0.25,
-            "macd": 0.2,
+            "macd": 0.3,
             "flow": 0.15,
             "rsi": 0.05
         }
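This config change is the "lowered thresholds" half of the commit: min_confidence drops from 0.45 to 0.40 while the macd weight rises to 0.3 in both personalities. The ema_cross weight sits above this hunk, so the value below is a stand-in, but the arithmetic shows why an EMA crossover plus MACD agreement can now clear the bar when it previously could not:

    # ema_cross weight is outside the hunk; 0.25 here is a placeholder only.
    w_ema_cross = 0.25
    old_score = w_ema_cross + 0.15   # crossover + MACD aligned, old weights
    new_score = w_ema_cross + 0.30   # same two conditions, new weights
    print(old_score >= 0.45, new_score >= 0.40)   # False, True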
signals/signal_debugger.py (new executable file, 357 lines)
@@ -0,0 +1,357 @@
#!/usr/bin/env python3
"""
Signal Generator Debugger
Analyzes why signals aren't being generated
"""

import sqlite3
import pandas as pd
import numpy as np
import talib
import json
from datetime import datetime
import sys


def load_config():
    with open("config.json", "r") as f:
        return json.load(f)


def fetch_data(candles_db, analysis_db, timeframe, lookback=200):
    """Fetch and enrich data exactly like signals.py does"""
    try:
        conn_c = sqlite3.connect(f"file:{candles_db}?mode=ro", uri=True, timeout=10)
        conn_c.execute(f"ATTACH DATABASE 'file:{analysis_db}?mode=ro' AS analysis_db")

        query = """
            SELECT
                c.timeframe, c.timestamp, c.open, c.high, c.low, c.close,
                c.volume, c.buy_volume,
                a.ema_9, a.ema_21, a.sma_50, a.sma_200,
                a.rsi_14, a.macd, a.macd_signal, a.macd_hist,
                a.bb_upper, a.bb_middle, a.bb_lower, a.bb_squeeze,
                a.volume_ma_20
            FROM candles c
            JOIN analysis_db.analysis a
                ON c.timeframe = a.timeframe
                AND c.timestamp = a.timestamp
            WHERE c.timeframe = ?
            ORDER BY c.timestamp DESC
            LIMIT ?
        """

        df = pd.read_sql_query(query, conn_c, params=(timeframe, lookback))
        conn_c.close()

        if df.empty:
            return None

        df = df.sort_values("timestamp").reset_index(drop=True)
        df["datetime"] = pd.to_datetime(df["timestamp"], unit="s")

        # Filter closed candles
        import time
        current_time = int(time.time())
        window = {"1m": 60, "5m": 300, "15m": 900, "1h": 3600}.get(timeframe, 60)
        df = df[df["timestamp"] < (current_time - window)]

        if len(df) < 50:
            return None

        df = df.dropna(subset=["open", "high", "low", "close", "volume"])

        if len(df) < 50:
            return None

        # Add Stochastic
        df["stoch_k"], df["stoch_d"] = talib.STOCH(
            df["high"].values,
            df["low"].values,
            df["close"].values,
            fastk_period=14,
            slowk_period=3,
            slowd_period=3,
        )

        df["buy_ratio"] = df["buy_volume"] / df["volume"].replace(0, np.nan)
        df["net_flow"] = df["buy_volume"] - (df["volume"] - df["buy_volume"])

        return df

    except Exception as e:
        print(f"Error fetching data: {e}")
        return None


def analyze_scalping(df, weights, min_confidence):
    """Analyze scalping signal generation"""
    if len(df) < 21:
        print("  ❌ Insufficient data for scalping (need 21+ rows)")
        return

    latest = df.iloc[-1]
    prev = df.iloc[-2]

    print(f"\n📊 SCALPING ANALYSIS ({len(df)} candles)")
    print("=" * 70)

    # Check for NULL values
    required = ["ema_9", "ema_21", "rsi_14", "stoch_k", "stoch_d", "macd", "macd_signal"]
    null_cols = [col for col in required if pd.isna(latest[col])]

    if null_cols:
        print(f"  ❌ SKIPPED: Missing indicators: {', '.join(null_cols)}")
        return
    else:
        print("  ✓ All required indicators present")

    print(f"\n  Latest candle: {latest['datetime']}")
    print(f"  Close: ${latest['close']:.2f}")

    # EMA Analysis
    print(f"\n  EMA Crossover Check:")
    print(f"    Current:  EMA9={latest['ema_9']:.2f} vs EMA21={latest['ema_21']:.2f}")
    print(f"    Previous: EMA9={prev['ema_9']:.2f} vs EMA21={prev['ema_21']:.2f}")

    ema_cross_up = latest["ema_9"] > latest["ema_21"] and prev["ema_9"] <= prev["ema_21"]
    ema_cross_down = latest["ema_9"] < latest["ema_21"] and prev["ema_9"] >= prev["ema_21"]

    if ema_cross_up:
        print(f"    ✓ BULLISH CROSSOVER DETECTED!")
        signal_type = "BUY"
        score = weights["ema_cross"]
    elif ema_cross_down:
        print(f"    ✓ BEARISH CROSSOVER DETECTED!")
        signal_type = "SELL"
        score = weights["ema_cross"]
    else:
        print(f"    ❌ No crossover (EMA9 {'above' if latest['ema_9'] > latest['ema_21'] else 'below'} EMA21)")

        # Show trend direction
        ema_diff = latest["ema_9"] - latest["ema_21"]
        prev_diff = prev["ema_9"] - prev["ema_21"]
        trend = "converging" if abs(ema_diff) < abs(prev_diff) else "diverging"
        print(f"    EMAs are {trend} (diff: {ema_diff:.2f} vs prev: {prev_diff:.2f})")
        return

    # We have a crossover, check other indicators
    print(f"\n  Signal Type: {signal_type}")
    print(f"  Base Score: {score:.3f} (from EMA crossover)")

    # Stochastic
    print(f"\n  Stochastic:")
    print(f"    K={latest['stoch_k']:.1f}, D={latest['stoch_d']:.1f}")

    if signal_type == "BUY":
        if latest["stoch_k"] > latest["stoch_d"] and latest["stoch_k"] < 30:
            score += weights["stoch"]
            print(f"    ✓ Oversold crossover (+{weights['stoch']:.3f})")
        else:
            print(f"    ❌ Not oversold crossover (K>D: {latest['stoch_k'] > latest['stoch_d']}, K<30: {latest['stoch_k'] < 30})")
    else:
        if latest["stoch_k"] < latest["stoch_d"] and latest["stoch_k"] > 70:
            score += weights["stoch"]
            print(f"    ✓ Overbought crossover (+{weights['stoch']:.3f})")
        else:
            print(f"    ❌ Not overbought crossover (K<D: {latest['stoch_k'] < latest['stoch_d']}, K>70: {latest['stoch_k'] > 70})")

    # RSI
    print(f"\n  RSI: {latest['rsi_14']:.1f}")

    if signal_type == "BUY" and latest["rsi_14"] < 40:
        score += weights["rsi"]
        print(f"    ✓ Oversold (+{weights['rsi']:.3f})")
    elif signal_type == "SELL" and latest["rsi_14"] > 60:
        score += weights["rsi"]
        print(f"    ✓ Overbought (+{weights['rsi']:.3f})")
    else:
        print(f"    ❌ Not in range (BUY needs <40, SELL needs >60)")

    # Volume
    vol_ratio = latest["volume"] / latest["volume_ma_20"] if latest["volume_ma_20"] else 0
    print(f"\n  Volume: {latest['volume']:.2f} vs MA20: {latest['volume_ma_20']:.2f}")
    print(f"    Ratio: {vol_ratio:.2f}x")

    if vol_ratio > 1.5:
        score += weights["volume"]
        print(f"    ✓ Volume surge (+{weights['volume']:.3f})")
    else:
        print(f"    ❌ No surge (need >1.5x)")

    # MACD
    print(f"\n  MACD: {latest['macd']:.2f} vs Signal: {latest['macd_signal']:.2f}")

    if signal_type == "BUY" and latest["macd"] > latest["macd_signal"]:
        score += weights["macd"]
        print(f"    ✓ Bullish (+{weights['macd']:.3f})")
    elif signal_type == "SELL" and latest["macd"] < latest["macd_signal"]:
        score += weights["macd"]
        print(f"    ✓ Bearish (+{weights['macd']:.3f})")
    else:
        print(f"    ❌ Not aligned")

    # Final score
    print(f"\n  {'='*70}")
    print(f"  FINAL SCORE: {score:.3f}")
    print(f"  THRESHOLD: {min_confidence:.3f}")

    if score >= min_confidence:
        print(f"  ✅ SIGNAL WOULD BE GENERATED!")
    else:
        print(f"  ❌ Below threshold (need {min_confidence - score:.3f} more)")


def analyze_swing(df, weights, min_confidence):
    """Analyze swing signal generation"""
    if len(df) < 200:
        print(f"  ❌ Insufficient data for swing (need 200+ rows, have {len(df)})")
        return

    latest = df.iloc[-1]
    prev = df.iloc[-2]

    print(f"\n📊 SWING ANALYSIS ({len(df)} candles)")
    print("=" * 70)

    # Check for NULL values
    required = ["sma_50", "sma_200", "bb_upper", "bb_lower", "bb_squeeze", "macd", "macd_signal", "buy_ratio"]
    null_cols = [col for col in required if pd.isna(latest[col])]

    if null_cols:
        print(f"  ❌ SKIPPED: Missing indicators: {', '.join(null_cols)}")
        return
    else:
        print("  ✓ All required indicators present")

    print(f"\n  Latest candle: {latest['datetime']}")
    print(f"  Close: ${latest['close']:.2f}")

    # Regime Analysis
    print(f"\n  Regime Analysis:")
    print(f"    Price: ${latest['close']:.2f}")
    print(f"    SMA50: ${latest['sma_50']:.2f}")
    print(f"    SMA200: ${latest['sma_200']:.2f}")

    bull_regime = latest["close"] > latest["sma_50"] > latest["sma_200"]
    bear_regime = latest["close"] < latest["sma_50"] < latest["sma_200"]

    score = 0
    signal_type = None

    if bull_regime:
        signal_type = "BUY"
        score += weights["regime"]
        print(f"    ✓ BULL REGIME (Price > SMA50 > SMA200) (+{weights['regime']:.3f})")
    elif bear_regime:
        signal_type = "SELL"
        score += weights["regime"]
        print(f"    ✓ BEAR REGIME (Price < SMA50 < SMA200) (+{weights['regime']:.3f})")
    else:
        print(f"    ❌ No clear regime")
        print(f"    Price vs SMA50: {'above' if latest['close'] > latest['sma_50'] else 'below'}")
        print(f"    SMA50 vs SMA200: {'above' if latest['sma_50'] > latest['sma_200'] else 'below'}")
        return

    print(f"\n  Signal Type: {signal_type}")
    print(f"  Base Score: {score:.3f} (from regime)")

    # BB Squeeze
    print(f"\n  Bollinger Bands:")
    print(f"    Squeeze: {latest['bb_squeeze']} (prev: {prev['bb_squeeze']})")
    print(f"    Upper: ${latest['bb_upper']:.2f}, Lower: ${latest['bb_lower']:.2f}")

    if latest["bb_squeeze"] == 1 or prev["bb_squeeze"] == 1:
        if signal_type == "BUY" and latest["close"] > latest["bb_upper"]:
            score += weights["bb_squeeze"]
            print(f"    ✓ Squeeze breakout upside (+{weights['bb_squeeze']:.3f})")
        elif signal_type == "SELL" and latest["close"] < latest["bb_lower"]:
            score += weights["bb_squeeze"]
            print(f"    ✓ Squeeze breakout downside (+{weights['bb_squeeze']:.3f})")
        else:
            print(f"    ❌ Squeeze present but no breakout")
    else:
        print(f"    ❌ No squeeze")

    # MACD
    print(f"\n  MACD:")
    print(f"    Current:  {latest['macd']:.2f} vs Signal: {latest['macd_signal']:.2f}")
    print(f"    Previous: {prev['macd']:.2f} vs Signal: {prev['macd_signal']:.2f}")

    macd_cross_up = latest["macd"] > latest["macd_signal"] and prev["macd"] <= prev["macd_signal"]
    macd_cross_down = latest["macd"] < latest["macd_signal"] and prev["macd"] >= prev["macd_signal"]

    if signal_type == "BUY" and macd_cross_up:
        score += weights["macd"]
        print(f"    ✓ Bullish crossover (+{weights['macd']:.3f})")
    elif signal_type == "SELL" and macd_cross_down:
        score += weights["macd"]
        print(f"    ✓ Bearish crossover (+{weights['macd']:.3f})")
    else:
        print(f"    ❌ No crossover or not aligned")

    # Net flow
    print(f"\n  Buy/Sell Pressure:")
    print(f"    Buy Ratio: {latest['buy_ratio']:.2%}")

    if signal_type == "BUY" and latest["buy_ratio"] > 0.55:
        score += weights["flow"]
        print(f"    ✓ Strong buy pressure (+{weights['flow']:.3f})")
    elif signal_type == "SELL" and latest["buy_ratio"] < 0.45:
        score += weights["flow"]
        print(f"    ✓ Strong sell pressure (+{weights['flow']:.3f})")
    else:
        print(f"    ❌ Neutral pressure")

    # RSI
    print(f"\n  RSI: {latest['rsi_14']:.1f}")

    if signal_type == "BUY" and latest["rsi_14"] < 50:
        score += weights["rsi"]
        print(f"    ✓ Not overbought (+{weights['rsi']:.3f})")
    elif signal_type == "SELL" and latest["rsi_14"] > 50:
        score += weights["rsi"]
        print(f"    ✓ Not oversold (+{weights['rsi']:.3f})")
    else:
        print(f"    ❌ Unfavorable")

    # Final score
    print(f"\n  {'='*70}")
    print(f"  FINAL SCORE: {score:.3f}")
    print(f"  THRESHOLD: {min_confidence:.3f}")

    if score >= min_confidence:
        print(f"  ✅ SIGNAL WOULD BE GENERATED!")
    else:
        print(f"  ❌ Below threshold (need {min_confidence - score:.3f} more)")


def main():
    config = load_config()

    print("🔍 SIGNAL GENERATOR DEBUGGER")
    print("=" * 70)
    print(f"Min Confidence: {config['min_confidence']}")
    print(f"Timeframes: {', '.join(config['timeframes'])}")
    print(f"Lookback: {config['lookback']} candles")

    for timeframe in config["timeframes"]:
        print(f"\n\n{'='*70}")
        print(f"TIMEFRAME: {timeframe}")
        print(f"{'='*70}")

        df = fetch_data(config["candles_db"], config["analysis_db"], timeframe, config["lookback"])

        if df is None:
            print(f"  ❌ No data available")
            continue

        print(f"  ✓ Loaded {len(df)} candles")

        # Analyze both personalities
        analyze_scalping(df, config["weights"]["scalping"], config["min_confidence"])
        analyze_swing(df, config["weights"]["swing"], config["min_confidence"])


if __name__ == "__main__":
    main()
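For a quick one-off check, the debugger's pieces can also be driven from a REPL instead of through main(); a sketch, assuming config.json is in the working directory as the script itself expects:

    import signal_debugger as dbg

    cfg = dbg.load_config()
    df = dbg.fetch_data(cfg["candles_db"], cfg["analysis_db"], "1m", cfg["lookback"])
    if df is not None:
        dbg.analyze_scalping(df, cfg["weights"]["scalping"], cfg["min_confidence"])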