Lower signal thresholds enough that signals actually flow. Add a few debug scripts to investigate why signals were not firing. Fix analyst.py indicator calculation to use TA-Lib.
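Note: analyst.py itself is not part of this diff, so the sketch below is only a guess at what the TA-Lib-based calculation could look like. The helper name compute_indicators and the DataFrame layout are assumptions; the output columns simply mirror the analysis table columns that the inspector script checks.

# Hedged sketch, not the actual analyst.py change: compute the indicator
# columns the inspector expects from a pandas DataFrame of candles.
import pandas as pd
import talib

def compute_indicators(df: pd.DataFrame) -> pd.DataFrame:
    close = df["close"].astype(float).values
    volume = df["volume"].astype(float).values

    df["ema_9"] = talib.EMA(close, timeperiod=9)
    df["ema_21"] = talib.EMA(close, timeperiod=21)
    df["sma_50"] = talib.SMA(close, timeperiod=50)
    df["sma_200"] = talib.SMA(close, timeperiod=200)
    df["rsi_14"] = talib.RSI(close, timeperiod=14)
    df["macd"], df["macd_signal"], df["macd_hist"] = talib.MACD(
        close, fastperiod=12, slowperiod=26, signalperiod=9
    )
    df["bb_upper"], df["bb_middle"], df["bb_lower"] = talib.BBANDS(
        close, timeperiod=20, nbdevup=2, nbdevdn=2
    )
    df["volume_ma_20"] = talib.SMA(volume, timeperiod=20)
    # bb_squeeze is not a TA-Lib function; a common stand-in (assumption) is
    # flagging rows where band width falls below a fraction of the middle band.
    bandwidth = (df["bb_upper"] - df["bb_lower"]) / df["bb_middle"]
    df["bb_squeeze"] = (bandwidth < 0.02).astype(int)
    return df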
signals/analysis_inspector.py (new executable file, 158 lines)
@@ -0,0 +1,158 @@
#!/usr/bin/env python3
"""
Analysis Database Inspector
Check what indicators are actually populated
"""

import sqlite3
import json


def load_config():
    with open("config.json", "r") as f:
        return json.load(f)


def inspect_database(db_path):
    """Inspect analysis database schema and data"""
    print(f"\n📊 Inspecting: {db_path}")
    print("=" * 70)

    try:
        conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
        cursor = conn.cursor()

        # Get table schema
        cursor.execute("PRAGMA table_info(analysis)")
        columns = cursor.fetchall()

        print("\n📋 TABLE SCHEMA:")
        print(f"{'Column Name':<20} {'Type':<15} {'Not Null':<10}")
        print("-" * 50)
        for col in columns:
            print(f"{col[1]:<20} {col[2]:<15} {'YES' if col[3] else 'NO':<10}")

        # Get row count
        cursor.execute("SELECT COUNT(*) FROM analysis")
        total_rows = cursor.fetchone()[0]
        print(f"\n📊 Total rows: {total_rows}")

        # Check data availability per timeframe
        cursor.execute("SELECT DISTINCT timeframe FROM analysis ORDER BY timeframe")
        timeframes = [row[0] for row in cursor.fetchall()]

        print("\n⏱️ DATA BY TIMEFRAME:")
        for tf in timeframes:
            cursor.execute("SELECT COUNT(*) FROM analysis WHERE timeframe = ?", (tf,))
            count = cursor.fetchone()[0]
            print(f" {tf}: {count} rows")

        # Check for NULL values in key indicators
        print("\n🔍 NULL VALUE CHECK (latest row per timeframe):")

        indicator_cols = [
            'ema_9', 'ema_21', 'sma_50', 'sma_200',
            'rsi_14', 'macd', 'macd_signal', 'macd_hist',
            'bb_upper', 'bb_middle', 'bb_lower', 'bb_squeeze',
            'volume_ma_20'
        ]

        for tf in timeframes:
            print(f"\n Timeframe: {tf}")

            # Get latest row
            cursor.execute("""
                SELECT * FROM analysis
                WHERE timeframe = ?
                ORDER BY timestamp DESC
                LIMIT 1
            """, (tf,))

            row = cursor.fetchone()
            col_names = [desc[0] for desc in cursor.description]

            if row:
                row_dict = dict(zip(col_names, row))

                null_indicators = []
                present_indicators = []

                for ind in indicator_cols:
                    if ind in row_dict:
                        if row_dict[ind] is None:
                            null_indicators.append(ind)
                        else:
                            present_indicators.append(ind)
                    else:
                        null_indicators.append(f"{ind} (MISSING COLUMN)")

                if present_indicators:
                    print(f" ✓ Present: {', '.join(present_indicators[:5])}")
                    if len(present_indicators) > 5:
                        print(f" {', '.join(present_indicators[5:])}")

                if null_indicators:
                    print(f" ❌ NULL/Missing: {', '.join(null_indicators)}")

                # Show sample values
                print(f"\n Sample values from latest row:")
                print(f" Timestamp: {row_dict.get('timestamp')}")
                for ind in ['ema_9', 'ema_21', 'rsi_14', 'bb_upper']:
                    if ind in row_dict:
                        val = row_dict[ind]
                        if val is not None:
                            print(f" {ind}: {val}")
                        else:
                            print(f" {ind}: NULL")
            else:
                print(f" ❌ No data found")

        # Check if buy_volume exists in candles
        print("\n\n📊 Checking candles table for buy_volume...")
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='candles'")
        if cursor.fetchone():
            cursor.execute("PRAGMA table_info(candles)")
            candles_cols = [col[1] for col in cursor.fetchall()]

            if 'buy_volume' in candles_cols:
                print(" ✓ buy_volume column exists in candles table")

                # Check if it has data
                cursor.execute("SELECT COUNT(*) FROM candles WHERE buy_volume IS NOT NULL")
                count = cursor.fetchone()[0]
                print(f" ✓ {count} rows with buy_volume data")
            else:
                print(" ❌ buy_volume column MISSING from candles table")
                print(" Available columns:", ', '.join(candles_cols))

        conn.close()

    except sqlite3.OperationalError as e:
        print(f" ❌ Database error: {e}")
    except Exception as e:
        print(f" ❌ Error: {e}")


def main():
    config = load_config()

    print("🔍 ANALYSIS DATABASE INSPECTOR")
    print("=" * 70)

    inspect_database(config["analysis_db"])

    print("\n\n💡 NEXT STEPS:")
    print("=" * 70)
    print("If indicators are missing:")
    print(" 1. Check your analysis pipeline is running")
    print(" 2. Verify the analysis script calculates these indicators:")
    print(" - rsi_14, bb_upper, bb_lower, bb_middle, bb_squeeze")
    print(" 3. Re-run analysis on existing candle data")
    print("\nIf buy_volume is missing:")
    print(" 1. Update your candles table schema")
    print(" 2. Modify your data ingestion to capture buy_volume")
    print(" 3. Or set buy_volume = volume/2 as approximation")


if __name__ == "__main__":
    main()
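If buy_volume does turn out to be missing, the inspector's last suggestion (buy_volume = volume/2) could be applied with a one-off backfill roughly like the sketch below; backfill_buy_volume is a hypothetical helper, and the UPDATE assumes the candles table already has a nullable buy_volume column.

# One-off backfill sketch for the "buy_volume = volume/2" approximation
# suggested by analysis_inspector.py; assumes buy_volume exists and is NULL
# where it has never been populated.
import sqlite3

def backfill_buy_volume(db_path):
    conn = sqlite3.connect(db_path)
    try:
        cur = conn.execute(
            "UPDATE candles SET buy_volume = volume / 2.0 WHERE buy_volume IS NULL"
        )
        conn.commit()
        return cur.rowcount  # number of rows backfilled
    finally:
        conn.close()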
@@ -10,7 +10,7 @@
     "personality": "scalping",
     "timeframes": ["1m", "5m"],
     "lookback": 200,
-    "min_confidence": 0.45,
+    "min_confidence": 0.40,
     "cooldown_seconds": 30,
     "weights": {
         "scalping": {
@@ -18,12 +18,12 @@
             "stoch": 0.2,
             "rsi": 0.2,
             "volume": 0.2,
-            "macd": 0.15
+            "macd": 0.3
         },
         "swing": {
             "regime": 0.35,
             "bb_squeeze": 0.25,
-            "macd": 0.2,
+            "macd": 0.3,
             "flow": 0.15,
             "rsi": 0.05
         }
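Rough arithmetic on why this hunk matters (the ema_cross weight sits outside the hunk, so 0.25 below is only a placeholder): under the old values an EMA crossover plus MACD agreement alone stayed below min_confidence, while the new values push the same setup over the bar.

# Back-of-the-envelope check of the config change; ema_cross weight is a
# placeholder since that key is not shown in this diff.
ema_cross = 0.25                      # placeholder, not part of this hunk
old_score = ema_cross + 0.15          # old scalping macd weight
new_score = ema_cross + 0.3           # new scalping macd weight
print(old_score >= 0.45)              # False: crossover + MACD missed the old threshold
print(new_score >= 0.40)              # True: the same setup clears the new threshold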
signals/signal_debugger.py (new executable file, 357 lines)
@@ -0,0 +1,357 @@
#!/usr/bin/env python3
"""
Signal Generator Debugger
Analyzes why signals aren't being generated
"""

import sqlite3
import pandas as pd
import numpy as np
import talib
import json
from datetime import datetime
import sys


def load_config():
    with open("config.json", "r") as f:
        return json.load(f)


def fetch_data(candles_db, analysis_db, timeframe, lookback=200):
    """Fetch and enrich data exactly like signals.py does"""
    try:
        conn_c = sqlite3.connect(f"file:{candles_db}?mode=ro", uri=True, timeout=10)
        conn_c.execute(f"ATTACH DATABASE 'file:{analysis_db}?mode=ro' AS analysis_db")

        query = """
            SELECT
                c.timeframe, c.timestamp, c.open, c.high, c.low, c.close,
                c.volume, c.buy_volume,
                a.ema_9, a.ema_21, a.sma_50, a.sma_200,
                a.rsi_14, a.macd, a.macd_signal, a.macd_hist,
                a.bb_upper, a.bb_middle, a.bb_lower, a.bb_squeeze,
                a.volume_ma_20
            FROM candles c
            JOIN analysis_db.analysis a
                ON c.timeframe = a.timeframe
                AND c.timestamp = a.timestamp
            WHERE c.timeframe = ?
            ORDER BY c.timestamp DESC
            LIMIT ?
        """

        df = pd.read_sql_query(query, conn_c, params=(timeframe, lookback))
        conn_c.close()

        if df.empty:
            return None

        df = df.sort_values("timestamp").reset_index(drop=True)
        df["datetime"] = pd.to_datetime(df["timestamp"], unit="s")

        # Filter closed candles
        import time
        current_time = int(time.time())
        window = {"1m": 60, "5m": 300, "15m": 900, "1h": 3600}.get(timeframe, 60)
        df = df[df["timestamp"] < (current_time - window)]

        if len(df) < 50:
            return None

        df = df.dropna(subset=["open", "high", "low", "close", "volume"])

        if len(df) < 50:
            return None

        # Add Stochastic
        df["stoch_k"], df["stoch_d"] = talib.STOCH(
            df["high"].values,
            df["low"].values,
            df["close"].values,
            fastk_period=14,
            slowk_period=3,
            slowd_period=3,
        )

        df["buy_ratio"] = df["buy_volume"] / df["volume"].replace(0, np.nan)
        df["net_flow"] = df["buy_volume"] - (df["volume"] - df["buy_volume"])

        return df

    except Exception as e:
        print(f"Error fetching data: {e}")
        return None

def analyze_scalping(df, weights, min_confidence):
    """Analyze scalping signal generation"""
    if len(df) < 21:
        print(" ❌ Insufficient data for scalping (need 21+ rows)")
        return

    latest = df.iloc[-1]
    prev = df.iloc[-2]

    print(f"\n📊 SCALPING ANALYSIS ({len(df)} candles)")
    print("=" * 70)

    # Check for NULL values
    required = ["ema_9", "ema_21", "rsi_14", "stoch_k", "stoch_d", "macd", "macd_signal"]
    null_cols = [col for col in required if pd.isna(latest[col])]

    if null_cols:
        print(f" ❌ SKIPPED: Missing indicators: {', '.join(null_cols)}")
        return
    else:
        print(" ✓ All required indicators present")

    print(f"\n Latest candle: {latest['datetime']}")
    print(f" Close: ${latest['close']:.2f}")

    # EMA Analysis
    print(f"\n EMA Crossover Check:")
    print(f" Current: EMA9={latest['ema_9']:.2f} vs EMA21={latest['ema_21']:.2f}")
    print(f" Previous: EMA9={prev['ema_9']:.2f} vs EMA21={prev['ema_21']:.2f}")

    ema_cross_up = latest["ema_9"] > latest["ema_21"] and prev["ema_9"] <= prev["ema_21"]
    ema_cross_down = latest["ema_9"] < latest["ema_21"] and prev["ema_9"] >= prev["ema_21"]

    if ema_cross_up:
        print(f" ✓ BULLISH CROSSOVER DETECTED!")
        signal_type = "BUY"
        score = weights["ema_cross"]
    elif ema_cross_down:
        print(f" ✓ BEARISH CROSSOVER DETECTED!")
        signal_type = "SELL"
        score = weights["ema_cross"]
    else:
        print(f" ❌ No crossover (EMA9 {'above' if latest['ema_9'] > latest['ema_21'] else 'below'} EMA21)")

        # Show trend direction
        ema_diff = latest["ema_9"] - latest["ema_21"]
        prev_diff = prev["ema_9"] - prev["ema_21"]
        trend = "converging" if abs(ema_diff) < abs(prev_diff) else "diverging"
        print(f" EMAs are {trend} (diff: {ema_diff:.2f} vs prev: {prev_diff:.2f})")
        return

    # We have a crossover, check other indicators
    print(f"\n Signal Type: {signal_type}")
    print(f" Base Score: {score:.3f} (from EMA crossover)")

    # Stochastic
    print(f"\n Stochastic:")
    print(f" K={latest['stoch_k']:.1f}, D={latest['stoch_d']:.1f}")

    if signal_type == "BUY":
        if latest["stoch_k"] > latest["stoch_d"] and latest["stoch_k"] < 30:
            score += weights["stoch"]
            print(f" ✓ Oversold crossover (+{weights['stoch']:.3f})")
        else:
            print(f" ❌ Not oversold crossover (K>D: {latest['stoch_k'] > latest['stoch_d']}, K<30: {latest['stoch_k'] < 30})")
    else:
        if latest["stoch_k"] < latest["stoch_d"] and latest["stoch_k"] > 70:
            score += weights["stoch"]
            print(f" ✓ Overbought crossover (+{weights['stoch']:.3f})")
        else:
            print(f" ❌ Not overbought crossover (K<D: {latest['stoch_k'] < latest['stoch_d']}, K>70: {latest['stoch_k'] > 70})")

    # RSI
    print(f"\n RSI: {latest['rsi_14']:.1f}")

    if signal_type == "BUY" and latest["rsi_14"] < 40:
        score += weights["rsi"]
        print(f" ✓ Oversold (+{weights['rsi']:.3f})")
    elif signal_type == "SELL" and latest["rsi_14"] > 60:
        score += weights["rsi"]
        print(f" ✓ Overbought (+{weights['rsi']:.3f})")
    else:
        print(f" ❌ Not in range (BUY needs <40, SELL needs >60)")

    # Volume
    vol_ratio = latest["volume"] / latest["volume_ma_20"] if latest["volume_ma_20"] else 0
    print(f"\n Volume: {latest['volume']:.2f} vs MA20: {latest['volume_ma_20']:.2f}")
    print(f" Ratio: {vol_ratio:.2f}x")

    if vol_ratio > 1.5:
        score += weights["volume"]
        print(f" ✓ Volume surge (+{weights['volume']:.3f})")
    else:
        print(f" ❌ No surge (need >1.5x)")

    # MACD
    print(f"\n MACD: {latest['macd']:.2f} vs Signal: {latest['macd_signal']:.2f}")

    if signal_type == "BUY" and latest["macd"] > latest["macd_signal"]:
        score += weights["macd"]
        print(f" ✓ Bullish (+{weights['macd']:.3f})")
    elif signal_type == "SELL" and latest["macd"] < latest["macd_signal"]:
        score += weights["macd"]
        print(f" ✓ Bearish (+{weights['macd']:.3f})")
    else:
        print(f" ❌ Not aligned")

    # Final score
    print(f"\n {'='*70}")
    print(f" FINAL SCORE: {score:.3f}")
    print(f" THRESHOLD: {min_confidence:.3f}")

    if score >= min_confidence:
        print(f" ✅ SIGNAL WOULD BE GENERATED!")
    else:
        print(f" ❌ Below threshold (need {min_confidence - score:.3f} more)")

def analyze_swing(df, weights, min_confidence):
    """Analyze swing signal generation"""
    if len(df) < 200:
        print(f" ❌ Insufficient data for swing (need 200+ rows, have {len(df)})")
        return

    latest = df.iloc[-1]
    prev = df.iloc[-2]

    print(f"\n📊 SWING ANALYSIS ({len(df)} candles)")
    print("=" * 70)

    # Check for NULL values
    required = ["sma_50", "sma_200", "bb_upper", "bb_lower", "bb_squeeze", "macd", "macd_signal", "buy_ratio"]
    null_cols = [col for col in required if pd.isna(latest[col])]

    if null_cols:
        print(f" ❌ SKIPPED: Missing indicators: {', '.join(null_cols)}")
        return
    else:
        print(" ✓ All required indicators present")

    print(f"\n Latest candle: {latest['datetime']}")
    print(f" Close: ${latest['close']:.2f}")

    # Regime Analysis
    print(f"\n Regime Analysis:")
    print(f" Price: ${latest['close']:.2f}")
    print(f" SMA50: ${latest['sma_50']:.2f}")
    print(f" SMA200: ${latest['sma_200']:.2f}")

    bull_regime = latest["close"] > latest["sma_50"] > latest["sma_200"]
    bear_regime = latest["close"] < latest["sma_50"] < latest["sma_200"]

    score = 0
    signal_type = None

    if bull_regime:
        signal_type = "BUY"
        score += weights["regime"]
        print(f" ✓ BULL REGIME (Price > SMA50 > SMA200) (+{weights['regime']:.3f})")
    elif bear_regime:
        signal_type = "SELL"
        score += weights["regime"]
        print(f" ✓ BEAR REGIME (Price < SMA50 < SMA200) (+{weights['regime']:.3f})")
    else:
        print(f" ❌ No clear regime")
        print(f" Price vs SMA50: {'above' if latest['close'] > latest['sma_50'] else 'below'}")
        print(f" SMA50 vs SMA200: {'above' if latest['sma_50'] > latest['sma_200'] else 'below'}")
        return

    print(f"\n Signal Type: {signal_type}")
    print(f" Base Score: {score:.3f} (from regime)")

    # BB Squeeze
    print(f"\n Bollinger Bands:")
    print(f" Squeeze: {latest['bb_squeeze']} (prev: {prev['bb_squeeze']})")
    print(f" Upper: ${latest['bb_upper']:.2f}, Lower: ${latest['bb_lower']:.2f}")

    if latest["bb_squeeze"] == 1 or prev["bb_squeeze"] == 1:
        if signal_type == "BUY" and latest["close"] > latest["bb_upper"]:
            score += weights["bb_squeeze"]
            print(f" ✓ Squeeze breakout upside (+{weights['bb_squeeze']:.3f})")
        elif signal_type == "SELL" and latest["close"] < latest["bb_lower"]:
            score += weights["bb_squeeze"]
            print(f" ✓ Squeeze breakout downside (+{weights['bb_squeeze']:.3f})")
        else:
            print(f" ❌ Squeeze present but no breakout")
    else:
        print(f" ❌ No squeeze")

    # MACD
    print(f"\n MACD:")
    print(f" Current: {latest['macd']:.2f} vs Signal: {latest['macd_signal']:.2f}")
    print(f" Previous: {prev['macd']:.2f} vs Signal: {prev['macd_signal']:.2f}")

    macd_cross_up = latest["macd"] > latest["macd_signal"] and prev["macd"] <= prev["macd_signal"]
    macd_cross_down = latest["macd"] < latest["macd_signal"] and prev["macd"] >= prev["macd_signal"]

    if signal_type == "BUY" and macd_cross_up:
        score += weights["macd"]
        print(f" ✓ Bullish crossover (+{weights['macd']:.3f})")
    elif signal_type == "SELL" and macd_cross_down:
        score += weights["macd"]
        print(f" ✓ Bearish crossover (+{weights['macd']:.3f})")
    else:
        print(f" ❌ No crossover or not aligned")

    # Net flow
    print(f"\n Buy/Sell Pressure:")
    print(f" Buy Ratio: {latest['buy_ratio']:.2%}")

    if signal_type == "BUY" and latest["buy_ratio"] > 0.55:
        score += weights["flow"]
        print(f" ✓ Strong buy pressure (+{weights['flow']:.3f})")
    elif signal_type == "SELL" and latest["buy_ratio"] < 0.45:
        score += weights["flow"]
        print(f" ✓ Strong sell pressure (+{weights['flow']:.3f})")
    else:
        print(f" ❌ Neutral pressure")

    # RSI
    print(f"\n RSI: {latest['rsi_14']:.1f}")

    if signal_type == "BUY" and latest["rsi_14"] < 50:
        score += weights["rsi"]
        print(f" ✓ Not overbought (+{weights['rsi']:.3f})")
    elif signal_type == "SELL" and latest["rsi_14"] > 50:
        score += weights["rsi"]
        print(f" ✓ Not oversold (+{weights['rsi']:.3f})")
    else:
        print(f" ❌ Unfavorable")

    # Final score
    print(f"\n {'='*70}")
    print(f" FINAL SCORE: {score:.3f}")
    print(f" THRESHOLD: {min_confidence:.3f}")

    if score >= min_confidence:
        print(f" ✅ SIGNAL WOULD BE GENERATED!")
    else:
        print(f" ❌ Below threshold (need {min_confidence - score:.3f} more)")


def main():
    config = load_config()

    print("🔍 SIGNAL GENERATOR DEBUGGER")
    print("=" * 70)
    print(f"Min Confidence: {config['min_confidence']}")
    print(f"Timeframes: {', '.join(config['timeframes'])}")
    print(f"Lookback: {config['lookback']} candles")

    for timeframe in config["timeframes"]:
        print(f"\n\n{'='*70}")
        print(f"TIMEFRAME: {timeframe}")
        print(f"{'='*70}")

        df = fetch_data(config["candles_db"], config["analysis_db"], timeframe, config["lookback"])

        if df is None:
            print(f" ❌ No data available")
            continue

        print(f" ✓ Loaded {len(df)} candles")

        # Analyze both personalities
        analyze_scalping(df, config["weights"]["scalping"], config["min_confidence"])
        analyze_swing(df, config["weights"]["swing"], config["min_confidence"])


if __name__ == "__main__":
    main()