Compare commits

...

3 Commits

11 changed files with 1921 additions and 215 deletions


@@ -121,38 +121,44 @@ analysis_conn.commit()
# ========== Technical Indicator Functions ==========
def compute_indicators(df):
close = df['close']
"""Compute indicators using TA-Lib for accuracy"""
import talib
close = df['close'].values
high = df['high'].values
low = df['low'].values
volume = df['volume'].values
# EMA and SMA
df['ema_9'] = close.ewm(span=9, adjust=False).mean()
df['ema_21'] = close.ewm(span=21, adjust=False).mean()
df['sma_50'] = close.rolling(window=50, min_periods=1).mean()
df['sma_200'] = close.rolling(window=200, min_periods=1).mean()
# RSI (14): using 14-period gains/losses and RSI formula (100 - 100/(1+RS))
delta = close.diff()
gain = delta.clip(lower=0)
loss = -delta.clip(upper=0)
avg_gain = gain.rolling(window=14, min_periods=14).mean()
avg_loss = loss.rolling(window=14, min_periods=14).mean()
rs = avg_gain / avg_loss.replace(0, pd.NA)
df['rsi_14'] = 100 - (100 / (1 + rs))
df['ema_9'] = talib.EMA(close, timeperiod=9)
df['ema_21'] = talib.EMA(close, timeperiod=21)
df['sma_50'] = talib.SMA(close, timeperiod=50)
df['sma_200'] = talib.SMA(close, timeperiod=200)
# RSI (14) - Proper calculation
df['rsi_14'] = talib.RSI(close, timeperiod=14)
# MACD (12,26,9)
ema12 = close.ewm(span=12, adjust=False).mean()
ema26 = close.ewm(span=26, adjust=False).mean()
macd_line = ema12 - ema26
df['macd'] = macd_line
df['macd_signal'] = macd_line.ewm(span=9, adjust=False).mean()
df['macd_hist'] = df['macd'] - df['macd_signal']
macd, macd_signal, macd_hist = talib.MACD(close, fastperiod=12, slowperiod=26, signalperiod=9)
df['macd'] = macd
df['macd_signal'] = macd_signal
df['macd_hist'] = macd_hist
# Bollinger Bands (20,2)
df['bb_middle'] = close.rolling(window=20, min_periods=20).mean()
bb_std = close.rolling(window=20, min_periods=20).std()
df['bb_upper'] = df['bb_middle'] + 2 * bb_std
df['bb_lower'] = df['bb_middle'] - 2 * bb_std
# Bollinger Squeeze: detect when BB width is lowest over 20 periods
bb_width = df['bb_upper'] - df['bb_lower']
rolling_min_width = bb_width.rolling(window=20, min_periods=20).min()
df['bb_squeeze'] = (bb_width <= rolling_min_width).astype(int)
# Volume moving average (20)
df['volume_ma_20'] = df['volume'].rolling(window=20, min_periods=1).mean()
bb_upper, bb_middle, bb_lower = talib.BBANDS(close, timeperiod=20, nbdevup=2, nbdevdn=2, matype=0)
df['bb_upper'] = bb_upper
df['bb_middle'] = bb_middle
df['bb_lower'] = bb_lower
# Bollinger Squeeze
bb_width = bb_upper - bb_lower
bb_width_series = pd.Series(bb_width)
rolling_min_width = bb_width_series.rolling(window=20, min_periods=20).min()
df['bb_squeeze'] = (bb_width_series <= rolling_min_width).fillna(0).astype(int)
# Volume MA
df['volume_ma_20'] = talib.SMA(volume, timeperiod=20)
return df
# ========== Health Check Server ==========

analysis/backfill_indicators.py (new Executable file, 196 lines)

@@ -0,0 +1,196 @@
#!/usr/bin/env python3
"""
Backfill Missing Indicators
Calculates RSI and Bollinger Bands for existing data
"""
import sqlite3
import pandas as pd
import numpy as np
import talib
from datetime import datetime
def backfill_indicators(candles_db: str, analysis_db: str):
"""Backfill RSI and Bollinger Bands for all timeframes"""
print("🔧 BACKFILLING MISSING INDICATORS")
print("=" * 70)
# Connect to databases
conn_candles = sqlite3.connect(candles_db)
conn_analysis = sqlite3.connect(analysis_db)
# Get all timeframes
cursor = conn_analysis.cursor()
cursor.execute("SELECT DISTINCT timeframe FROM analysis ORDER BY timeframe")
timeframes = [row[0] for row in cursor.fetchall()]
total_updated = 0
for timeframe in timeframes:
print(f"\n📊 Processing {timeframe}...")
# Fetch candle data
df_candles = pd.read_sql_query(
"SELECT timestamp, close, high, low FROM candles WHERE timeframe = ? ORDER BY timestamp",
conn_candles,
params=(timeframe,)
)
if len(df_candles) < 20:
print(f" ⚠️ Skipping - insufficient data ({len(df_candles)} rows)")
continue
print(f" ✓ Loaded {len(df_candles)} candles")
# Calculate RSI
df_candles['rsi_14'] = talib.RSI(df_candles['close'].values, timeperiod=14)
# Calculate Bollinger Bands
bb_upper, bb_middle, bb_lower = talib.BBANDS(
df_candles['close'].values,
timeperiod=20,
nbdevup=2,
nbdevdn=2,
matype=0
)
df_candles['bb_upper'] = bb_upper
df_candles['bb_middle'] = bb_middle
df_candles['bb_lower'] = bb_lower
# Calculate BB Squeeze
# Squeeze = when BB width is in the lowest 20% of recent widths
df_candles['bb_width'] = df_candles['bb_upper'] - df_candles['bb_lower']
df_candles['bb_width_rank'] = df_candles['bb_width'].rolling(window=100).apply(
lambda x: (x.iloc[-1] <= x.quantile(0.2)).astype(int) if len(x) >= 20 else 0,
raw=False
)
df_candles['bb_squeeze'] = df_candles['bb_width_rank'].fillna(0).astype(int)
# Update analysis database
cursor_update = conn_analysis.cursor()
updated = 0
for _, row in df_candles.iterrows():
cursor_update.execute("""
UPDATE analysis
SET rsi_14 = ?, bb_upper = ?, bb_middle = ?, bb_lower = ?, bb_squeeze = ?
WHERE timeframe = ? AND timestamp = ?
""", (
float(row['rsi_14']) if not pd.isna(row['rsi_14']) else None,
float(row['bb_upper']) if not pd.isna(row['bb_upper']) else None,
float(row['bb_middle']) if not pd.isna(row['bb_middle']) else None,
float(row['bb_lower']) if not pd.isna(row['bb_lower']) else None,
int(row['bb_squeeze']),
timeframe,
int(row['timestamp'])
))
updated += cursor_update.rowcount
conn_analysis.commit()
total_updated += updated
print(f" ✅ Updated {updated} rows")
# Show sample
latest = df_candles.iloc[-1]
print(f" Latest RSI: {latest['rsi_14']:.2f}" if not pd.isna(latest['rsi_14']) else " Latest RSI: NULL")
print(f" Latest BB: Upper=${latest['bb_upper']:.2f}, Lower=${latest['bb_lower']:.2f}" if not pd.isna(latest['bb_upper']) else " Latest BB: NULL")
conn_candles.close()
conn_analysis.close()
print(f"\n{'='*70}")
print(f"✅ BACKFILL COMPLETE!")
print(f" Total rows updated: {total_updated}")
print(f"{'='*70}")
def verify_backfill(analysis_db: str):
"""Verify the backfill worked"""
print("\n🔍 VERIFICATION")
print("=" * 70)
conn = sqlite3.connect(analysis_db)
cursor = conn.cursor()
cursor.execute("SELECT DISTINCT timeframe FROM analysis")
timeframes = [row[0] for row in cursor.fetchall()]
for tf in timeframes:
# Count NULL values
cursor.execute("""
SELECT
COUNT(*) as total,
SUM(CASE WHEN rsi_14 IS NULL THEN 1 ELSE 0 END) as rsi_null,
SUM(CASE WHEN bb_upper IS NULL THEN 1 ELSE 0 END) as bb_null
FROM analysis
WHERE timeframe = ?
""", (tf,))
total, rsi_null, bb_null = cursor.fetchone()
print(f"\n{tf}:")
print(f" Total rows: {total}")
print(f" RSI NULL: {rsi_null} ({rsi_null/total*100:.1f}%)" if total > 0 else " RSI NULL: N/A")
print(f" BB NULL: {bb_null} ({bb_null/total*100:.1f}%)" if total > 0 else " BB NULL: N/A")
# Get latest values
cursor.execute("""
SELECT rsi_14, bb_upper, bb_lower, bb_squeeze
FROM analysis
WHERE timeframe = ?
ORDER BY timestamp DESC
LIMIT 1
""", (tf,))
row = cursor.fetchone()
if row and row[0] is not None:
print(f" ✅ Latest: RSI={row[0]:.2f}, BB_upper=${row[1]:.2f}, BB_squeeze={row[3]}")
else:
print(f" ❌ Latest values still NULL")
conn.close()
def main():
import json
# Load config
try:
with open("config.json", "r") as f:
config = json.load(f)
candles_db = config.get("candles_db", "../onramp/market_data.db")
analysis_db = config.get("analysis_db", "../analysis/analysis.db")
except FileNotFoundError:
print("❌ config.json not found, using default paths")
candles_db = "../onramp/market_data.db"
analysis_db = "../analysis/analysis.db"
print(f"Candles DB: {candles_db}")
print(f"Analysis DB: {analysis_db}")
try:
backfill_indicators(candles_db, analysis_db)
verify_backfill(analysis_db)
print("\n💡 NEXT STEPS:")
print("=" * 70)
print("1. Run the signal debugger again:")
print(" python3 signal_debugger.py")
print("\n2. Restart the signal generator:")
print(" pkill -f signals.py")
print(" ./signals.py")
print("\n3. Update your analysis pipeline to calculate these indicators")
print(" going forward so you don't need to backfill again")
except Exception as e:
print(f"\n❌ Error: {e}")
import traceback
traceback.print_exc()
if __name__ == "__main__":
main()

signals/analysis_inspector.py (new Executable file, 158 lines)

@@ -0,0 +1,158 @@
#!/usr/bin/env python3
"""
Analysis Database Inspector
Check what indicators are actually populated
"""
import sqlite3
import json
def load_config():
with open("config.json", "r") as f:
return json.load(f)
def inspect_database(db_path):
"""Inspect analysis database schema and data"""
print(f"\n📊 Inspecting: {db_path}")
print("=" * 70)
try:
conn = sqlite3.connect(f"file:{db_path}?mode=ro", uri=True)
cursor = conn.cursor()
# Get table schema
cursor.execute("PRAGMA table_info(analysis)")
columns = cursor.fetchall()
print("\n📋 TABLE SCHEMA:")
print(f"{'Column Name':<20} {'Type':<15} {'Not Null':<10}")
print("-" * 50)
for col in columns:
print(f"{col[1]:<20} {col[2]:<15} {'YES' if col[3] else 'NO':<10}")
# Get row count
cursor.execute("SELECT COUNT(*) FROM analysis")
total_rows = cursor.fetchone()[0]
print(f"\n📊 Total rows: {total_rows}")
# Check data availability per timeframe
cursor.execute("SELECT DISTINCT timeframe FROM analysis ORDER BY timeframe")
timeframes = [row[0] for row in cursor.fetchall()]
print("\n⏱️ DATA BY TIMEFRAME:")
for tf in timeframes:
cursor.execute(f"SELECT COUNT(*) FROM analysis WHERE timeframe = ?", (tf,))
count = cursor.fetchone()[0]
print(f" {tf}: {count} rows")
# Check for NULL values in key indicators
print("\n🔍 NULL VALUE CHECK (latest 10 rows per timeframe):")
indicator_cols = [
'ema_9', 'ema_21', 'sma_50', 'sma_200',
'rsi_14', 'macd', 'macd_signal', 'macd_hist',
'bb_upper', 'bb_middle', 'bb_lower', 'bb_squeeze',
'volume_ma_20'
]
for tf in timeframes:
print(f"\n Timeframe: {tf}")
# Get latest row
cursor.execute(f"""
SELECT * FROM analysis
WHERE timeframe = ?
ORDER BY timestamp DESC
LIMIT 1
""", (tf,))
row = cursor.fetchone()
col_names = [desc[0] for desc in cursor.description]
if row:
row_dict = dict(zip(col_names, row))
null_indicators = []
present_indicators = []
for ind in indicator_cols:
if ind in row_dict:
if row_dict[ind] is None:
null_indicators.append(ind)
else:
present_indicators.append(ind)
else:
null_indicators.append(f"{ind} (MISSING COLUMN)")
if present_indicators:
print(f" ✓ Present: {', '.join(present_indicators[:5])}")
if len(present_indicators) > 5:
print(f" {', '.join(present_indicators[5:])}")
if null_indicators:
print(f" ❌ NULL/Missing: {', '.join(null_indicators)}")
# Show sample values
print(f"\n Sample values from latest row:")
print(f" Timestamp: {row_dict.get('timestamp')}")
for ind in ['ema_9', 'ema_21', 'rsi_14', 'bb_upper']:
if ind in row_dict:
val = row_dict[ind]
if val is not None:
print(f" {ind}: {val}")
else:
print(f" {ind}: NULL")
else:
print(f" ❌ No data found")
# Check if buy_volume exists in candles
print("\n\n📊 Checking candles table for buy_volume...")
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='candles'")
if cursor.fetchone():
cursor.execute("PRAGMA table_info(candles)")
candles_cols = [col[1] for col in cursor.fetchall()]
if 'buy_volume' in candles_cols:
print(" ✓ buy_volume column exists in candles table")
# Check if it has data
cursor.execute("SELECT COUNT(*) FROM candles WHERE buy_volume IS NOT NULL")
count = cursor.fetchone()[0]
print(f"{count} rows with buy_volume data")
else:
print(" ❌ buy_volume column MISSING from candles table")
print(" Available columns:", ', '.join(candles_cols))
conn.close()
except sqlite3.OperationalError as e:
print(f" ❌ Database error: {e}")
except Exception as e:
print(f" ❌ Error: {e}")
def main():
config = load_config()
print("🔍 ANALYSIS DATABASE INSPECTOR")
print("=" * 70)
inspect_database(config["analysis_db"])
print("\n\n💡 NEXT STEPS:")
print("=" * 70)
print("If indicators are missing:")
print(" 1. Check your analysis pipeline is running")
print(" 2. Verify the analysis script calculates these indicators:")
print(" - rsi_14, bb_upper, bb_lower, bb_middle, bb_squeeze")
print(" 3. Re-run analysis on existing candle data")
print("\nIf buy_volume is missing:")
print(" 1. Update your candles table schema")
print(" 2. Modify your data ingestion to capture buy_volume")
print(" 3. Or set buy_volume = volume/2 as approximation")
if __name__ == "__main__":
main()


@@ -10,7 +10,7 @@
"personality": "scalping",
"timeframes": ["1m", "5m"],
"lookback": 200,
"min_confidence": 0.45,
"min_confidence": 0.40,
"cooldown_seconds": 30,
"weights": {
"scalping": {
@@ -18,12 +18,12 @@
"stoch": 0.2,
"rsi": 0.2,
"volume": 0.2,
"macd": 0.15
"macd": 0.3
},
"swing": {
"regime": 0.35,
"bb_squeeze": 0.25,
"macd": 0.2,
"macd": 0.3,
"flow": 0.15,
"rsi": 0.05
}

signals/signal_debugger.py (new Executable file, 357 lines)

@@ -0,0 +1,357 @@
#!/usr/bin/env python3
"""
Signal Generator Debugger
Analyzes why signals aren't being generated
"""
import sqlite3
import pandas as pd
import numpy as np
import talib
import json
from datetime import datetime
import sys
def load_config():
with open("config.json", "r") as f:
return json.load(f)
def fetch_data(candles_db, analysis_db, timeframe, lookback=200):
"""Fetch and enrich data exactly like signals.py does"""
try:
conn_c = sqlite3.connect(f"file:{candles_db}?mode=ro", uri=True, timeout=10)
conn_c.execute(f"ATTACH DATABASE 'file:{analysis_db}?mode=ro' AS analysis_db")
query = """
SELECT
c.timeframe, c.timestamp, c.open, c.high, c.low, c.close,
c.volume, c.buy_volume,
a.ema_9, a.ema_21, a.sma_50, a.sma_200,
a.rsi_14, a.macd, a.macd_signal, a.macd_hist,
a.bb_upper, a.bb_middle, a.bb_lower, a.bb_squeeze,
a.volume_ma_20
FROM candles c
JOIN analysis_db.analysis a
ON c.timeframe = a.timeframe
AND c.timestamp = a.timestamp
WHERE c.timeframe = ?
ORDER BY c.timestamp DESC
LIMIT ?
"""
df = pd.read_sql_query(query, conn_c, params=(timeframe, lookback))
conn_c.close()
if df.empty:
return None
df = df.sort_values("timestamp").reset_index(drop=True)
df["datetime"] = pd.to_datetime(df["timestamp"], unit="s")
# Filter closed candles
import time
current_time = int(time.time())
window = {"1m": 60, "5m": 300, "15m": 900, "1h": 3600}.get(timeframe, 60)
df = df[df["timestamp"] < (current_time - window)]
if len(df) < 50:
return None
df = df.dropna(subset=["open", "high", "low", "close", "volume"])
if len(df) < 50:
return None
# Add Stochastic
df["stoch_k"], df["stoch_d"] = talib.STOCH(
df["high"].values,
df["low"].values,
df["close"].values,
fastk_period=14,
slowk_period=3,
slowd_period=3,
)
df["buy_ratio"] = df["buy_volume"] / df["volume"].replace(0, np.nan)
df["net_flow"] = df["buy_volume"] - (df["volume"] - df["buy_volume"])
return df
except Exception as e:
print(f"Error fetching data: {e}")
return None
def analyze_scalping(df, weights, min_confidence):
"""Analyze scalping signal generation"""
if len(df) < 21:
print(" ❌ Insufficient data for scalping (need 21+ rows)")
return
latest = df.iloc[-1]
prev = df.iloc[-2]
print(f"\n📊 SCALPING ANALYSIS ({len(df)} candles)")
print("=" * 70)
# Check for NULL values
required = ["ema_9", "ema_21", "rsi_14", "stoch_k", "stoch_d", "macd", "macd_signal"]
null_cols = [col for col in required if pd.isna(latest[col])]
if null_cols:
print(f" ❌ SKIPPED: Missing indicators: {', '.join(null_cols)}")
return
else:
print(" ✓ All required indicators present")
print(f"\n Latest candle: {latest['datetime']}")
print(f" Close: ${latest['close']:.2f}")
# EMA Analysis
print(f"\n EMA Crossover Check:")
print(f" Current: EMA9={latest['ema_9']:.2f} vs EMA21={latest['ema_21']:.2f}")
print(f" Previous: EMA9={prev['ema_9']:.2f} vs EMA21={prev['ema_21']:.2f}")
ema_cross_up = latest["ema_9"] > latest["ema_21"] and prev["ema_9"] <= prev["ema_21"]
ema_cross_down = latest["ema_9"] < latest["ema_21"] and prev["ema_9"] >= prev["ema_21"]
if ema_cross_up:
print(f" ✓ BULLISH CROSSOVER DETECTED!")
signal_type = "BUY"
score = weights["ema_cross"]
elif ema_cross_down:
print(f" ✓ BEARISH CROSSOVER DETECTED!")
signal_type = "SELL"
score = weights["ema_cross"]
else:
print(f" ❌ No crossover (EMA9 {'above' if latest['ema_9'] > latest['ema_21'] else 'below'} EMA21)")
# Show trend direction
ema_diff = latest["ema_9"] - latest["ema_21"]
prev_diff = prev["ema_9"] - prev["ema_21"]
trend = "converging" if abs(ema_diff) < abs(prev_diff) else "diverging"
print(f" EMAs are {trend} (diff: {ema_diff:.2f} vs prev: {prev_diff:.2f})")
return
# We have a crossover, check other indicators
print(f"\n Signal Type: {signal_type}")
print(f" Base Score: {score:.3f} (from EMA crossover)")
# Stochastic
print(f"\n Stochastic:")
print(f" K={latest['stoch_k']:.1f}, D={latest['stoch_d']:.1f}")
if signal_type == "BUY":
if latest["stoch_k"] > latest["stoch_d"] and latest["stoch_k"] < 30:
score += weights["stoch"]
print(f" ✓ Oversold crossover (+{weights['stoch']:.3f})")
else:
print(f" ❌ Not oversold crossover (K>D: {latest['stoch_k'] > latest['stoch_d']}, K<30: {latest['stoch_k'] < 30})")
else:
if latest["stoch_k"] < latest["stoch_d"] and latest["stoch_k"] > 70:
score += weights["stoch"]
print(f" ✓ Overbought crossover (+{weights['stoch']:.3f})")
else:
print(f" ❌ Not overbought crossover (K<D: {latest['stoch_k'] < latest['stoch_d']}, K>70: {latest['stoch_k'] > 70})")
# RSI
print(f"\n RSI: {latest['rsi_14']:.1f}")
if signal_type == "BUY" and latest["rsi_14"] < 40:
score += weights["rsi"]
print(f" ✓ Undersold (+{weights['rsi']:.3f})")
elif signal_type == "SELL" and latest["rsi_14"] > 60:
score += weights["rsi"]
print(f" ✓ Oversold (+{weights['rsi']:.3f})")
else:
print(f" ❌ Not in range (BUY needs <40, SELL needs >60)")
# Volume
vol_ratio = latest["volume"] / latest["volume_ma_20"] if latest["volume_ma_20"] else 0
print(f"\n Volume: {latest['volume']:.2f} vs MA20: {latest['volume_ma_20']:.2f}")
print(f" Ratio: {vol_ratio:.2f}x")
if vol_ratio > 1.5:
score += weights["volume"]
print(f" ✓ Volume surge (+{weights['volume']:.3f})")
else:
print(f" ❌ No surge (need >1.5x)")
# MACD
print(f"\n MACD: {latest['macd']:.2f} vs Signal: {latest['macd_signal']:.2f}")
if signal_type == "BUY" and latest["macd"] > latest["macd_signal"]:
score += weights["macd"]
print(f" ✓ Bullish (+{weights['macd']:.3f})")
elif signal_type == "SELL" and latest["macd"] < latest["macd_signal"]:
score += weights["macd"]
print(f" ✓ Bearish (+{weights['macd']:.3f})")
else:
print(f" ❌ Not aligned")
# Final score
print(f"\n {'='*70}")
print(f" FINAL SCORE: {score:.3f}")
print(f" THRESHOLD: {min_confidence:.3f}")
if score >= min_confidence:
print(f" ✅ SIGNAL WOULD BE GENERATED!")
else:
print(f" ❌ Below threshold (need {min_confidence - score:.3f} more)")
def analyze_swing(df, weights, min_confidence):
"""Analyze swing signal generation"""
if len(df) < 200:
print(f" ❌ Insufficient data for swing (need 200+ rows, have {len(df)})")
return
latest = df.iloc[-1]
prev = df.iloc[-2]
print(f"\n📊 SWING ANALYSIS ({len(df)} candles)")
print("=" * 70)
# Check for NULL values
required = ["sma_50", "sma_200", "bb_upper", "bb_lower", "bb_squeeze", "macd", "macd_signal", "buy_ratio"]
null_cols = [col for col in required if pd.isna(latest[col])]
if null_cols:
print(f" ❌ SKIPPED: Missing indicators: {', '.join(null_cols)}")
return
else:
print(" ✓ All required indicators present")
print(f"\n Latest candle: {latest['datetime']}")
print(f" Close: ${latest['close']:.2f}")
# Regime Analysis
print(f"\n Regime Analysis:")
print(f" Price: ${latest['close']:.2f}")
print(f" SMA50: ${latest['sma_50']:.2f}")
print(f" SMA200: ${latest['sma_200']:.2f}")
bull_regime = latest["close"] > latest["sma_50"] > latest["sma_200"]
bear_regime = latest["close"] < latest["sma_50"] < latest["sma_200"]
score = 0
signal_type = None
if bull_regime:
signal_type = "BUY"
score += weights["regime"]
print(f" ✓ BULL REGIME (Price > SMA50 > SMA200) (+{weights['regime']:.3f})")
elif bear_regime:
signal_type = "SELL"
score += weights["regime"]
print(f" ✓ BEAR REGIME (Price < SMA50 < SMA200) (+{weights['regime']:.3f})")
else:
print(f" ❌ No clear regime")
print(f" Price vs SMA50: {'above' if latest['close'] > latest['sma_50'] else 'below'}")
print(f" SMA50 vs SMA200: {'above' if latest['sma_50'] > latest['sma_200'] else 'below'}")
return
print(f"\n Signal Type: {signal_type}")
print(f" Base Score: {score:.3f} (from regime)")
# BB Squeeze
print(f"\n Bollinger Bands:")
print(f" Squeeze: {latest['bb_squeeze']} (prev: {prev['bb_squeeze']})")
print(f" Upper: ${latest['bb_upper']:.2f}, Lower: ${latest['bb_lower']:.2f}")
if latest["bb_squeeze"] == 1 or prev["bb_squeeze"] == 1:
if signal_type == "BUY" and latest["close"] > latest["bb_upper"]:
score += weights["bb_squeeze"]
print(f" ✓ Squeeze breakout upside (+{weights['bb_squeeze']:.3f})")
elif signal_type == "SELL" and latest["close"] < latest["bb_lower"]:
score += weights["bb_squeeze"]
print(f" ✓ Squeeze breakout downside (+{weights['bb_squeeze']:.3f})")
else:
print(f" ❌ Squeeze present but no breakout")
else:
print(f" ❌ No squeeze")
# MACD
print(f"\n MACD:")
print(f" Current: {latest['macd']:.2f} vs Signal: {latest['macd_signal']:.2f}")
print(f" Previous: {prev['macd']:.2f} vs Signal: {prev['macd_signal']:.2f}")
macd_cross_up = latest["macd"] > latest["macd_signal"] and prev["macd"] <= prev["macd_signal"]
macd_cross_down = latest["macd"] < latest["macd_signal"] and prev["macd"] >= prev["macd_signal"]
if signal_type == "BUY" and macd_cross_up:
score += weights["macd"]
print(f" ✓ Bullish crossover (+{weights['macd']:.3f})")
elif signal_type == "SELL" and macd_cross_down:
score += weights["macd"]
print(f" ✓ Bearish crossover (+{weights['macd']:.3f})")
else:
print(f" ❌ No crossover or not aligned")
# Net flow
print(f"\n Buy/Sell Pressure:")
print(f" Buy Ratio: {latest['buy_ratio']:.2%}")
if signal_type == "BUY" and latest["buy_ratio"] > 0.55:
score += weights["flow"]
print(f" ✓ Strong buy pressure (+{weights['flow']:.3f})")
elif signal_type == "SELL" and latest["buy_ratio"] < 0.45:
score += weights["flow"]
print(f" ✓ Strong sell pressure (+{weights['flow']:.3f})")
else:
print(f" ❌ Neutral pressure")
# RSI
print(f"\n RSI: {latest['rsi_14']:.1f}")
if signal_type == "BUY" and latest["rsi_14"] < 50:
score += weights["rsi"]
print(f" ✓ Not overbought (+{weights['rsi']:.3f})")
elif signal_type == "SELL" and latest["rsi_14"] > 50:
score += weights["rsi"]
print(f" ✓ Not oversold (+{weights['rsi']:.3f})")
else:
print(f" ❌ Unfavorable")
# Final score
print(f"\n {'='*70}")
print(f" FINAL SCORE: {score:.3f}")
print(f" THRESHOLD: {min_confidence:.3f}")
if score >= min_confidence:
print(f" ✅ SIGNAL WOULD BE GENERATED!")
else:
print(f" ❌ Below threshold (need {min_confidence - score:.3f} more)")
def main():
config = load_config()
print("🔍 SIGNAL GENERATOR DEBUGGER")
print("=" * 70)
print(f"Min Confidence: {config['min_confidence']}")
print(f"Timeframes: {', '.join(config['timeframes'])}")
print(f"Lookback: {config['lookback']} candles")
for timeframe in config["timeframes"]:
print(f"\n\n{'='*70}")
print(f"TIMEFRAME: {timeframe}")
print(f"{'='*70}")
df = fetch_data(config["candles_db"], config["analysis_db"], timeframe, config["lookback"])
if df is None:
print(f" ❌ No data available")
continue
print(f" ✓ Loaded {len(df)} candles")
# Analyze both personalities
analyze_scalping(df, config["weights"]["scalping"], config["min_confidence"])
analyze_swing(df, config["weights"]["swing"], config["min_confidence"])
if __name__ == "__main__":
main()


@@ -1,6 +1,6 @@
#!/usr/bin/env python3
"""
Health Check Client for Signal Generator
Health Check Client for Multi-Personality Signal Generator
Query the running signal generator status
"""
@@ -28,19 +28,61 @@ def check_health(socket_path="/tmp/signals_health.sock"):
# Parse and display
health = json.loads(response.decode('utf-8'))
print("=" * 60)
print("SIGNAL GENERATOR HEALTH STATUS")
print("=" * 60)
print("=" * 70)
print("MULTI-PERSONALITY SIGNAL GENERATOR HEALTH STATUS")
print("=" * 70)
print(f"Status: {health['status']}")
print(f"Personality: {health['personality']}")
print(f"Mode: {health.get('mode', 'single-personality')}")
# Handle both old single-personality and new multi-personality format
if 'personalities' in health:
print(f"Personalities: {', '.join(health['personalities'])}")
elif 'personality' in health:
print(f"Personality: {health['personality']}")
print(f"Timeframes: {', '.join(health['timeframes'])}")
print(f"Uptime: {health['uptime_seconds']}s ({health['uptime_seconds']//60}m)")
print(f"Total Signals: {health['total_signals']}")
print(f" - Buy: {health['buy_signals']}")
print(f" - Sell: {health['sell_signals']}")
print(f"Last Signal: {health['last_signal'] or 'None'}")
print(f"Errors: {health['errors']}")
print(f"Connected Clients: {health['connected_clients']}")
print(f"Debug Mode: {'ON' if health.get('debug_mode') else 'OFF'}")
# Total stats (aggregated across all personalities)
if 'total_stats' in health:
total = health['total_stats']
print(f"\nAGGREGATED STATISTICS:")
print(f" Total Signals: {total['total_signals']}")
print(f" - Buy: {total['buy_signals']}")
print(f" - Sell: {total['sell_signals']}")
print(f" Errors: {total['errors']}")
else:
# Fallback for old format
print(f"\nSTATISTICS:")
print(f" Total Signals: {health.get('total_signals', 0)}")
print(f" - Buy: {health.get('buy_signals', 0)}")
print(f" - Sell: {health.get('sell_signals', 0)}")
print(f" Errors: {health.get('errors', 0)}")
# Per-personality breakdown (if available)
if 'personality_stats' in health:
print("\n" + "=" * 70)
print("PER-PERSONALITY BREAKDOWN")
print("=" * 70)
for personality, stats in health['personality_stats'].items():
print(f"\n{personality.upper()}:")
print(f" Total Signals: {stats['total_signals']}")
print(f" - Buy: {stats['buy_signals']}")
print(f" - Sell: {stats['sell_signals']}")
print(f" Last Signal: {stats['last_signal'] or 'None'}")
print(f" Errors: {stats['errors']}")
if stats.get('recent_signals'):
print(f" Recent Signals:")
for sig in stats['recent_signals'][:3]:
print(f" [{sig['timeframe']}] {sig['signal']} @ ${sig['price']:.2f} "
f"(conf: {sig['confidence']:.2f})")
else:
# Old format
print(f" Last Signal: {health.get('last_signal') or 'None'}")
# Database Status
print("\n" + "=" * 70)
@@ -58,20 +100,22 @@ def check_health(socket_path="/tmp/signals_health.sock"):
print(f" Total Rows: {candles.get('row_count', 0)}")
for tf, info in candles.get('timeframes', {}).items():
age = info.get('age_seconds')
age_str = f"{age}s ago" if age else "N/A"
print(f" [{tf}]: {info['count']} rows, latest: {age_str}")
age_str = f"{age}s ago" if age is not None else "N/A"
status = "⚠ STALE" if age and age > 300 else ""
print(f" [{tf}]: {info['count']} rows, latest: {age_str} {status}")
# Analysis DB
analysis = db.get('analysis_db', {})
print(f"Analysis DB: {'✓ OK' if analysis.get('accessible') else '✗ FAILED'}")
print(f"\nAnalysis DB: {'✓ OK' if analysis.get('accessible') else '✗ FAILED'}")
if analysis.get('error'):
print(f" Error: {analysis['error']}")
else:
print(f" Total Rows: {analysis.get('row_count', 0)}")
for tf, info in analysis.get('timeframes', {}).items():
age = info.get('age_seconds')
age_str = f"{age}s ago" if age else "N/A"
print(f" [{tf}]: {info['count']} rows, latest: {age_str}")
age_str = f"{age}s ago" if age is not None else "N/A"
status = "⚠ STALE" if age and age > 300 else ""
print(f" [{tf}]: {info['count']} rows, latest: {age_str} {status}")
# Configuration
print("\n" + "=" * 70)
@@ -81,21 +125,38 @@ def check_health(socket_path="/tmp/signals_health.sock"):
print(f"Min Confidence: {cfg.get('min_confidence', 'N/A')}")
print(f"Cooldown: {cfg.get('cooldown_seconds', 'N/A')}s")
print(f"Lookback: {cfg.get('lookback', 'N/A')} candles")
print(f"Config Reloads: {cfg.get('reloads', 0)}")
if cfg.get('weights'):
print(f"Weights:")
for indicator, weight in cfg['weights'].items():
print(f" {indicator:12s} {weight}")
# Multi-personality format
if isinstance(cfg['weights'], dict) and any(k in cfg['weights'] for k in ['scalping', 'swing']):
for personality, weights in cfg['weights'].items():
print(f"\n{personality.upper()} Weights:")
for indicator, weight in weights.items():
print(f" {indicator:12s} {weight}")
else:
# Single personality format
print(f"Weights:")
for indicator, weight in cfg['weights'].items():
print(f" {indicator:12s} {weight}")
if health['recent_signals']:
print("\n" + "=" * 60)
print("RECENT SIGNALS")
print("=" * 60)
for sig in health['recent_signals']:
print(f" [{sig['timeframe']}] {sig['signal']} @ ${sig['price']:.2f} "
# Recent signals (all personalities combined)
recent = health.get('recent_signals', [])
if recent:
print("\n" + "=" * 70)
print("RECENT SIGNALS (ALL PERSONALITIES)")
print("=" * 70)
for sig in recent[:10]:
personality_tag = f"[{sig.get('personality', '?').upper()}]"
print(f" {personality_tag:12s} [{sig['timeframe']}] {sig['signal']} @ ${sig['price']:.2f} "
f"(conf: {sig['confidence']:.2f})")
print(f" Reasons: {', '.join(sig['reasons'])}")
if sig.get('reasons'):
reasons_str = ', '.join(sig['reasons'][:3])
if len(sig['reasons']) > 3:
reasons_str += f" (+{len(sig['reasons'])-3} more)"
print(f" Reasons: {reasons_str}")
print("=" * 60)
print("=" * 70)
return 0
@@ -106,8 +167,15 @@ def check_health(socket_path="/tmp/signals_health.sock"):
except ConnectionRefusedError:
print(f"Error: Connection refused at {socket_path}")
return 1
except KeyError as e:
print(f"Error: Missing expected field in health response: {e}")
print("\nRaw response:")
print(json.dumps(health, indent=2))
return 1
except Exception as e:
print(f"Error: {e}")
import traceback
traceback.print_exc()
return 1
if __name__ == "__main__":

signals/signals.py (Executable file → Normal file, 361 lines changed)

@@ -1,7 +1,8 @@
#!/usr/bin/env python3
"""
BTCUSDT Signal Generator
BTCUSDT Signal Generator - Multi-Personality Edition
Generates trading signals from candles.db and analysis.db
Runs both scalping and swing personalities simultaneously
Streams signals via Unix Domain Socket
"""
@@ -46,11 +47,10 @@ class Config:
self.log_file = data.get("log_file", "logs/signal_generator.log")
self.log_to_stdout = data.get("log_to_stdout", True)
self.poll_interval = data.get("poll_interval", 0.5)
self.personality = data.get("personality", "scalping")
self.timeframes = data.get("timeframes", ["1m", "5m"])
self.lookback = data.get("lookback", 200)
# Signal thresholds
# Signal thresholds (can be personality-specific)
self.min_confidence = data.get("min_confidence", 0.6)
self.cooldown_seconds = data.get("cooldown_seconds", 60)
@@ -92,13 +92,14 @@ def setup_logging(config: Config):
return logging.getLogger(__name__)
# Signal Generator Class
class SignalGenerator:
def __init__(self, config: Config, logger: logging.Logger):
# Signal Generator Class (per-personality)
class PersonalityEngine:
"""Single personality signal generation engine"""
def __init__(self, personality: str, config: Config, logger: logging.Logger):
self.personality = personality
self.config = config
self.logger = logger
self.running = False
self.debug_mode = False
self.last_signal_time = {}
self.signal_history = deque(maxlen=100)
self.stats = {
@@ -106,20 +107,9 @@ class SignalGenerator:
"buy_signals": 0,
"sell_signals": 0,
"last_signal_time": None,
"uptime_start": datetime.now(timezone.utc),
"errors": 0,
"config_reloads": 0,
}
# Unix socket
self.socket = None
self.connections = []
# Health check socket
self.health_socket = None
# Control socket
self.control_socket = None
self.lock = threading.Lock()
def fetch_and_enrich(self, timeframe: str) -> Optional[pd.DataFrame]:
"""Fetch data from databases and enrich with additional indicators"""
@@ -181,7 +171,7 @@ class SignalGenerator:
df = df[df["timestamp"] < (current_time - window)]
if len(df) < 50:
self.logger.debug(f"Not enough data for {timeframe}: {len(df)} rows")
self.logger.debug(f"[{self.personality}] Not enough data for {timeframe}: {len(df)} rows")
return None
# Drop rows with NULL in critical columns
@@ -189,7 +179,7 @@ class SignalGenerator:
if len(df) < 50:
self.logger.debug(
f"Not enough valid data after NULL filtering for {timeframe}"
f"[{self.personality}] Not enough valid data after NULL filtering for {timeframe}"
)
return None
@@ -210,8 +200,9 @@ class SignalGenerator:
return df
except Exception as e:
self.logger.error(f"Error fetching data for {timeframe}: {e}")
self.stats["errors"] += 1
self.logger.error(f"[{self.personality}] Error fetching data for {timeframe}: {e}")
with self.lock:
self.stats["errors"] += 1
return None
def generate_signal_scalping(
@@ -219,13 +210,13 @@ class SignalGenerator:
) -> Optional[Dict]:
"""Generate signal using scalping personality"""
if len(df) < 21:
self.logger.debug(f"[{timeframe}] Insufficient data: {len(df)} rows")
self.logger.debug(f"[{self.personality}/{timeframe}] Insufficient data: {len(df)} rows")
return None
latest = df.iloc[-1]
prev = df.iloc[-2]
# Check for NULL indicators - skip if essential indicators are missing
# Check for NULL indicators
required_cols = [
"ema_9",
"ema_21",
@@ -236,7 +227,7 @@ class SignalGenerator:
"macd_signal",
]
if any(pd.isna(latest[col]) for col in required_cols):
self.logger.debug(f"[{timeframe}] Skipping: missing required indicators")
self.logger.debug(f"[{self.personality}/{timeframe}] Skipping: missing required indicators")
return None
score = 0
@@ -262,12 +253,6 @@ class SignalGenerator:
reasons.append("EMA9 crossed below EMA21")
signal_type = "SELL"
# Log EMA status for debugging
self.logger.debug(
f"[{timeframe}] EMA9={latest['ema_9']:.2f} vs EMA21={latest['ema_21']:.2f}, "
f"Prev: EMA9={prev['ema_9']:.2f} vs EMA21={prev['ema_21']:.2f}"
)
# Stochastic
if signal_type == "BUY":
if latest["stoch_k"] > latest["stoch_d"] and latest["stoch_k"] < 30:
@@ -299,13 +284,6 @@ class SignalGenerator:
score += weights["macd"]
reasons.append("MACD bearish")
# Debug output
if signal_type:
self.logger.debug(
f"[{timeframe}] Potential {signal_type} signal - Score: {score:.3f} "
f"(threshold: {self.config.min_confidence}), Reasons: {len(reasons)}"
)
if signal_type and score >= self.config.min_confidence:
return {
"signal": signal_type,
@@ -327,7 +305,7 @@ class SignalGenerator:
latest = df.iloc[-1]
prev = df.iloc[-2]
# Check for NULL indicators - skip if essential indicators are missing
# Check for NULL indicators
required_cols = [
"sma_50",
"sma_200",
@@ -339,7 +317,7 @@ class SignalGenerator:
"buy_ratio",
]
if any(pd.isna(latest[col]) for col in required_cols):
self.logger.debug(f"Skipping {timeframe}: missing required indicators")
self.logger.debug(f"[{self.personality}/{timeframe}] Skipping: missing required indicators")
return None
score = 0
@@ -418,7 +396,7 @@ class SignalGenerator:
def generate_signal(self, timeframe: str) -> Optional[Dict]:
"""Main signal generation dispatcher"""
# Check cooldown
cooldown_key = f"{self.config.personality}_{timeframe}"
cooldown_key = f"{self.personality}_{timeframe}"
if cooldown_key in self.last_signal_time:
elapsed = time.time() - self.last_signal_time[cooldown_key]
if elapsed < self.config.cooldown_seconds:
@@ -428,12 +406,12 @@ class SignalGenerator:
if df is None:
return None
if self.config.personality == "scalping":
if self.personality == "scalping":
signal = self.generate_signal_scalping(df, timeframe)
elif self.config.personality == "swing":
elif self.personality == "swing":
signal = self.generate_signal_swing(df, timeframe)
else:
self.logger.error(f"Unknown personality: {self.config.personality}")
self.logger.error(f"Unknown personality: {self.personality}")
return None
if signal:
@@ -441,40 +419,76 @@ class SignalGenerator:
signal["generated_at"] = datetime.now(timezone.utc).isoformat()
# Update stats
self.stats["total_signals"] += 1
if signal["signal"] == "BUY":
self.stats["buy_signals"] += 1
else:
self.stats["sell_signals"] += 1
self.stats["last_signal_time"] = signal["generated_at"]
self.signal_history.append(signal)
with self.lock:
self.stats["total_signals"] += 1
if signal["signal"] == "BUY":
self.stats["buy_signals"] += 1
else:
self.stats["sell_signals"] += 1
self.stats["last_signal_time"] = signal["generated_at"]
self.signal_history.append(signal)
return signal
# Main Signal Generator Coordinator
class SignalGenerator:
def __init__(self, config: Config, logger: logging.Logger):
self.config = config
self.logger = logger
self.running = False
self.debug_mode = False
# Create personality engines
self.engines = {
"scalping": PersonalityEngine("scalping", config, logger),
"swing": PersonalityEngine("swing", config, logger),
}
self.global_stats = {
"uptime_start": datetime.now(timezone.utc),
"config_reloads": 0,
}
# Unix socket
self.socket = None
self.connections = []
self.connections_lock = threading.Lock()
# Health check socket
self.health_socket = None
# Control socket
self.control_socket = None
# Thread pool
self.threads = []
def broadcast_signal(self, signal: Dict):
"""Broadcast signal to all connected clients"""
message = json.dumps(signal) + "\n"
message_bytes = message.encode("utf-8")
disconnected = []
for conn in self.connections:
try:
conn.sendall(message_bytes)
self.logger.info(
f"Sent {signal['signal']} signal: {signal['timeframe']} @ {signal['price']} (conf: {signal['confidence']})"
)
except Exception as e:
self.logger.warning(f"Failed to send to client: {e}")
disconnected.append(conn)
with self.connections_lock:
disconnected = []
for conn in self.connections:
try:
conn.sendall(message_bytes)
self.logger.info(
f"[{signal['personality']}] Sent {signal['signal']} signal: "
f"{signal['timeframe']} @ {signal['price']} (conf: {signal['confidence']})"
)
except Exception as e:
self.logger.warning(f"Failed to send to client: {e}")
disconnected.append(conn)
# Remove disconnected clients
for conn in disconnected:
try:
conn.close()
except:
pass
self.connections.remove(conn)
# Remove disconnected clients
for conn in disconnected:
try:
conn.close()
except:
pass
self.connections.remove(conn)
def setup_signal_socket(self):
"""Setup Unix domain socket for signal streaming"""
@@ -485,7 +499,7 @@ class SignalGenerator:
self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.socket.bind(self.config.socket_path)
self.socket.listen(5)
self.socket.settimeout(0.1) # Non-blocking accept
self.socket.settimeout(0.1)
self.logger.info(f"Signal socket listening on {self.config.socket_path}")
@@ -545,12 +559,10 @@ class SignalGenerator:
)
cursor = conn_c.cursor()
# Check total rows
cursor.execute("SELECT COUNT(*) FROM candles")
status["candles_db"]["row_count"] = cursor.fetchone()[0]
status["candles_db"]["accessible"] = True
# Check per-timeframe data
for tf in self.config.timeframes:
cursor.execute(
"SELECT COUNT(*), MAX(timestamp) FROM candles WHERE timeframe = ?",
@@ -578,7 +590,6 @@ class SignalGenerator:
status["analysis_db"]["row_count"] = cursor.fetchone()[0]
status["analysis_db"]["accessible"] = True
# Check per-timeframe data
for tf in self.config.timeframes:
cursor.execute(
"SELECT COUNT(*), MAX(timestamp) FROM analysis WHERE timeframe = ?",
@@ -602,28 +613,56 @@ class SignalGenerator:
try:
conn, _ = self.health_socket.accept()
uptime = datetime.now(timezone.utc) - self.stats["uptime_start"]
uptime = datetime.now(timezone.utc) - self.global_stats["uptime_start"]
db_status = self.check_database_status()
# Aggregate stats from both engines
total_stats = {
"total_signals": 0,
"buy_signals": 0,
"sell_signals": 0,
"errors": 0,
}
personality_stats = {}
recent_signals = []
for name, engine in self.engines.items():
with engine.lock:
personality_stats[name] = {
"total_signals": engine.stats["total_signals"],
"buy_signals": engine.stats["buy_signals"],
"sell_signals": engine.stats["sell_signals"],
"last_signal": engine.stats["last_signal_time"],
"errors": engine.stats["errors"],
"recent_signals": list(engine.signal_history)[-5:],
}
total_stats["total_signals"] += engine.stats["total_signals"]
total_stats["buy_signals"] += engine.stats["buy_signals"]
total_stats["sell_signals"] += engine.stats["sell_signals"]
total_stats["errors"] += engine.stats["errors"]
recent_signals.extend(list(engine.signal_history)[-5:])
# Sort recent signals by timestamp
recent_signals.sort(key=lambda x: x.get("timestamp", 0), reverse=True)
health = {
"status": "running",
"personality": self.config.personality,
"mode": "multi-personality",
"personalities": ["scalping", "swing"],
"timeframes": self.config.timeframes,
"uptime_seconds": int(uptime.total_seconds()),
"total_signals": self.stats["total_signals"],
"buy_signals": self.stats["buy_signals"],
"sell_signals": self.stats["sell_signals"],
"last_signal": self.stats["last_signal_time"],
"errors": self.stats["errors"],
"total_stats": total_stats,
"personality_stats": personality_stats,
"connected_clients": len(self.connections),
"recent_signals": list(self.signal_history)[-5:],
"recent_signals": recent_signals[:10],
"databases": db_status,
"config": {
"min_confidence": self.config.min_confidence,
"cooldown_seconds": self.config.cooldown_seconds,
"lookback": self.config.lookback,
"weights": self.config.weights[self.config.personality],
"reloads": self.stats["config_reloads"],
"weights": self.config.weights,
"reloads": self.global_stats["config_reloads"],
},
"debug_mode": self.debug_mode,
}
@@ -641,7 +680,6 @@ class SignalGenerator:
try:
conn, _ = self.control_socket.accept()
# Receive command
data = conn.recv(4096).decode("utf-8").strip()
if not data:
@@ -668,25 +706,17 @@ class SignalGenerator:
if action == "reload":
try:
old_personality = self.config.personality
old_confidence = self.config.min_confidence
self.config.reload()
self.stats["config_reloads"] += 1
self.global_stats["config_reloads"] += 1
self.logger.info(
f"Config reloaded: personality={self.config.personality}, "
f"min_confidence={self.config.min_confidence}"
)
self.logger.info(f"Config reloaded: min_confidence={self.config.min_confidence}")
return {
"status": "success",
"message": "Configuration reloaded",
"changes": {
"personality": {
"old": old_personality,
"new": self.config.personality,
},
"min_confidence": {
"old": old_confidence,
"new": self.config.min_confidence,
@@ -696,21 +726,6 @@ class SignalGenerator:
except Exception as e:
return {"status": "error", "message": str(e)}
elif action == "set_personality":
personality = cmd.get("value")
if personality in ["scalping", "swing"]:
self.config.personality = personality
self.logger.info(f"Personality changed to: {personality}")
return {
"status": "success",
"message": f"Personality set to {personality}",
}
else:
return {
"status": "error",
"message": "Invalid personality (use 'scalping' or 'swing')",
}
elif action == "set_confidence":
try:
confidence = float(cmd.get("value"))
@@ -755,21 +770,24 @@ class SignalGenerator:
}
elif action == "clear_cooldowns":
self.last_signal_time.clear()
self.logger.info("Signal cooldowns cleared")
for engine in self.engines.values():
engine.last_signal_time.clear()
self.logger.info("All signal cooldowns cleared")
return {"status": "success", "message": "All cooldowns cleared"}
elif action == "reset_stats":
self.stats = {
"total_signals": 0,
"buy_signals": 0,
"sell_signals": 0,
"last_signal_time": None,
"uptime_start": datetime.now(timezone.utc),
"errors": 0,
"config_reloads": self.stats["config_reloads"],
}
self.signal_history.clear()
for engine in self.engines.values():
with engine.lock:
engine.stats = {
"total_signals": 0,
"buy_signals": 0,
"sell_signals": 0,
"last_signal_time": None,
"errors": 0,
}
engine.signal_history.clear()
self.global_stats["uptime_start"] = datetime.now(timezone.utc)
self.logger.info("Statistics reset")
return {"status": "success", "message": "Statistics reset"}
@@ -780,15 +798,43 @@ class SignalGenerator:
"""Accept new client connections"""
try:
conn, _ = self.socket.accept()
self.connections.append(conn)
self.logger.info(
f"New client connected. Total clients: {len(self.connections)}"
)
with self.connections_lock:
self.connections.append(conn)
self.logger.info(
f"New client connected. Total clients: {len(self.connections)}"
)
except socket.timeout:
pass
except Exception as e:
self.logger.debug(f"Accept error: {e}")
def personality_worker(self, personality: str):
"""Worker thread for a specific personality"""
engine = self.engines[personality]
self.logger.info(f"[{personality}] Worker thread started")
while self.running:
try:
for timeframe in self.config.timeframes:
try:
signal = engine.generate_signal(timeframe)
if signal:
self.broadcast_signal(signal)
except Exception as e:
self.logger.error(f"[{personality}] Error processing {timeframe}: {e}")
with engine.lock:
engine.stats["errors"] += 1
time.sleep(self.config.poll_interval)
except Exception as e:
self.logger.error(f"[{personality}] Worker error: {e}")
with engine.lock:
engine.stats["errors"] += 1
time.sleep(1) # Brief pause on error
self.logger.info(f"[{personality}] Worker thread stopped")
def run(self):
"""Main processing loop"""
self.running = True
@@ -796,34 +842,29 @@ class SignalGenerator:
self.setup_health_socket()
self.setup_control_socket()
self.logger.info(
f"Signal generator started - Personality: {self.config.personality}"
)
self.logger.info("Multi-personality signal generator started")
self.logger.info(f"Running personalities: scalping, swing")
self.logger.info(f"Monitoring timeframes: {', '.join(self.config.timeframes)}")
self.logger.info(f"Poll interval: {self.config.poll_interval}s")
# Start personality worker threads
for personality in ["scalping", "swing"]:
thread = threading.Thread(
target=self.personality_worker,
args=(personality,),
name=f"{personality}-worker",
daemon=True
)
thread.start()
self.threads.append(thread)
try:
# Main thread handles connections and management
while self.running:
# Accept new connections
self.accept_connections()
# Handle health checks
self.handle_health_checks()
# Handle control commands
self.handle_control_commands()
# Generate signals for each timeframe
for timeframe in self.config.timeframes:
try:
signal = self.generate_signal(timeframe)
if signal:
self.broadcast_signal(signal)
except Exception as e:
self.logger.error(f"Error processing {timeframe}: {e}")
self.stats["errors"] += 1
time.sleep(self.config.poll_interval)
time.sleep(0.1)
except KeyboardInterrupt:
self.logger.info("Received interrupt signal")
@@ -833,12 +874,20 @@ class SignalGenerator:
def cleanup(self):
"""Cleanup resources"""
self.logger.info("Shutting down...")
self.running = False
for conn in self.connections:
try:
conn.close()
except:
pass
# Wait for worker threads
self.logger.info("Waiting for worker threads to finish...")
for thread in self.threads:
thread.join(timeout=2.0)
# Close connections
with self.connections_lock:
for conn in self.connections:
try:
conn.close()
except:
pass
if self.socket:
self.socket.close()
@@ -864,17 +913,17 @@ def main():
generator = SignalGenerator(config, logger)
# Signal handlers for hot-reload and control
# Signal handlers
def reload_config(sig, frame):
"""SIGUSR1: Reload configuration"""
logger.info("Received SIGUSR1 - Reloading configuration...")
try:
old_personality = config.personality
old_confidence = config.min_confidence
config.reload()
generator.stats["config_reloads"] += 1
generator.global_stats["config_reloads"] += 1
logger.info(
f"Configuration reloaded successfully "
f"(personality: {old_personality} -> {config.personality})"
f"(min_confidence: {old_confidence} -> {config.min_confidence})"
)
except Exception as e:
logger.error(f"Failed to reload configuration: {e}")

trader/Pipfile (new file, 12 lines)

@@ -0,0 +1,12 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
rich = "*"
[dev-packages]
[requires]
python_version = "3.13"

trader/README.md (new file, 139 lines)

@@ -0,0 +1,139 @@
# Paper Trading Bot
A real-time paper trading bot that executes trades based on signals from the multi-personality signal generator.
## Features
- **Real-time TUI Interface** - Beautiful terminal UI showing all trading activity
- **Multi-personality Support** - Trade signals from both scalping and swing strategies
- **Risk Management** - Configurable stop-loss, take-profit, and position sizing
- **SQLite Tracking** - All trades stored in database for analysis
- **Live Price Monitoring** - Reads current BTC price from candles.db
- **Signal Filtering** - Filter by confidence, personality, and timeframe
## Installation
```bash
cd trader
pip install -r requirements.txt
```
## Configuration
Edit `config.json` to customize:
```json
{
"initial_balance": 10000.0, // Starting capital
"position_size_percent": 2.0, // % of balance per trade
"max_positions": 3, // Max concurrent positions
"stop_loss_percent": 2.0, // Stop loss %
"take_profit_percent": 4.0, // Take profit %
"min_confidence": 0.5, // Minimum signal confidence
"enabled_personalities": ["scalping", "swing"],
"enabled_timeframes": ["1m", "5m"]
}
```
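A quick way to sanity-check these numbers: `position_size_percent` is applied to the current balance to get the notional value of each trade (this mirrors `calculate_position_size()` in `paper_trader.py`). A minimal sketch using the example config above and a made-up BTC price:

```python
# Position sizing arithmetic (mirrors calculate_position_size in paper_trader.py).
# The BTC price below is hypothetical, used only for illustration.
balance = 10000.0              # initial_balance
position_size_percent = 2.0    # % of balance per trade
price = 95000.0                # hypothetical BTC price

position_value = balance * (position_size_percent / 100)  # $200 of notional
position_size = position_value / price                    # ~0.0021 BTC
print(f"{position_size:.6f} BTC (~${position_value:.2f})")
```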
## Usage
Make sure the signal generator is running first:
```bash
# In signals/ directory
./signals.py
```
Then start the paper trader:
```bash
# In trader/ directory
./paper_trader.py
```
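The generator streams signals as newline-delimited JSON over the Unix socket, so any client can subscribe to the same feed the trader uses. A minimal consumer sketch, assuming the socket path from `trader/config.json` and the payload fields emitted by `signals.py` (`signal`, `timeframe`, `price`, `confidence`, `personality`):

```python
# Minimal signal consumer sketch; the paper trader does essentially this internally.
import json
import socket

sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect("/tmp/signals.sock")  # signal_socket in trader/config.json

buffer = b""
while True:
    chunk = sock.recv(4096)
    if not chunk:
        break  # generator closed the connection
    buffer += chunk
    while b"\n" in buffer:  # one JSON object per line
        line, buffer = buffer.split(b"\n", 1)
        if line.strip():
            sig = json.loads(line)
            print(sig.get("personality"), sig.get("timeframe"),
                  sig.get("signal"), sig.get("price"), sig.get("confidence"))
```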
## TUI Interface
The TUI displays:
- **Header**: Current BTC price, balance, equity, and total PnL
- **Statistics**: Win rate, total trades, wins/losses
- **Open Positions**: Live view of active trades with unrealized PnL
- **Recent Closed Trades**: Last 10 completed trades
- **Recent Signals**: Incoming signals from the generator
## Database Schema
### trades table
- Trade details (entry/exit prices, type, timeframe)
- PnL calculations
- Signal confidence and personality
- Exit reasons (Stop Loss, Take Profit)
### balance_history table
- Historical balance snapshots
- Equity tracking over time
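Since everything is plain SQLite, results can also be inspected outside the TUI. A minimal sketch of a PnL summary against the trades table described above (column names taken from `TradeDatabase` in `paper_trader.py`, assuming `trades.db` already exists):

```python
# Summarize closed trades straight from trades.db.
import sqlite3

conn = sqlite3.connect("trades.db")
cur = conn.cursor()
cur.execute("""
    SELECT COUNT(*),
           COALESCE(SUM(CASE WHEN pnl > 0 THEN 1 ELSE 0 END), 0),
           ROUND(COALESCE(SUM(pnl), 0), 2)
    FROM trades
    WHERE status != 'OPEN'
""")
closed, wins, total_pnl = cur.fetchone()
conn.close()
print(f"Closed trades: {closed}, wins: {wins}, total PnL: ${total_pnl}")
```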
## Trading Logic
1. **Signal Reception**: Listens to Unix socket from signal generator
2. **Filtering**: Applies confidence, personality, and timeframe filters
3. **Position Sizing**: Calculates position based on balance %
4. **Entry**: Opens LONG (BUY signal) or SHORT (SELL signal) position (exit levels sketched below)
5. **Exit Monitoring**: Continuously checks stop-loss and take-profit levels
6. **Closure**: Calculates PnL and updates balance
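The exit levels in step 4 come from the configured percentages; the exact formula lives in `open_trade()` in `paper_trader.py`, so the following is only a sketch under the usual assumption that stop-loss and take-profit sit at those percentages on either side of the entry price:

```python
# Assumed derivation of exit levels from config percentages
# (sketch, not the verbatim open_trade() implementation).
def exit_levels(entry_price: float, trade_type: str,
                stop_loss_percent: float = 2.0,
                take_profit_percent: float = 4.0):
    if trade_type == "LONG":
        stop_loss = entry_price * (1 - stop_loss_percent / 100)
        take_profit = entry_price * (1 + take_profit_percent / 100)
    else:  # SHORT: exits are mirrored around the entry
        stop_loss = entry_price * (1 + stop_loss_percent / 100)
        take_profit = entry_price * (1 - take_profit_percent / 100)
    return stop_loss, take_profit

sl, tp = exit_levels(95000.0, "LONG")
print(f"LONG:  SL={sl:.2f}  TP={tp:.2f}")   # SL=93100.00  TP=98800.00
sl, tp = exit_levels(95000.0, "SHORT")
print(f"SHORT: SL={sl:.2f}  TP={tp:.2f}")   # SL=96900.00  TP=91200.00
```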
## Risk Management
- Maximum concurrent positions limit
- Stop-loss protection on every trade
- Position sizing based on account balance
- (Future: Daily loss limits, trailing stops)
## Keyboard Controls
- `Ctrl+C` - Graceful shutdown (closes socket connections, saves state)
## File Structure
```
trader/
├── paper_trader.py # Main bot with TUI
├── config.json # Configuration
├── trades.db # Trade history (auto-created)
├── requirements.txt # Python dependencies
└── logs/ # (Future: logging)
```
## Example Output
```
┌─────────────────────────────────────────────────────────┐
│ PAPER TRADING BOT | BTC: $95,234.50 | Balance: $10,245.32 │
│ Equity: $10,387.12 | PnL: +$387.12 (+3.87%) │
└─────────────────────────────────────────────────────────┘
┌─────────────┐ ┌──────────────────────────┐
│ Statistics │ │ Open Positions (2) │
├─────────────┤ ├──────────────────────────┤
│ Total: 47 │ │ ID Type Entry PnL │
│ Wins: 28 │ │ 48 LONG 95100 +$142 │
│ Losses: 19 │ │ 49 LONG 95200 +$34 │
│ Win Rate: 60%│ └──────────────────────────┘
└─────────────┘
```
## Notes
- This is **paper trading only** - no real money involved
- Requires a running signal generator and a populated candles.db
- Price updates every second from the most recent candle
- Signals processed in real-time as they arrive
## Future Enhancements
- Trailing stop-loss implementation
- Daily loss limit enforcement
- Performance analytics dashboard
- Export trade history to CSV
- Backtesting mode
- Web dashboard option

trader/config.json (new file, 20 lines)

@@ -0,0 +1,20 @@
{
"trades_db": "trades.db",
"candles_db": "../onramp/market_data.db",
"signal_socket": "/tmp/signals.sock",
"initial_balance": 100.0,
"position_size_percent": 10.0,
"max_positions": 3,
"stop_loss_percent": 2.0,
"take_profit_percent": 4.0,
"min_confidence": 0.5,
"enabled_personalities": ["scalping", "swing"],
"enabled_timeframes": ["1m", "5m"],
"max_daily_loss_percent": 5.0,
"trailing_stop": false,
"trailing_stop_percent": 1.5
}

trader/paper_trader.py (new Executable file, 701 lines)

@@ -0,0 +1,701 @@
#!/usr/bin/env python3
"""
Paper Trading Bot
Executes paper trades based on signals from the signal generator
Real-time TUI interface with trade tracking
"""
import sqlite3
import json
import socket
import time
import signal
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Dict, List
from dataclasses import dataclass, asdict
from enum import Enum
import threading
from collections import deque
from rich.console import Console
from rich.live import Live
from rich.table import Table
from rich.layout import Layout
from rich.panel import Panel
from rich.text import Text
from rich import box
class TradeStatus(Enum):
OPEN = "OPEN"
CLOSED = "CLOSED"
STOPPED = "STOPPED"
class TradeType(Enum):
LONG = "LONG"
SHORT = "SHORT"
@dataclass
class Trade:
id: Optional[int]
trade_type: str
entry_price: float
entry_time: str
position_size: float
stop_loss: float
take_profit: Optional[float]
timeframe: str
personality: str
signal_confidence: float
status: str
exit_price: Optional[float] = None
exit_time: Optional[str] = None
pnl: Optional[float] = None
pnl_percent: Optional[float] = None
exit_reason: Optional[str] = None
class Config:
def __init__(self, config_path: str = "config.json"):
self.config_path = config_path
self.reload()
def reload(self):
with open(self.config_path, "r") as f:
data = json.load(f)
# Database paths
self.trades_db = data.get("trades_db", "trades.db")
self.candles_db = data.get("candles_db", "../onramp/market_data.db")
# Signal socket
self.signal_socket = data.get("signal_socket", "/tmp/signals.sock")
# Trading parameters
self.initial_balance = data.get("initial_balance", 10000.0)
self.position_size_percent = data.get("position_size_percent", 2.0) # % of balance per trade
self.max_positions = data.get("max_positions", 3)
self.stop_loss_percent = data.get("stop_loss_percent", 2.0)
self.take_profit_percent = data.get("take_profit_percent", 4.0)
# Strategy filters
self.min_confidence = data.get("min_confidence", 0.5)
self.enabled_personalities = data.get("enabled_personalities", ["scalping", "swing"])
self.enabled_timeframes = data.get("enabled_timeframes", ["1m", "5m"])
# Risk management
self.max_daily_loss_percent = data.get("max_daily_loss_percent", 5.0)
self.trailing_stop = data.get("trailing_stop", False)
self.trailing_stop_percent = data.get("trailing_stop_percent", 1.5)
class TradeDatabase:
def __init__(self, db_path: str):
self.db_path = db_path
self.init_database()
def init_database(self):
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute("""
CREATE TABLE IF NOT EXISTS trades (
id INTEGER PRIMARY KEY AUTOINCREMENT,
trade_type TEXT NOT NULL,
entry_price REAL NOT NULL,
entry_time TEXT NOT NULL,
position_size REAL NOT NULL,
stop_loss REAL NOT NULL,
take_profit REAL,
timeframe TEXT NOT NULL,
personality TEXT NOT NULL,
signal_confidence REAL NOT NULL,
status TEXT NOT NULL,
exit_price REAL,
exit_time TEXT,
pnl REAL,
pnl_percent REAL,
exit_reason TEXT
)
""")
cursor.execute("""
CREATE TABLE IF NOT EXISTS balance_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp TEXT NOT NULL,
balance REAL NOT NULL,
equity REAL NOT NULL,
open_positions INTEGER NOT NULL
)
""")
conn.commit()
conn.close()
def save_trade(self, trade: Trade) -> int:
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
if trade.id is None:
cursor.execute("""
INSERT INTO trades (
trade_type, entry_price, entry_time, position_size,
stop_loss, take_profit, timeframe, personality,
signal_confidence, status
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
trade.trade_type, trade.entry_price, trade.entry_time,
trade.position_size, trade.stop_loss, trade.take_profit,
trade.timeframe, trade.personality, trade.signal_confidence,
trade.status
))
trade_id = cursor.lastrowid
else:
cursor.execute("""
UPDATE trades SET
exit_price = ?, exit_time = ?, pnl = ?,
pnl_percent = ?, status = ?, exit_reason = ?
WHERE id = ?
""", (
trade.exit_price, trade.exit_time, trade.pnl,
trade.pnl_percent, trade.status, trade.exit_reason,
trade.id
))
trade_id = trade.id
conn.commit()
conn.close()
return trade_id
def get_open_trades(self) -> List[Trade]:
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("SELECT * FROM trades WHERE status = 'OPEN' ORDER BY entry_time DESC")
rows = cursor.fetchall()
conn.close()
trades = []
for row in rows:
trades.append(Trade(**dict(row)))
return trades
def get_recent_trades(self, limit: int = 10) -> List[Trade]:
conn = sqlite3.connect(self.db_path)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("""
SELECT * FROM trades
WHERE status != 'OPEN'
ORDER BY exit_time DESC
LIMIT ?
""", (limit,))
rows = cursor.fetchall()
conn.close()
trades = []
for row in rows:
trades.append(Trade(**dict(row)))
return trades
def save_balance_snapshot(self, balance: float, equity: float, open_positions: int):
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
cursor.execute("""
INSERT INTO balance_history (timestamp, balance, equity, open_positions)
VALUES (?, ?, ?, ?)
""", (datetime.now(timezone.utc).isoformat(), balance, equity, open_positions))
conn.commit()
conn.close()
def get_statistics(self) -> Dict:
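"""Aggregate closed-trade statistics: counts, win rate, total and average PnL."""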
conn = sqlite3.connect(self.db_path)
cursor = conn.cursor()
# Total trades
cursor.execute("SELECT COUNT(*) FROM trades WHERE status != 'OPEN'")
total_trades = cursor.fetchone()[0]
# Win/Loss
cursor.execute("SELECT COUNT(*) FROM trades WHERE status != 'OPEN' AND pnl > 0")
wins = cursor.fetchone()[0]
cursor.execute("SELECT COUNT(*) FROM trades WHERE status != 'OPEN' AND pnl < 0")
losses = cursor.fetchone()[0]
# Total PnL
cursor.execute("SELECT SUM(pnl) FROM trades WHERE status != 'OPEN'")
total_pnl = cursor.fetchone()[0] or 0.0
# Average win/loss
cursor.execute("SELECT AVG(pnl) FROM trades WHERE status != 'OPEN' AND pnl > 0")
avg_win = cursor.fetchone()[0] or 0.0
cursor.execute("SELECT AVG(pnl) FROM trades WHERE status != 'OPEN' AND pnl < 0")
avg_loss = cursor.fetchone()[0] or 0.0
conn.close()
win_rate = (wins / total_trades * 100) if total_trades > 0 else 0.0
return {
"total_trades": total_trades,
"wins": wins,
"losses": losses,
"win_rate": win_rate,
"total_pnl": total_pnl,
"avg_win": avg_win,
"avg_loss": avg_loss,
}
class PriceMonitor:
"""Monitor current price from candles database"""
def __init__(self, candles_db: str):
self.candles_db = candles_db
self.current_price = None
self.last_update = None
def get_current_price(self) -> Optional[float]:
"""Get most recent close price from 1m timeframe"""
try:
conn = sqlite3.connect(f"file:{self.candles_db}?mode=ro", uri=True, timeout=5)
cursor = conn.cursor()
cursor.execute("""
SELECT close, timestamp
FROM candles
WHERE timeframe = '1m'
ORDER BY timestamp DESC
LIMIT 1
""")
row = cursor.fetchone()
conn.close()
if row:
self.current_price = float(row[0])
self.last_update = row[1]
return self.current_price
return None
except Exception:
# No usable price (e.g. locked or missing DB); callers treat None as "no data yet"
return None
class PaperTrader:
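"""Paper-trading engine: consumes signals over a Unix socket, simulates fills, and renders a Rich TUI."""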
def __init__(self, config: Config):
self.config = config
self.db = TradeDatabase(config.trades_db)
self.price_monitor = PriceMonitor(config.candles_db)
self.balance = config.initial_balance
self.open_trades: List[Trade] = []
self.recent_signals = deque(maxlen=20)
self.running = False
self.lock = threading.Lock()
# Stats
self.stats = {
"signals_received": 0,
"signals_filtered": 0,
"trades_opened": 0,
"trades_closed": 0,
}
# Load open trades from DB
self.open_trades = self.db.get_open_trades()
def connect_to_signals(self) -> socket.socket:
"""Connect to signal generator socket"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.config.signal_socket)
return sock
def calculate_position_size(self, price: float) -> float:
"""Calculate position size based on balance and config"""
position_value = self.balance * (self.config.position_size_percent / 100)
return position_value / price
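# Illustrative numbers: a $10,000 balance at 2% allocates $200 of notional per trade;
# at a BTC price of $50,000 that is a 0.004 BTC position.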
def should_take_signal(self, signal: Dict) -> bool:
"""Filter signals based on config"""
# Check confidence
if signal.get("confidence", 0) < self.config.min_confidence:
return False
# Check personality
if signal.get("personality") not in self.config.enabled_personalities:
return False
# Check timeframe
if signal.get("timeframe") not in self.config.enabled_timeframes:
return False
# Check max positions
if len(self.open_trades) >= self.config.max_positions:
return False
return True
def open_trade(self, signal: Dict):
"""Open a new paper trade"""
with self.lock:
trade_type = TradeType.LONG.value if signal["signal"] == "BUY" else TradeType.SHORT.value
entry_price = signal["price"]
position_size = self.calculate_position_size(entry_price)
# Calculate stop loss and take profit
if trade_type == TradeType.LONG.value:
stop_loss = entry_price * (1 - self.config.stop_loss_percent / 100)
take_profit = entry_price * (1 + self.config.take_profit_percent / 100)
else:
stop_loss = entry_price * (1 + self.config.stop_loss_percent / 100)
take_profit = entry_price * (1 - self.config.take_profit_percent / 100)
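# Illustrative numbers with the 2% / 4% defaults: a LONG entered at $50,000 gets a
# $49,000 stop and a $52,000 target; a SHORT entered there gets $51,000 / $48,000.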
trade = Trade(
id=None,
trade_type=trade_type,
entry_price=entry_price,
entry_time=signal.get("generated_at", datetime.now(timezone.utc).isoformat()),
position_size=position_size,
stop_loss=stop_loss,
take_profit=take_profit,
timeframe=signal["timeframe"],
personality=signal["personality"],
signal_confidence=signal["confidence"],
status=TradeStatus.OPEN.value,
)
trade_id = self.db.save_trade(trade)
trade.id = trade_id
self.open_trades.append(trade)
self.stats["trades_opened"] += 1
def check_exits(self, current_price: float):
"""Check if any open trades should be closed"""
with self.lock:
for trade in self.open_trades[:]:
should_exit = False
exit_reason = None
if trade.trade_type == TradeType.LONG.value:
# Long position checks
if current_price <= trade.stop_loss:
should_exit = True
exit_reason = "Stop Loss"
elif trade.take_profit and current_price >= trade.take_profit:
should_exit = True
exit_reason = "Take Profit"
else:
# Short position checks
if current_price >= trade.stop_loss:
should_exit = True
exit_reason = "Stop Loss"
elif trade.take_profit and current_price <= trade.take_profit:
should_exit = True
exit_reason = "Take Profit"
if should_exit:
self.close_trade(trade, current_price, exit_reason)
def close_trade(self, trade: Trade, exit_price: float, reason: str):
"""Close a trade and calculate PnL"""
trade.exit_price = exit_price
trade.exit_time = datetime.now(timezone.utc).isoformat()
trade.exit_reason = reason
# Calculate PnL
if trade.trade_type == TradeType.LONG.value:
pnl = (exit_price - trade.entry_price) * trade.position_size
else:
pnl = (trade.entry_price - exit_price) * trade.position_size
trade.pnl = pnl
trade.pnl_percent = (pnl / (trade.entry_price * trade.position_size)) * 100
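# Sanity check with the illustrative 0.004 BTC long from $50,000: an exit at $52,000
# gives pnl = 2,000 * 0.004 = $8, i.e. +4% of the $200 notional.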
trade.status = TradeStatus.CLOSED.value
self.balance += pnl
self.db.save_trade(trade)
self.open_trades.remove(trade)
self.stats["trades_closed"] += 1
def get_equity(self, current_price: float) -> float:
"""Calculate current equity (balance + unrealized PnL)"""
unrealized_pnl = 0.0
for trade in self.open_trades:
if trade.trade_type == TradeType.LONG.value:
pnl = (current_price - trade.entry_price) * trade.position_size
else:
pnl = (trade.entry_price - current_price) * trade.position_size
unrealized_pnl += pnl
return self.balance + unrealized_pnl
def process_signal(self, signal: Dict):
"""Process incoming signal"""
self.stats["signals_received"] += 1
self.recent_signals.append(signal)
if self.should_take_signal(signal):
self.open_trade(signal)
else:
self.stats["signals_filtered"] += 1
def signal_listener_thread(self):
"""Background thread to listen for signals"""
while self.running:
try:
sock = self.connect_to_signals()
sock.settimeout(1.0)
buffer = ""
while self.running:
try:
data = sock.recv(4096).decode("utf-8")
if not data:
break
buffer += data
while "\n" in buffer:
line, buffer = buffer.split("\n", 1)
if line.strip():
signal = json.loads(line)
self.process_signal(signal)
except socket.timeout:
continue
except json.JSONDecodeError:
continue
sock.close()
except Exception:
if self.running:
time.sleep(5) # Wait before reconnecting
def price_monitor_thread(self):
"""Background thread to monitor prices and check exits"""
while self.running:
current_price = self.price_monitor.get_current_price()
if current_price and self.open_trades:
self.check_exits(current_price)
time.sleep(1)
def build_tui(self) -> Layout:
"""Build the TUI layout"""
layout = Layout()
layout.split_column(
Layout(name="header", size=3),
Layout(name="body"),
Layout(name="footer", size=3),
)
layout["body"].split_row(
Layout(name="left"),
Layout(name="right"),
)
layout["left"].split_column(
Layout(name="stats", size=10),
Layout(name="open_trades"),
)
layout["right"].split_column(
Layout(name="recent_closed", size=15),
Layout(name="signals"),
)
return layout
def render_header(self) -> Panel:
"""Render header panel"""
current_price = self.price_monitor.current_price or 0.0
equity = self.get_equity(current_price)
pnl = equity - self.config.initial_balance
pnl_percent = (pnl / self.config.initial_balance) * 100
pnl_color = "green" if pnl >= 0 else "red"
text = Text()
text.append("PAPER TRADING BOT", style="bold cyan")
text.append(f" | BTC: ${current_price:,.2f}", style="yellow")
text.append(f" | Balance: ${self.balance:,.2f}", style="white")
text.append(f" | Equity: ${equity:,.2f}", style="white")
text.append(f" | PnL: ${pnl:+,.2f} ({pnl_percent:+.2f}%)", style=pnl_color)
return Panel(text, style="bold")
def render_stats(self) -> Panel:
"""Render statistics panel"""
db_stats = self.db.get_statistics()
table = Table(show_header=False, box=box.SIMPLE)
table.add_column("Metric", style="cyan")
table.add_column("Value", justify="right")
table.add_row("Total Trades", str(db_stats["total_trades"]))
table.add_row("Wins", f"[green]{db_stats['wins']}[/green]")
table.add_row("Losses", f"[red]{db_stats['losses']}[/red]")
table.add_row("Win Rate", f"{db_stats['win_rate']:.1f}%")
table.add_row("Total PnL", f"${db_stats['total_pnl']:,.2f}")
table.add_row("", "")
table.add_row("Signals RX", str(self.stats["signals_received"]))
table.add_row("Filtered", str(self.stats["signals_filtered"]))
return Panel(table, title="Statistics", border_style="blue")
def render_open_trades(self) -> Panel:
"""Render open trades table"""
table = Table(box=box.SIMPLE)
table.add_column("ID", style="cyan")
table.add_column("Type")
table.add_column("Entry", justify="right")
table.add_column("Current", justify="right")
table.add_column("PnL", justify="right")
table.add_column("TF")
table.add_column("Pers")
current_price = self.price_monitor.current_price or 0.0
for trade in self.open_trades:
if trade.trade_type == TradeType.LONG.value:
pnl = (current_price - trade.entry_price) * trade.position_size
type_color = "green"
else:
pnl = (trade.entry_price - current_price) * trade.position_size
type_color = "red"
pnl_color = "green" if pnl >= 0 else "red"
table.add_row(
str(trade.id),
f"[{type_color}]{trade.trade_type}[/{type_color}]",
f"${trade.entry_price:,.2f}",
f"${current_price:,.2f}",
f"[{pnl_color}]${pnl:+,.2f}[/{pnl_color}]",
trade.timeframe,
trade.personality[:4].upper(),
)
return Panel(table, title=f"Open Positions ({len(self.open_trades)})", border_style="green")
def render_recent_closed(self) -> Panel:
"""Render recent closed trades"""
table = Table(box=box.SIMPLE)
table.add_column("ID", style="cyan")
table.add_column("Type")
table.add_column("PnL", justify="right")
table.add_column("Exit")
recent = self.db.get_recent_trades(10)
for trade in recent:
type_color = "green" if trade.trade_type == TradeType.LONG.value else "red"
pnl_color = "green" if trade.pnl and trade.pnl >= 0 else "red"
table.add_row(
str(trade.id),
f"[{type_color}]{trade.trade_type}[/{type_color}]",
f"[{pnl_color}]${trade.pnl:+,.2f}[/{pnl_color}]" if trade.pnl else "N/A",
trade.exit_reason or "N/A",
)
return Panel(table, title="Recent Closed Trades", border_style="yellow")
def render_signals(self) -> Panel:
"""Render recent signals"""
table = Table(box=box.SIMPLE)
table.add_column("Signal")
table.add_column("TF")
table.add_column("Conf", justify="right")
table.add_column("Pers")
for signal in list(self.recent_signals)[-10:]:
signal_color = "green" if signal["signal"] == "BUY" else "red"
table.add_row(
f"[{signal_color}]{signal['signal']}[/{signal_color}]",
signal["timeframe"],
f"{signal['confidence']:.2f}",
signal["personality"][:4].upper(),
)
return Panel(table, title="Recent Signals", border_style="magenta")
def render_footer(self) -> Panel:
"""Render footer panel"""
text = Text()
text.append("Press ", style="dim")
text.append("Ctrl+C", style="bold red")
text.append(" to exit", style="dim")
return Panel(text, style="dim")
def update_display(self, layout: Layout):
"""Update all TUI panels"""
layout["header"].update(self.render_header())
layout["stats"].update(self.render_stats())
layout["open_trades"].update(self.render_open_trades())
layout["recent_closed"].update(self.render_recent_closed())
layout["signals"].update(self.render_signals())
layout["footer"].update(self.render_footer())
def run(self):
"""Main run loop with TUI"""
self.running = True
# Start background threads
signal_thread = threading.Thread(target=self.signal_listener_thread, daemon=True)
price_thread = threading.Thread(target=self.price_monitor_thread, daemon=True)
signal_thread.start()
price_thread.start()
# TUI
console = Console()
layout = self.build_tui()
try:
with Live(layout, console=console, screen=True, refresh_per_second=2):
while self.running:
self.update_display(layout)
time.sleep(0.5)
except KeyboardInterrupt:
pass
finally:
self.running = False
signal_thread.join(timeout=2)
price_thread.join(timeout=2)
def main():
config = Config()
trader = PaperTrader(config)
def shutdown(sig, frame):
trader.running = False
signal.signal(signal.SIGINT, shutdown)
signal.signal(signal.SIGTERM, shutdown)
trader.run()
if __name__ == "__main__":
main()
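# Run this script directly once the signal socket and candles DB referenced in Config
# exist; Ctrl+C (SIGINT) triggers the shutdown handler above and exits the TUI cleanly.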