fix: resolve 8 critical issues from code review

C1: Fix BacktestDataLoader constructor args (was passing wrong kwargs)
C2: Fix BacktestResult attribute names (max_drawdown_pct, avg_hold_duration)
C3: Remove insecure JWT secret default (now required via env var)
C4: Fix .env.example to use TRADING_ prefix for all config vars
C5: Add missing fields to portfolio endpoint (daily_pnl_pct, total_pnl, trading_active)
C6: Add missing /portfolio/metrics endpoint
C7: Add 'value' field to equity curve response for frontend compatibility
C8: Add 6M/ALL periods and case-insensitive period enum parsing
Also: make app creation lazy to avoid config validation at import time
This commit is contained in:
Viktor Barzin 2026-02-22 17:48:40 +00:00
parent 870961f3e9
commit 2a56727267
No known key found for this signature in database
GPG key ID: 0EB088298288D958
5 changed files with 103 additions and 27 deletions

View file

@ -1,23 +1,28 @@
# PostgreSQL
POSTGRES_PASSWORD=trading
# Trading Bot
TRADING_DATABASE_URL=postgresql+asyncpg://trading:trading@localhost:5432/trading
TRADING_REDIS_URL=redis://localhost:6379/0
# Trading Bot — shared config (env_prefix = "TRADING_")
TRADING_DATABASE_URL=postgresql+asyncpg://trading:trading@postgres:5432/trading
TRADING_REDIS_URL=redis://redis:6379/0
TRADING_LOG_LEVEL=INFO
# Alpaca (paper trading)
ALPACA_API_KEY=your_api_key_here
ALPACA_SECRET_KEY=your_secret_key_here
ALPACA_BASE_URL=https://paper-api.alpaca.markets
TRADING_ALPACA_API_KEY=your_api_key_here
TRADING_ALPACA_SECRET_KEY=your_secret_key_here
TRADING_ALPACA_BASE_URL=https://paper-api.alpaca.markets
# JWT
JWT_SECRET_KEY=change-me-in-production
# JWT — REQUIRED, generate with: python -c "import secrets; print(secrets.token_hex(32))"
TRADING_JWT_SECRET_KEY=
# Reddit (for news fetcher)
REDDIT_CLIENT_ID=your_client_id
REDDIT_CLIENT_SECRET=your_client_secret
REDDIT_USER_AGENT=trading-bot/0.1
TRADING_REDDIT_CLIENT_ID=your_client_id
TRADING_REDDIT_CLIENT_SECRET=your_client_secret
TRADING_REDDIT_USER_AGENT=trading-bot/0.1
# Ollama
OLLAMA_HOST=http://localhost:11434
# Ollama — use Docker service name inside compose
TRADING_OLLAMA_HOST=http://ollama:11434
# WebAuthn — update for production domain
TRADING_RP_ID=localhost
TRADING_RP_NAME=Trading Bot
TRADING_RP_ORIGIN=http://localhost:5173

View file

@ -10,8 +10,8 @@ class ApiGatewayConfig(BaseConfig):
prefixed with ``TRADING_``.
"""
# JWT settings
jwt_secret_key: str = "CHANGE-ME-IN-PRODUCTION"
# JWT settings — TRADING_JWT_SECRET_KEY must be set in environment
jwt_secret_key: str
jwt_algorithm: str = "HS256"
access_token_expire_minutes: int = 15
refresh_token_expire_days: int = 7

View file

@ -99,5 +99,6 @@ def create_app(config: ApiGatewayConfig | None = None) -> FastAPI:
return app
# Convenience: allow ``uvicorn services.api_gateway.main:app``
app = create_app()
def get_app() -> FastAPI:
    """Application factory for ``uvicorn services.api_gateway.main:get_app --factory``.

    Building the app here (rather than at module import) defers
    ``create_app()`` — and any config validation it performs — until
    uvicorn actually invokes the factory.
    """
    return create_app()

View file

@ -94,15 +94,11 @@ async def _run_backtest_task(
engine = BacktestEngine(config=bt_config, strategies=strategies)
# Use an in-memory stub data loader for now; a full implementation
# would read from TimescaleDB.
# Use an empty data loader for now; a full implementation
# would load historical bars from TimescaleDB.
from backtester.data_loader import BacktestDataLoader
data_loader = BacktestDataLoader(
session_factory=None,
start_date=config.start_date,
end_date=config.end_date,
)
data_loader = BacktestDataLoader(bars=[], sentiments=[])
result = await engine.run(data_loader)
@ -117,12 +113,12 @@ async def _run_backtest_task(
"annualized_return": result.annualized_return,
"sharpe_ratio": result.sharpe_ratio,
"sortino_ratio": result.sortino_ratio,
"max_drawdown": result.max_drawdown,
"max_drawdown_pct": result.max_drawdown_pct,
"max_drawdown_duration_days": result.max_drawdown_duration_days,
"win_rate": result.win_rate,
"avg_win_loss_ratio": result.avg_win_loss_ratio,
"trade_count": result.trade_count,
"avg_hold_duration_hours": result.avg_hold_duration_hours,
"profit_factor": result.profit_factor,
"avg_hold_duration_seconds": result.avg_hold_duration.total_seconds(),
},
"completed_at": datetime.now(tz=timezone.utc).isoformat(),
}),

View file

@ -18,7 +18,19 @@ class HistoryPeriod(str, Enum):
ONE_WEEK = "1w"
ONE_MONTH = "1m"
THREE_MONTHS = "3m"
SIX_MONTHS = "6m"
ONE_YEAR = "1y"
ALL = "all"
@classmethod
def _missing_(cls, value: object) -> "HistoryPeriod | None":
    """Map case-insensitive variants like '1D', '1M', 'ALL' onto members.

    Called by the Enum machinery when a raw value fails the normal
    lookup; returning ``None`` lets the usual ``ValueError`` propagate.
    """
    if isinstance(value, str):
        wanted = value.lower()
        # First member whose value matches the lower-cased input, else None.
        return next((member for member in cls if member.value == wanted), None)
    return None
def _period_to_timedelta(period: HistoryPeriod) -> timedelta:
@ -28,7 +40,9 @@ def _period_to_timedelta(period: HistoryPeriod) -> timedelta:
HistoryPeriod.ONE_WEEK: timedelta(weeks=1),
HistoryPeriod.ONE_MONTH: timedelta(days=30),
HistoryPeriod.THREE_MONTHS: timedelta(days=90),
HistoryPeriod.SIX_MONTHS: timedelta(days=180),
HistoryPeriod.ONE_YEAR: timedelta(days=365),
HistoryPeriod.ALL: timedelta(days=365 * 10), # effectively "all time"
}
return mapping[period]
@ -57,13 +71,25 @@ async def get_portfolio(
"cash": 0.0,
"buying_power": 0.0,
"daily_pnl": 0.0,
"daily_pnl_pct": 0.0,
"total_pnl": 0.0,
"total_pnl_pct": 0.0,
"trading_active": True,
}
# Compute percentage fields from snapshot data
daily_pnl_pct = (latest.daily_pnl / (latest.total_value - latest.daily_pnl) * 100.0
if latest.total_value != latest.daily_pnl else 0.0)
return {
"total_value": latest.total_value,
"cash": latest.cash,
"buying_power": latest.cash,
"daily_pnl": latest.daily_pnl,
"daily_pnl_pct": round(daily_pnl_pct, 2),
"total_pnl": latest.daily_pnl, # TODO: compute cumulative P&L from first snapshot
"total_pnl_pct": round(daily_pnl_pct, 2),
"trading_active": True, # TODO: read from Redis trading pause flag
}
@ -94,6 +120,53 @@ async def get_positions(
]
@router.get("/metrics")
async def get_portfolio_metrics(
    request: Request,
    _user: dict = Depends(get_current_user),
) -> dict:
    """Aggregate portfolio performance metrics — ROI, Sharpe, win rate, drawdown."""
    from shared.models.trading import Trade, TradeStatus
    from shared.models.timeseries import StrategyMetric

    session_factory = request.app.state.db_session_factory
    async with session_factory() as session:
        # Filled trades drive the win-rate and realized-P&L figures.
        filled_result = await session.execute(
            select(Trade).where(Trade.status == TradeStatus.FILLED)
        )
        filled = filled_result.scalars().all()

        # The ten most recent strategy metric rows supply the Sharpe estimate.
        metrics_result = await session.execute(
            select(StrategyMetric)
            .order_by(desc(StrategyMetric.timestamp))
            .limit(10)
        )
        recent_metrics = metrics_result.scalars().all()

    trade_count = len(filled)
    realized = [t.pnl for t in filled if t.pnl is not None]
    wins = sum(1 for pnl in realized if pnl > 0)
    win_rate = wins / trade_count if trade_count > 0 else 0.0
    total_pnl = sum(realized)

    # Approximate ROI from P&L (rough — proper calculation needs initial capital)
    roi = total_pnl / 100_000.0 * 100.0  # assumes 100k starting capital

    # Average Sharpe across whatever recent metrics carry a value.
    sharpes = [m.sharpe_ratio for m in recent_metrics if m.sharpe_ratio is not None]
    avg_sharpe = sum(sharpes) / len(sharpes) if sharpes else 0.0

    return {
        "roi": round(roi, 4),
        "sharpe": round(avg_sharpe, 2),
        "win_rate": round(win_rate, 4),
        "max_drawdown": 0.0,  # TODO: compute from portfolio snapshots
        "total_trades": trade_count,
        "avg_hold_duration": "0h",  # TODO: compute from trade outcomes
    }
@router.get("/history")
async def get_portfolio_history(
request: Request,
@ -116,6 +189,7 @@ async def get_portfolio_history(
return [
{
"timestamp": s.timestamp.isoformat(),
"value": s.total_value,
"total_value": s.total_value,
"cash": s.cash,
"positions_value": s.positions_value,