feat: integrate quantitative, calendar, market data endpoints
Add 3 new service layers and route modules:

- quantitative_service: Sharpe ratio, CAPM, normality tests, unit root tests
- calendar_service: earnings/dividends/IPO/splits calendars, estimates, SEC ownership
- market_service: ETF, index, crypto, forex, options, futures data

Total endpoints: 50. All use free providers (yfinance, SEC).
Update README with comprehensive endpoint documentation.
This commit is contained in:
147
quantitative_service.py
Normal file
147
quantitative_service.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""Quantitative analysis: risk metrics, performance, CAPM, normality tests."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from openbb import obb
|
||||
|
||||
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)

# Data provider passed to every OpenBB call in this module.
PROVIDER = "yfinance"

# Need 252+ trading days for default window; 730 calendar days is safe
PERF_DAYS = 730
# Price column that all quantitative metrics below operate on.
TARGET = "close"
|
||||
|
||||
|
||||
async def get_performance_metrics(symbol: str, days: int = 365) -> dict[str, Any]:
    """Calculate Sharpe ratio, summary stats, and volatility for a symbol.

    Args:
        symbol: Ticker symbol to analyze.
        days: Requested window in calendar days (echoed back as
            ``period_days``). The actual fetch window is at least
            ``PERF_DAYS`` so the default 252-trading-day Sharpe window
            has enough data.

    Returns:
        Dict with ``symbol``, ``period_days``, ``sharpe_ratio``, ``summary``
        and ``stdev``; on total failure a dict with an ``error`` key.
        Sub-metrics that fail individually degrade to None/{} instead of
        failing the whole call.
    """
    # Need at least 252 trading days for Sharpe window
    fetch_days = max(days, PERF_DAYS)
    start = (datetime.now() - timedelta(days=fetch_days)).strftime("%Y-%m-%d")

    try:
        hist = await asyncio.to_thread(
            obb.equity.price.historical, symbol, start_date=start, provider=PROVIDER
        )
        if not hist or not hist.results:
            return {"symbol": symbol, "error": "No historical data"}

        # Run the three independent metric computations concurrently;
        # return_exceptions=True keeps one failure from sinking the rest.
        sharpe_result, summary_result, stdev_result = await asyncio.gather(
            asyncio.to_thread(
                obb.quantitative.performance.sharpe_ratio,
                data=hist.results, target=TARGET,
            ),
            asyncio.to_thread(
                obb.quantitative.summary, data=hist.results, target=TARGET
            ),
            asyncio.to_thread(
                obb.quantitative.stats.stdev, data=hist.results, target=TARGET
            ),
            return_exceptions=True,
        )

        # Log (rather than silently drop) any sub-metric that failed, so a
        # partial result is diagnosable from the logs.
        for metric_name, result in (
            ("sharpe_ratio", sharpe_result),
            ("summary", summary_result),
            ("stdev", stdev_result),
        ):
            if isinstance(result, BaseException):
                logger.debug("%s failed for %s: %s", metric_name, symbol, result)

        sharpe = _safe_last(sharpe_result) if not isinstance(sharpe_result, BaseException) else None
        summary = _extract_single(summary_result) if not isinstance(summary_result, BaseException) else {}
        stdev = _safe_last(stdev_result) if not isinstance(stdev_result, BaseException) else None

        return {
            "symbol": symbol,
            "period_days": days,
            "sharpe_ratio": sharpe,
            "summary": summary,
            "stdev": stdev,
        }
    except Exception:
        logger.warning("Performance metrics failed for %s", symbol, exc_info=True)
        return {"symbol": symbol, "error": "Failed to compute performance metrics"}
|
||||
|
||||
|
||||
async def get_capm(symbol: str, days: int = 365) -> dict[str, Any]:
    """Calculate CAPM metrics: beta, alpha, systematic/idiosyncratic risk.

    Args:
        symbol: Ticker symbol to analyze.
        days: Requested window in calendar days; the fetch window is at
            least ``PERF_DAYS`` so the regression has enough trading days.
            Added with a default for consistency with the sibling metric
            functions — existing callers are unaffected
            (``max(365, PERF_DAYS) == PERF_DAYS``).

    Returns:
        Dict with ``symbol`` plus the CAPM fields, or a dict with an
        ``error`` key on failure.
    """
    fetch_days = max(days, PERF_DAYS)
    start = (datetime.now() - timedelta(days=fetch_days)).strftime("%Y-%m-%d")

    try:
        hist = await asyncio.to_thread(
            obb.equity.price.historical, symbol, start_date=start, provider=PROVIDER
        )
        if not hist or not hist.results:
            return {"symbol": symbol, "error": "No historical data"}

        capm = await asyncio.to_thread(
            obb.quantitative.capm, data=hist.results, target=TARGET
        )
        return {"symbol": symbol, **_extract_single(capm)}
    except Exception:
        logger.warning("CAPM failed for %s", symbol, exc_info=True)
        return {"symbol": symbol, "error": "Failed to compute CAPM"}
|
||||
|
||||
|
||||
async def get_normality_test(symbol: str, days: int = 365) -> dict[str, Any]:
    """Run normality tests (Jarque-Bera, Shapiro-Wilk, etc.) on returns."""
    # The test needs a long sample; never fetch less than PERF_DAYS.
    window = max(days, PERF_DAYS)
    start_date = (datetime.now() - timedelta(days=window)).strftime("%Y-%m-%d")

    try:
        history = await asyncio.to_thread(
            obb.equity.price.historical,
            symbol,
            start_date=start_date,
            provider=PROVIDER,
        )
        if not history or not history.results:
            return {"symbol": symbol, "error": "No historical data"}

        outcome = await asyncio.to_thread(
            obb.quantitative.normality, data=history.results, target=TARGET
        )
        payload = _extract_single(outcome)
        return {"symbol": symbol, **payload}
    except Exception:
        logger.warning("Normality test failed for %s", symbol, exc_info=True)
        return {"symbol": symbol, "error": "Failed to compute normality tests"}
|
||||
|
||||
|
||||
async def get_unitroot_test(symbol: str, days: int = 365) -> dict[str, Any]:
    """Run unit root tests (ADF, KPSS) for stationarity."""
    # Ensure a sufficiently long sample regardless of the requested window.
    window = max(days, PERF_DAYS)
    start_date = (datetime.now() - timedelta(days=window)).strftime("%Y-%m-%d")

    try:
        history = await asyncio.to_thread(
            obb.equity.price.historical,
            symbol,
            start_date=start_date,
            provider=PROVIDER,
        )
        if not history or not history.results:
            return {"symbol": symbol, "error": "No historical data"}

        outcome = await asyncio.to_thread(
            obb.quantitative.unitroot_test, data=history.results, target=TARGET
        )
        payload = _extract_single(outcome)
        return {"symbol": symbol, **payload}
    except Exception:
        logger.warning("Unit root test failed for %s", symbol, exc_info=True)
        return {"symbol": symbol, "error": "Failed to compute unit root test"}
|
||||
|
||||
|
||||
def _extract_single(result: Any) -> dict[str, Any]:
|
||||
"""Extract data from an OBBject result (single model or list)."""
|
||||
if result is None:
|
||||
return {}
|
||||
items = getattr(result, "results", None)
|
||||
if items is None:
|
||||
return {}
|
||||
if hasattr(items, "model_dump"):
|
||||
return items.model_dump()
|
||||
if isinstance(items, list) and items:
|
||||
last = items[-1]
|
||||
return last.model_dump() if hasattr(last, "model_dump") else {}
|
||||
return {}
|
||||
|
||||
|
||||
def _safe_last(result: Any) -> dict[str, Any] | None:
|
||||
"""Get the last item from a list result, or None."""
|
||||
if result is None:
|
||||
return None
|
||||
items = getattr(result, "results", None)
|
||||
if items is None or not isinstance(items, list) or not items:
|
||||
return None
|
||||
last = items[-1]
|
||||
return last.model_dump() if hasattr(last, "model_dump") else None
|
||||
Reference in New Issue
Block a user