feat: aggregate all sentiment sources into composite score
Redesign /stock/{symbol}/sentiment to combine 4 data sources with
weighted scoring:
- News sentiment (Alpha Vantage, 25%) - article-level bullish/bearish
- Analyst recommendations (Finnhub, 30%) - buy/sell ratio
- Upgrade/downgrade activity (yfinance, 20%) - recent actions
- Reddit buzz (ApeWisdom, 25%) - mention change trend
Returns composite_score (-1 to +1), composite_label, per-source
scores, and full detail data from each source.
This commit is contained in:
@@ -23,22 +23,112 @@ router = APIRouter(prefix="/api/v1")
|
||||
@router.get("/stock/{symbol}/sentiment", response_model=ApiResponse)
@safe
async def stock_sentiment(symbol: str = Path(..., min_length=1, max_length=20)):
    """Aggregated sentiment from all sources with composite score.

    Combines: Alpha Vantage news sentiment, Finnhub analyst data,
    Reddit mentions, and analyst upgrades into a single composite score.
    Score range: -1.0 (extreme bearish) to +1.0 (extreme bullish).

    Returns composite_score, composite_label, per-source scores and
    weights, and a details payload with raw data from each source.
    """
    symbol = validate_symbol(symbol)

    # Fetch all sources in parallel; return_exceptions=True so one
    # failing provider degrades that source instead of failing the call.
    av_data, finnhub_data, reddit_data, upgrades_data, recs_data = await asyncio.gather(
        alphavantage_service.get_news_sentiment(symbol, limit=20),
        finnhub_service.get_sentiment_summary(symbol),
        finnhub_service.get_reddit_sentiment(symbol),
        openbb_service.get_upgrades_downgrades(symbol, limit=10),
        finnhub_service.get_recommendation_trends(symbol),
        return_exceptions=True,
    )

    def _safe(result, default):
        # Replace a failed fetch with its neutral default, but keep the
        # error visible in the logs rather than swallowing it silently.
        if isinstance(result, BaseException):
            logger.exception("Sentiment source error", exc_info=result)
            return default
        return result

    av_data = _safe(av_data, {})
    finnhub_data = _safe(finnhub_data, {})
    reddit_data = _safe(reddit_data, {})
    upgrades_data = _safe(upgrades_data, [])
    recs_data = _safe(recs_data, [])

    # --- Score each source ---
    scores: list[tuple[str, float, float]] = []  # (source, score, weight)

    # 1. News sentiment (Alpha Vantage): avg_score ranges ~-0.35 to +0.35
    if isinstance(av_data, dict) and av_data.get("overall_sentiment"):
        av_score = av_data["overall_sentiment"].get("avg_score")
        if av_score is not None:
            # Normalize to -1..+1 (AV scores are typically -0.35 to +0.35)
            normalized = max(-1.0, min(1.0, av_score * 2.5))
            scores.append(("news", round(normalized, 3), 0.25))

    # 2. Analyst recommendations (Finnhub): buy/sell ratio
    if isinstance(recs_data, list) and recs_data:
        latest = recs_data[0]
        total = sum(latest.get(k, 0) for k in ("strongBuy", "buy", "hold", "sell", "strongSell"))
        if total > 0:
            bullish = latest.get("strongBuy", 0) + latest.get("buy", 0)
            bearish = latest.get("sell", 0) + latest.get("strongSell", 0)
            ratio = (bullish - bearish) / total  # -1 to +1
            scores.append(("analysts", round(ratio, 3), 0.30))

    # 3. Analyst upgrades vs downgrades (yfinance). Actions other than
    # "up"/"init"/"down" count neither way and simply dilute the score.
    if isinstance(upgrades_data, list) and upgrades_data:
        ups = sum(1 for u in upgrades_data if u.get("action") in ("up", "init"))
        downs = sum(1 for u in upgrades_data if u.get("action") == "down")
        upgrade_score = (ups - downs) / len(upgrades_data)
        scores.append(("upgrades", round(upgrade_score, 3), 0.20))

    # 4. Reddit buzz (ApeWisdom) — only scored with meaningful volume.
    if isinstance(reddit_data, dict) and reddit_data.get("found"):
        # `or 0` tolerates an explicit None in the payload, not just a
        # missing key, so the `mentions > 10` comparison cannot raise.
        mentions = reddit_data.get("mentions_24h") or 0
        change = reddit_data.get("mentions_change_pct")
        if change is not None and mentions > 10:
            # Positive change = bullish buzz, capped at +/- 1
            reddit_score = max(-1.0, min(1.0, change / 100))
            scores.append(("reddit", round(reddit_score, 3), 0.25))

    # --- Compute weighted composite ---
    # Weights are renormalized over the sources that actually reported,
    # so missing providers don't drag the composite toward zero.
    if scores:
        total_weight = sum(w for _, _, w in scores)
        composite = round(sum(s * w for _, s, w in scores) / total_weight, 3)
    else:
        composite = None

    # Map the composite score onto a human-readable label.
    if composite is None:
        label = "Unknown"
    elif composite >= 0.5:
        label = "Strong Bullish"
    elif composite >= 0.15:
        label = "Bullish"
    elif composite >= -0.15:
        label = "Neutral"
    elif composite >= -0.5:
        label = "Bearish"
    else:
        label = "Strong Bearish"

    return ApiResponse(data={
        "symbol": symbol,
        "composite_score": composite,
        "composite_label": label,
        "source_scores": {name: score for name, score, _ in scores},
        "source_weights": {name: weight for name, _, weight in scores},
        "details": {
            "news_sentiment": av_data if isinstance(av_data, dict) else {},
            "analyst_recommendations": recs_data[0] if isinstance(recs_data, list) and recs_data else {},
            "recent_upgrades": upgrades_data[:5] if isinstance(upgrades_data, list) else [],
            "reddit": reddit_data if isinstance(reddit_data, dict) else {},
            "finnhub_news": (
                finnhub_data.get("recent_news", [])[:5]
                if isinstance(finnhub_data, dict)
                else []
            ),
        },
    })
|
||||
|
||||
|
||||
@router.get("/stock/{symbol}/news-sentiment", response_model=ApiResponse)
|
||||
|
||||
Reference in New Issue
Block a user