diff --git a/finnhub_service.py b/finnhub_service.py
index 3914d25..8c93789 100644
--- a/finnhub_service.py
+++ b/finnhub_service.py
@@ -104,6 +104,134 @@ async def get_upgrade_downgrade(
     return data if isinstance(data, list) else []
 
 
+async def get_social_sentiment(symbol: str) -> dict[str, Any]:
+    """Get social media sentiment from Reddit and Twitter.
+
+    Returns mention counts, positive/negative scores, and trends.
+    """
+    if not _is_configured():
+        return {"configured": False, "message": "Set INVEST_API_FINNHUB_API_KEY"}
+    start = (datetime.now() - timedelta(days=3)).strftime("%Y-%m-%d")
+    async with _client() as client:
+        resp = await client.get(
+            "/stock/social-sentiment",
+            params={"symbol": symbol, "from": start},
+        )
+        if resp.status_code in (403, 401):
+            logger.debug("social-sentiment requires premium, skipping")
+            return {"configured": True, "symbol": symbol, "premium_required": True, "reddit": [], "twitter": []}
+        resp.raise_for_status()
+        data = resp.json()
+    if not isinstance(data, dict):
+        return {"configured": True, "symbol": symbol, "reddit": [], "twitter": []}
+    reddit = data.get("reddit", [])
+    twitter = data.get("twitter", [])
+
+    # Compute summary stats
+    reddit_summary = _summarize_social(reddit) if reddit else None
+    twitter_summary = _summarize_social(twitter) if twitter else None
+
+    return {
+        "configured": True,
+        "symbol": symbol,
+        "reddit_summary": reddit_summary,
+        "twitter_summary": twitter_summary,
+        "reddit": reddit[-20:],
+        "twitter": twitter[-20:],
+    }
+
+
+def _summarize_social(entries: list[dict[str, Any]]) -> dict[str, Any]:
+    """Summarize social sentiment entries into aggregate stats."""
+    if not entries:
+        return {}
+    total_mentions = sum(e.get("mention", 0) for e in entries)
+    total_positive = sum(e.get("positiveScore", 0) for e in entries)
+    total_negative = sum(e.get("negativeScore", 0) for e in entries)
+    avg_score = sum(e.get("score", 0) for e in entries) / len(entries)
+    return {
+        "total_mentions": total_mentions,
+        "total_positive": total_positive,
+        "total_negative": total_negative,
+        "avg_score": round(avg_score, 4),
+        "data_points": len(entries),
+    }
+
+
+async def get_reddit_sentiment(symbol: str) -> dict[str, Any]:
+    """Get Reddit sentiment from ApeWisdom (free, no key needed).
+
+    Tracks mentions and upvotes across r/wallstreetbets, r/stocks, r/investing.
+    """
+    try:
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            resp = await client.get(
+                "https://apewisdom.io/api/v1.0/filter/all-stocks/page/1"
+            )
+            resp.raise_for_status()
+            data = resp.json()
+        results = data.get("results", [])
+
+        # Find the requested symbol
+        match = next(
+            (r for r in results if r.get("ticker", "").upper() == symbol.upper()),
+            None,
+        )
+        if match is None:
+            return {
+                "symbol": symbol,
+                "found": False,
+                "message": f"{symbol} not in Reddit top trending (not enough mentions)",
+            }
+
+        mentions_prev = match.get("mentions_24h_ago", 0)
+        mentions_now = match.get("mentions", 0)
+        change_pct = (
+            round((mentions_now - mentions_prev) / mentions_prev * 100, 1)
+            if mentions_prev > 0
+            else None
+        )
+        return {
+            "symbol": symbol,
+            "found": True,
+            "rank": match.get("rank"),
+            "mentions_24h": mentions_now,
+            "mentions_24h_ago": mentions_prev,
+            "mentions_change_pct": change_pct,
+            "upvotes": match.get("upvotes"),
+            "rank_24h_ago": match.get("rank_24h_ago"),
+        }
+    except Exception:
+        logger.warning("Reddit sentiment failed for %s", symbol, exc_info=True)
+        return {"symbol": symbol, "error": "Failed to fetch Reddit sentiment"}
+
+
+async def get_reddit_trending() -> list[dict[str, Any]]:
+    """Get top trending stocks on Reddit (ApeWisdom, free, no key)."""
+    try:
+        async with httpx.AsyncClient(timeout=10.0) as client:
+            resp = await client.get(
+                "https://apewisdom.io/api/v1.0/filter/all-stocks/page/1"
+            )
+            resp.raise_for_status()
+            data = resp.json()
+        return [
+            {
+                "rank": r.get("rank"),
+                "symbol": r.get("ticker"),
+                "name": r.get("name"),
+                "mentions_24h": r.get("mentions"),
+                "upvotes": r.get("upvotes"),
+                "rank_24h_ago": r.get("rank_24h_ago"),
+                "mentions_24h_ago": r.get("mentions_24h_ago"),
+            }
+            for r in data.get("results", [])[:25]
+        ]
+    except Exception:
+        logger.warning("Reddit trending failed", exc_info=True)
+        return []
+
+
 async def get_sentiment_summary(symbol: str) -> dict[str, Any]:
     """Aggregate all sentiment data for a symbol into one response."""
     if not _is_configured():
diff --git a/routes_sentiment.py b/routes_sentiment.py
index e20ff13..00991ca 100644
--- a/routes_sentiment.py
+++ b/routes_sentiment.py
@@ -101,3 +101,33 @@ async def stock_upgrades(symbol: str = Path(..., min_length=1, max_length=20)):
     symbol = validate_symbol(symbol)
     data = await openbb_service.get_upgrades_downgrades(symbol)
     return ApiResponse(data=data)
+
+
+@router.get("/stock/{symbol}/social-sentiment", response_model=ApiResponse)
+@safe
+async def stock_social_sentiment(
+    symbol: str = Path(..., min_length=1, max_length=20),
+):
+    """Social media sentiment from Reddit and Twitter (Finnhub)."""
+    symbol = validate_symbol(symbol)
+    data = await finnhub_service.get_social_sentiment(symbol)
+    return ApiResponse(data=data)
+
+
+@router.get("/stock/{symbol}/reddit-sentiment", response_model=ApiResponse)
+@safe
+async def stock_reddit_sentiment(
+    symbol: str = Path(..., min_length=1, max_length=20),
+):
+    """Reddit sentiment: mentions, upvotes, rank on WSB/stocks/investing (free, no key)."""
+    symbol = validate_symbol(symbol)
+    data = await finnhub_service.get_reddit_sentiment(symbol)
+    return ApiResponse(data=data)
+
+
+@router.get("/discover/reddit-trending", response_model=ApiResponse)
+@safe
+async def reddit_trending():
+    """Top 25 trending stocks on Reddit (WSB, r/stocks, r/investing). Free, no key."""
+    data = await finnhub_service.get_reddit_trending()
+    return ApiResponse(data=data)