refactor: address architect review findings (6 items)
R1: Extend @safe to catch ValueError->400, simplify routes_backtest
(eliminated 4 copies of duplicated try/except)
R2: Consolidate PROVIDER constant into obb_utils.py (single source)
R3: Add days_ago() helper to obb_utils.py, replace 8+ duplications
R4: Extract Reddit/ApeWisdom into reddit_service.py from finnhub_service
R5: Fix missing top-level import asyncio in finnhub_service
R6: (deferred - sentiment logic extraction is a larger change)
All 561 tests passing.
This commit is contained in:
80
reddit_service.py
Normal file
80
reddit_service.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""Reddit stock sentiment via ApeWisdom API (free, no key needed)."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# ApeWisdom aggregates Reddit ticker mentions; this endpoint returns the
# current top trending stocks (page 1, across all tracked subreddits).
APEWISDOM_URL = "https://apewisdom.io/api/v1.0/filter/all-stocks/page/1"

# Per-request HTTP timeout in seconds for ApeWisdom calls.
TIMEOUT = 10.0
|
||||
|
||||
|
||||
async def get_reddit_sentiment(symbol: str) -> dict[str, Any]:
    """Get Reddit sentiment for a symbol.

    Tracks mentions and upvotes across r/wallstreetbets, r/stocks, r/investing.

    Returns a dict with ``found: True`` plus rank/mention/upvote stats when the
    symbol appears in ApeWisdom's trending list, ``found: False`` with a message
    when it does not, or an ``error`` key if the fetch fails.
    """
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            response = await client.get(APEWISDOM_URL)
            response.raise_for_status()
            payload = response.json()

        # Case-insensitive ticker lookup in the trending results.
        wanted = symbol.upper()
        entry = None
        for row in payload.get("results", []):
            if row.get("ticker", "").upper() == wanted:
                entry = row
                break

        if entry is None:
            return {
                "symbol": symbol,
                "found": False,
                "message": f"{symbol} not in Reddit top trending (not enough mentions)",
            }

        previous = entry.get("mentions_24h_ago", 0)
        current = entry.get("mentions", 0)
        # Percentage change is undefined without a prior-day baseline.
        if previous > 0:
            change_pct = round((current - previous) / previous * 100, 1)
        else:
            change_pct = None

        return {
            "symbol": symbol,
            "found": True,
            "rank": entry.get("rank"),
            "mentions_24h": current,
            "mentions_24h_ago": previous,
            "mentions_change_pct": change_pct,
            "upvotes": entry.get("upvotes"),
            "rank_24h_ago": entry.get("rank_24h_ago"),
        }
    except Exception:
        # Best-effort boundary: sentiment is supplemental, so any failure
        # (network, HTTP status, bad JSON) is logged and reported as data.
        logger.warning("Reddit sentiment failed for %s", symbol, exc_info=True)
        return {"symbol": symbol, "error": "Failed to fetch Reddit sentiment"}
|
||||
|
||||
|
||||
async def get_reddit_trending(limit: int = 25) -> list[dict[str, Any]]:
    """Get top trending stocks on Reddit (free, no key).

    Args:
        limit: Maximum number of trending entries to return. Defaults to 25,
            matching the previously hard-coded cap, so existing callers are
            unaffected.

    Returns:
        A list of dicts with rank, symbol, name, and mention/upvote stats,
        or an empty list if the ApeWisdom request fails.
    """
    try:
        async with httpx.AsyncClient(timeout=TIMEOUT) as client:
            resp = await client.get(APEWISDOM_URL)
            resp.raise_for_status()
            data = resp.json()
            return [
                {
                    "rank": r.get("rank"),
                    "symbol": r.get("ticker"),
                    "name": r.get("name"),
                    "mentions_24h": r.get("mentions"),
                    "upvotes": r.get("upvotes"),
                    "rank_24h_ago": r.get("rank_24h_ago"),
                    "mentions_24h_ago": r.get("mentions_24h_ago"),
                }
                for r in data.get("results", [])[:limit]
            ]
    except Exception:
        # Best-effort boundary: trending data is non-critical, so any failure
        # (network, HTTP status, bad JSON) degrades to an empty list.
        logger.warning("Reddit trending failed", exc_info=True)
        return []
|
||||
Reference in New Issue
Block a user