""" |
|
|
Direct API Router - Complete REST Endpoints |
|
|
All external API integrations exposed through REST endpoints |
|
|
NO PIPELINES - Direct model loading and inference |
|
|
""" |
|
|
|
|
|
from fastapi import APIRouter, HTTPException, Query, Body |
|
|
from fastapi.responses import JSONResponse |
|
|
from typing import Optional, List, Dict, Any |
|
|
from pydantic import BaseModel |
|
|
from datetime import datetime |
|
|
import logging |
|
|
|
|
|
|
|
|
from backend.services.direct_model_loader import direct_model_loader |
|
|
from backend.services.dataset_loader import crypto_dataset_loader |
|
|
from backend.services.external_api_clients import ( |
|
|
alternative_me_client, |
|
|
reddit_client, |
|
|
rss_feed_client |
|
|
) |
|
|
from backend.services.coingecko_client import coingecko_client |
|
|
from backend.services.binance_client import binance_client |
|
|
from backend.services.crypto_news_client import crypto_news_client |
|
|
|
|
|
logger = logging.getLogger(__name__) |
|
|
|
|
|
router = APIRouter( |
|
|
prefix="/api/v1", |
|
|
tags=["Direct API - External Services"] |
|
|
) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
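# Illustrative sketch (not part of this module): the router above is meant to be
# mounted on the main FastAPI application with `app.include_router`. The import
# path below is an assumption; adjust it to wherever this module actually lives.
#
#     from fastapi import FastAPI
#     from backend.routers.direct_api import router as direct_api_router
#
#     app = FastAPI()
#     app.include_router(direct_api_router)
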
class SentimentRequest(BaseModel):
    """Sentiment analysis request"""
    text: str
    model_key: Optional[str] = "cryptobert_elkulako"


class BatchSentimentRequest(BaseModel):
    """Batch sentiment analysis request"""
    texts: List[str]
    model_key: Optional[str] = "cryptobert_elkulako"


class DatasetQueryRequest(BaseModel):
    """Dataset query request"""
    dataset_key: str
    filters: Optional[Dict[str, Any]] = None
    limit: int = 100

@router.get("/coingecko/price") |
|
|
async def get_coingecko_prices( |
|
|
symbols: Optional[str] = Query(None, description="Comma-separated symbols (e.g., BTC,ETH)"), |
|
|
limit: int = Query(100, description="Maximum number of coins") |
|
|
): |
|
|
""" |
|
|
Get real-time cryptocurrency prices from CoinGecko |
|
|
|
|
|
Examples: |
|
|
- `/api/v1/coingecko/price?symbols=BTC,ETH` |
|
|
- `/api/v1/coingecko/price?limit=50` |
|
|
""" |
|
|
try: |
|
|
symbol_list = symbols.split(",") if symbols else None |
|
|
result = await coingecko_client.get_market_prices( |
|
|
symbols=symbol_list, |
|
|
limit=limit |
|
|
) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"data": result, |
|
|
"source": "coingecko", |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β CoinGecko price endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/coingecko/trending") |
|
|
async def get_coingecko_trending( |
|
|
limit: int = Query(10, description="Number of trending coins") |
|
|
): |
|
|
""" |
|
|
Get trending cryptocurrencies from CoinGecko |
|
|
""" |
|
|
try: |
|
|
result = await coingecko_client.get_trending_coins(limit=limit) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"data": result, |
|
|
"source": "coingecko", |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β CoinGecko trending endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/binance/klines") |
|
|
async def get_binance_klines( |
|
|
symbol: str = Query(..., description="Symbol (e.g., BTC, BTCUSDT)"), |
|
|
timeframe: str = Query("1h", description="Timeframe (1m, 5m, 15m, 1h, 4h, 1d)"), |
|
|
limit: int = Query(1000, description="Number of candles (max 1000)") |
|
|
): |
|
|
""" |
|
|
Get OHLCV candlestick data from Binance |
|
|
|
|
|
Examples: |
|
|
- `/api/v1/binance/klines?symbol=BTC&timeframe=1h&limit=100` |
|
|
- `/api/v1/binance/klines?symbol=ETHUSDT&timeframe=4h&limit=500` |
|
|
""" |
|
|
try: |
|
|
result = await binance_client.get_ohlcv( |
|
|
symbol=symbol, |
|
|
timeframe=timeframe, |
|
|
limit=limit |
|
|
) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"data": result, |
|
|
"source": "binance", |
|
|
"symbol": symbol, |
|
|
"timeframe": timeframe, |
|
|
"count": len(result), |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Binance klines endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/ohlcv/{symbol}") |
|
|
async def get_ohlcv( |
|
|
symbol: str, |
|
|
interval: str = Query("1d", description="Interval: 1m, 5m, 15m, 1h, 4h, 1d"), |
|
|
limit: int = Query(30, description="Number of candles") |
|
|
): |
|
|
""" |
|
|
Get OHLCV data for a cryptocurrency symbol |
|
|
|
|
|
This endpoint provides a unified interface for OHLCV data with automatic fallback. |
|
|
Tries Binance first, then CoinGecko as fallback. |
|
|
|
|
|
Examples: |
|
|
- `/api/v1/ohlcv/BTC?interval=1d&limit=30` |
|
|
- `/api/v1/ohlcv/ETH?interval=1h&limit=100` |
|
|
""" |
|
|
try: |
|
|
|
|
|
try: |
|
|
binance_symbol = f"{symbol.upper()}USDT" |
|
|
result = await binance_client.get_ohlcv( |
|
|
symbol=binance_symbol, |
|
|
timeframe=interval, |
|
|
limit=limit |
|
|
) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"symbol": symbol.upper(), |
|
|
"interval": interval, |
|
|
"data": result, |
|
|
"source": "binance", |
|
|
"count": len(result), |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
except Exception as binance_error: |
|
|
logger.warning(f"β Binance failed for {symbol}: {binance_error}") |
|
|
|
|
|
|
|
|
try: |
|
|
coin_id = symbol.lower() |
|
|
result = await coingecko_client.get_ohlc( |
|
|
coin_id=coin_id, |
|
|
days=30 if interval == "1d" else 7 |
|
|
) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"symbol": symbol.upper(), |
|
|
"interval": interval, |
|
|
"data": result, |
|
|
"source": "coingecko", |
|
|
"count": len(result), |
|
|
"timestamp": datetime.utcnow().isoformat(), |
|
|
"fallback_used": True |
|
|
} |
|
|
except Exception as coingecko_error: |
|
|
logger.error(f"β Both Binance and CoinGecko failed for {symbol}") |
|
|
raise HTTPException( |
|
|
status_code=503, |
|
|
detail=f"Failed to fetch OHLCV data: Binance error: {str(binance_error)}, CoinGecko error: {str(coingecko_error)}" |
|
|
) |
|
|
|
|
|
except HTTPException: |
|
|
raise |
|
|
except Exception as e: |
|
|
logger.error(f"β OHLCV endpoint failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
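# Illustrative usage note for the unified OHLCV endpoint above: the response's
# "source" field reports which provider served the data ("binance" or
# "coingecko"), and "fallback_used" only appears when CoinGecko was used.
# Hypothetical client-side check (assumes the `requests` package and a local
# deployment on http://localhost:8000):
#
#     r = requests.get("http://localhost:8000/api/v1/ohlcv/BTC?interval=1d&limit=30").json()
#     if r.get("fallback_used"):
#         print(f"Served by fallback provider: {r['source']}")
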
@router.get("/binance/ticker") |
|
|
async def get_binance_ticker( |
|
|
symbol: str = Query(..., description="Symbol (e.g., BTC)") |
|
|
): |
|
|
""" |
|
|
Get 24-hour ticker data from Binance |
|
|
""" |
|
|
try: |
|
|
result = await binance_client.get_24h_ticker(symbol=symbol) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"data": result, |
|
|
"source": "binance", |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Binance ticker endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/alternative/fng") |
|
|
async def get_fear_greed_index( |
|
|
limit: int = Query(1, description="Number of historical data points") |
|
|
): |
|
|
""" |
|
|
Get Fear & Greed Index from Alternative.me |
|
|
|
|
|
Examples: |
|
|
- `/api/v1/alternative/fng` - Current index |
|
|
- `/api/v1/alternative/fng?limit=30` - Last 30 days |
|
|
""" |
|
|
try: |
|
|
result = await alternative_me_client.get_fear_greed_index(limit=limit) |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Alternative.me endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/reddit/top") |
|
|
async def get_reddit_top_posts( |
|
|
subreddit: str = Query("cryptocurrency", description="Subreddit name"), |
|
|
time_filter: str = Query("day", description="Time filter (hour, day, week, month)"), |
|
|
limit: int = Query(25, description="Number of posts") |
|
|
): |
|
|
""" |
|
|
Get top posts from Reddit cryptocurrency subreddits |
|
|
|
|
|
Examples: |
|
|
- `/api/v1/reddit/top?subreddit=cryptocurrency&time_filter=day&limit=25` |
|
|
- `/api/v1/reddit/top?subreddit=bitcoin&time_filter=week&limit=50` |
|
|
""" |
|
|
try: |
|
|
result = await reddit_client.get_top_posts( |
|
|
subreddit=subreddit, |
|
|
time_filter=time_filter, |
|
|
limit=limit |
|
|
) |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Reddit endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/reddit/new") |
|
|
async def get_reddit_new_posts( |
|
|
subreddit: str = Query("cryptocurrency", description="Subreddit name"), |
|
|
limit: int = Query(25, description="Number of posts") |
|
|
): |
|
|
""" |
|
|
Get new posts from Reddit cryptocurrency subreddits |
|
|
""" |
|
|
try: |
|
|
result = await reddit_client.get_new_posts( |
|
|
subreddit=subreddit, |
|
|
limit=limit |
|
|
) |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Reddit endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/rss/feed") |
|
|
async def get_rss_feed( |
|
|
feed_name: str = Query(..., description="Feed name (coindesk, cointelegraph, bitcoinmagazine, decrypt, theblock)"), |
|
|
limit: int = Query(20, description="Number of articles") |
|
|
): |
|
|
""" |
|
|
Get news articles from RSS feeds |
|
|
|
|
|
Available feeds: coindesk, cointelegraph, bitcoinmagazine, decrypt, theblock |
|
|
|
|
|
Examples: |
|
|
- `/api/v1/rss/feed?feed_name=coindesk&limit=20` |
|
|
- `/api/v1/rss/feed?feed_name=cointelegraph&limit=10` |
|
|
""" |
|
|
try: |
|
|
result = await rss_feed_client.fetch_feed( |
|
|
feed_name=feed_name, |
|
|
limit=limit |
|
|
) |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β RSS feed endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/rss/all") |
|
|
async def get_all_rss_feeds( |
|
|
limit_per_feed: int = Query(10, description="Articles per feed") |
|
|
): |
|
|
""" |
|
|
Get news articles from all RSS feeds |
|
|
""" |
|
|
try: |
|
|
result = await rss_feed_client.fetch_all_feeds( |
|
|
limit_per_feed=limit_per_feed |
|
|
) |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β RSS all feeds endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/coindesk/rss") |
|
|
async def get_coindesk_rss( |
|
|
limit: int = Query(20, description="Number of articles") |
|
|
): |
|
|
""" |
|
|
Get CoinDesk RSS feed |
|
|
|
|
|
Direct endpoint: https://www.coindesk.com/arc/outboundfeeds/rss/ |
|
|
""" |
|
|
try: |
|
|
result = await rss_feed_client.fetch_feed("coindesk", limit) |
|
|
return result |
|
|
except Exception as e: |
|
|
logger.error(f"β CoinDesk RSS failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/cointelegraph/rss") |
|
|
async def get_cointelegraph_rss( |
|
|
limit: int = Query(20, description="Number of articles") |
|
|
): |
|
|
""" |
|
|
Get CoinTelegraph RSS feed |
|
|
|
|
|
Direct endpoint: https://cointelegraph.com/rss |
|
|
""" |
|
|
try: |
|
|
result = await rss_feed_client.fetch_feed("cointelegraph", limit) |
|
|
return result |
|
|
except Exception as e: |
|
|
logger.error(f"β CoinTelegraph RSS failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/news/latest") |
|
|
async def get_latest_crypto_news( |
|
|
limit: int = Query(20, description="Number of articles") |
|
|
): |
|
|
""" |
|
|
Get latest cryptocurrency news from multiple sources |
|
|
(Aggregates NewsAPI, CryptoPanic, and RSS feeds) |
|
|
""" |
|
|
try: |
|
|
result = await crypto_news_client.get_latest_news(limit=limit) |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"data": result, |
|
|
"count": len(result), |
|
|
"source": "aggregated", |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Crypto news endpoint failed: {e}") |
|
|
raise HTTPException(status_code=503, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/hf/sentiment") |
|
|
async def analyze_sentiment(request: SentimentRequest): |
|
|
""" |
|
|
Analyze sentiment using HuggingFace models with automatic fallback |
|
|
|
|
|
Available models (in fallback order): |
|
|
- cryptobert_elkulako (default): ElKulako/cryptobert |
|
|
- cryptobert_kk08: kk08/CryptoBERT |
|
|
- finbert: ProsusAI/finbert |
|
|
- twitter_sentiment: cardiffnlp/twitter-roberta-base-sentiment |
|
|
|
|
|
Example: |
|
|
```json |
|
|
{ |
|
|
"text": "Bitcoin price is surging to new heights!", |
|
|
"model_key": "cryptobert_elkulako" |
|
|
} |
|
|
``` |
|
|
""" |
|
|
|
|
|
fallback_models = [ |
|
|
request.model_key, |
|
|
"cryptobert_kk08", |
|
|
"finbert", |
|
|
"twitter_sentiment" |
|
|
] |
|
|
|
|
|
last_error = None |
|
|
|
|
|
for model_key in fallback_models: |
|
|
try: |
|
|
result = await direct_model_loader.predict_sentiment( |
|
|
text=request.text, |
|
|
model_key=model_key |
|
|
) |
|
|
|
|
|
|
|
|
if model_key != request.model_key: |
|
|
result["fallback_used"] = True |
|
|
result["primary_model"] = request.model_key |
|
|
result["actual_model"] = model_key |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.warning(f"β Model {model_key} failed: {e}") |
|
|
last_error = e |
|
|
continue |
|
|
|
|
|
|
|
|
logger.error(f"β All sentiment models failed. Last error: {last_error}") |
|
|
raise HTTPException( |
|
|
status_code=503, |
|
|
detail={ |
|
|
"error": "All sentiment models unavailable", |
|
|
"message": "Sentiment analysis service is temporarily unavailable", |
|
|
"tried_models": fallback_models, |
|
|
"last_error": str(last_error), |
|
|
"degraded_response": { |
|
|
"sentiment": "neutral", |
|
|
"score": 0.5, |
|
|
"confidence": 0.0, |
|
|
"method": "fallback", |
|
|
"warning": "Using degraded mode - all models unavailable" |
|
|
} |
|
|
} |
|
|
) |
|
|
|
|
|
|
|
|
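# Illustrative note for the sentiment endpoint above: when the requested model
# fails and a later model in the fallback chain answers, the response carries
# "fallback_used", "primary_model" and "actual_model" so callers can tell which
# model actually produced the score. Hypothetical client-side check (assumes the
# `requests` package and a local deployment on http://localhost:8000):
#
#     resp = requests.post(
#         "http://localhost:8000/api/v1/hf/sentiment",
#         json={"text": "Bitcoin is surging", "model_key": "cryptobert_elkulako"},
#     ).json()
#     if resp.get("fallback_used"):
#         print(f"Answered by {resp['actual_model']} instead of {resp['primary_model']}")
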
@router.post("/hf/sentiment/batch") |
|
|
async def analyze_sentiment_batch(request: BatchSentimentRequest): |
|
|
""" |
|
|
Batch sentiment analysis (NO PIPELINE) |
|
|
|
|
|
Example: |
|
|
```json |
|
|
{ |
|
|
"texts": [ |
|
|
"Bitcoin is mooning!", |
|
|
"Ethereum looks bearish today", |
|
|
"Market is neutral" |
|
|
], |
|
|
"model_key": "cryptobert_elkulako" |
|
|
} |
|
|
``` |
|
|
""" |
|
|
try: |
|
|
result = await direct_model_loader.batch_predict_sentiment( |
|
|
texts=request.texts, |
|
|
model_key=request.model_key |
|
|
) |
|
|
|
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Batch sentiment analysis failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/hf/models") |
|
|
async def get_loaded_models(): |
|
|
""" |
|
|
Get list of loaded HuggingFace models |
|
|
""" |
|
|
try: |
|
|
result = direct_model_loader.get_loaded_models() |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Get models failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.post("/hf/models/load") |
|
|
async def load_model( |
|
|
model_key: str = Query(..., description="Model key to load") |
|
|
): |
|
|
""" |
|
|
Load a specific HuggingFace model |
|
|
|
|
|
Available models: |
|
|
- cryptobert_elkulako |
|
|
- cryptobert_kk08 |
|
|
- finbert |
|
|
- twitter_sentiment |
|
|
""" |
|
|
try: |
|
|
result = await direct_model_loader.load_model(model_key) |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Load model failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.post("/hf/models/load-all") |
|
|
async def load_all_models(): |
|
|
""" |
|
|
Load all configured HuggingFace models |
|
|
""" |
|
|
try: |
|
|
result = await direct_model_loader.load_all_models() |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Load all models failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/hf/datasets") |
|
|
async def get_loaded_datasets(): |
|
|
""" |
|
|
Get list of loaded HuggingFace datasets |
|
|
""" |
|
|
try: |
|
|
result = crypto_dataset_loader.get_loaded_datasets() |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Get datasets failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.post("/hf/datasets/load") |
|
|
async def load_dataset( |
|
|
dataset_key: str = Query(..., description="Dataset key to load"), |
|
|
split: Optional[str] = Query(None, description="Dataset split"), |
|
|
streaming: bool = Query(False, description="Enable streaming") |
|
|
): |
|
|
""" |
|
|
Load a specific HuggingFace dataset |
|
|
|
|
|
Available datasets: |
|
|
- cryptocoin: linxy/CryptoCoin |
|
|
- bitcoin_btc_usdt: WinkingFace/CryptoLM-Bitcoin-BTC-USDT |
|
|
- ethereum_eth_usdt: WinkingFace/CryptoLM-Ethereum-ETH-USDT |
|
|
- solana_sol_usdt: WinkingFace/CryptoLM-Solana-SOL-USDT |
|
|
- ripple_xrp_usdt: WinkingFace/CryptoLM-Ripple-XRP-USDT |
|
|
""" |
|
|
try: |
|
|
result = await crypto_dataset_loader.load_dataset( |
|
|
dataset_key=dataset_key, |
|
|
split=split, |
|
|
streaming=streaming |
|
|
) |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Load dataset failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.post("/hf/datasets/load-all") |
|
|
async def load_all_datasets( |
|
|
streaming: bool = Query(False, description="Enable streaming") |
|
|
): |
|
|
""" |
|
|
Load all configured HuggingFace datasets |
|
|
""" |
|
|
try: |
|
|
result = await crypto_dataset_loader.load_all_datasets(streaming=streaming) |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Load all datasets failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/hf/datasets/sample") |
|
|
async def get_dataset_sample( |
|
|
dataset_key: str = Query(..., description="Dataset key"), |
|
|
num_samples: int = Query(10, description="Number of samples"), |
|
|
split: Optional[str] = Query(None, description="Dataset split") |
|
|
): |
|
|
""" |
|
|
Get sample rows from a dataset |
|
|
""" |
|
|
try: |
|
|
result = await crypto_dataset_loader.get_dataset_sample( |
|
|
dataset_key=dataset_key, |
|
|
num_samples=num_samples, |
|
|
split=split |
|
|
) |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Get dataset sample failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.post("/hf/datasets/query") |
|
|
async def query_dataset(request: DatasetQueryRequest): |
|
|
""" |
|
|
Query dataset with filters |
|
|
|
|
|
Example: |
|
|
```json |
|
|
{ |
|
|
"dataset_key": "bitcoin_btc_usdt", |
|
|
"filters": {"price": 50000}, |
|
|
"limit": 100 |
|
|
} |
|
|
``` |
|
|
""" |
|
|
try: |
|
|
result = await crypto_dataset_loader.query_dataset( |
|
|
dataset_key=request.dataset_key, |
|
|
filters=request.filters, |
|
|
limit=request.limit |
|
|
) |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Query dataset failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
@router.get("/hf/datasets/stats") |
|
|
async def get_dataset_stats( |
|
|
dataset_key: str = Query(..., description="Dataset key") |
|
|
): |
|
|
""" |
|
|
Get statistics about a dataset |
|
|
""" |
|
|
try: |
|
|
result = await crypto_dataset_loader.get_dataset_stats(dataset_key=dataset_key) |
|
|
return result |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β Get dataset stats failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@router.get("/status") |
|
|
async def get_system_status(): |
|
|
""" |
|
|
Get overall system status |
|
|
""" |
|
|
try: |
|
|
models_info = direct_model_loader.get_loaded_models() |
|
|
datasets_info = crypto_dataset_loader.get_loaded_datasets() |
|
|
|
|
|
return { |
|
|
"success": True, |
|
|
"status": "operational", |
|
|
"models": { |
|
|
"total_configured": models_info["total_configured"], |
|
|
"total_loaded": models_info["total_loaded"], |
|
|
"device": models_info["device"] |
|
|
}, |
|
|
"datasets": { |
|
|
"total_configured": datasets_info["total_configured"], |
|
|
"total_loaded": datasets_info["total_loaded"] |
|
|
}, |
|
|
"external_apis": { |
|
|
"coingecko": "available", |
|
|
"binance": "available", |
|
|
"alternative_me": "available", |
|
|
"reddit": "available", |
|
|
"rss_feeds": "available" |
|
|
}, |
|
|
"timestamp": datetime.utcnow().isoformat() |
|
|
} |
|
|
|
|
|
except Exception as e: |
|
|
logger.error(f"β System status failed: {e}") |
|
|
raise HTTPException(status_code=500, detail=str(e)) |
|
|
|
|
|
|
|
|
|
|
|
__all__ = ["router"]

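if __name__ == "__main__":  # pragma: no cover
    # Minimal smoke-test sketch, not part of the router itself. It assumes the
    # FastAPI app that includes this router is already running locally on
    # http://localhost:8000; adjust the base URL for other deployments.
    import json
    import urllib.request

    base_url = "http://localhost:8000/api/v1"
    for path in ("/status", "/coingecko/price?symbols=BTC,ETH", "/alternative/fng"):
        with urllib.request.urlopen(base_url + path, timeout=30) as resp:
            payload = json.loads(resp.read().decode("utf-8"))
        print(path, "->", str(payload)[:200])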