import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import logging
import io
import pickle
from pathlib import Path
from fastapi import FastAPI, File, UploadFile, HTTPException
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from pydantic import BaseModel
from PIL import Image
import torch
import torch.nn.functional as F
from transformers import BertTokenizer, BertForSequenceClassification
import tensorflow as tf
import numpy as np
from huggingface_hub import hf_hub_download
# --- Configuration ---
logging.basicConfig(level=logging.INFO)
STATIC_DIR = Path("static")

# --- Device Configuration ---
device = torch.device('cpu')
try:
    tf.config.set_visible_devices([], 'GPU')
    logging.info("TensorFlow GPU disabled. Using CPU.")
except (RuntimeError, ValueError) as e:
    logging.warning(f"Could not disable GPU for TensorFlow: {e}")
# --- Model Loading ---
def load_models():
    """Download and initialize all models from the Hugging Face Hub."""
    logging.info("Loading all models from the Hub...")

    # Sentiment analysis model (BERT) and its tokenizer
    try:
        tokenizer = BertTokenizer.from_pretrained("muhalwan/sental")
        sentiment_model = BertForSequenceClassification.from_pretrained("muhalwan/sental")
        sentiment_model.to(device)
        logging.info("Sentiment analysis model loaded successfully.")
    except Exception as e:
        tokenizer, sentiment_model = None, None
        logging.error(f"Error loading sentiment model: {e}")

    # Cat vs. dog image classifier (Keras)
    try:
        model_path = hf_hub_download(repo_id="muhalwan/catndog", filename="catdog_best.keras")
        cat_dog_model = tf.keras.models.load_model(model_path, compile=False)
        logging.info("Cat & Dog classifier model loaded successfully.")
    except Exception as e:
        cat_dog_model = None
        logging.error(f"Error loading cat & dog model: {e}")

    # California housing price regressor (XGBoost) and its feature scaler
    try:
        xgb_model_path = hf_hub_download(repo_id="muhalwan/california_housing_price_predictor", filename="xgb_model.pkl")
        with open(xgb_model_path, "rb") as f:
            housing_model = pickle.load(f)
        scaler_path = hf_hub_download(repo_id="muhalwan/california_housing_price_predictor", filename="scaler.pkl")
        with open(scaler_path, "rb") as f:
            housing_scaler = pickle.load(f)
        logging.info("Housing price model and scaler loaded successfully.")
    except Exception as e:
        housing_model, housing_scaler = None, None
        logging.error(f"Error loading housing price model: {e}")

    return tokenizer, sentiment_model, cat_dog_model, housing_model, housing_scaler
# --- FastAPI App Initialization ---
app = FastAPI()
tokenizer, sentiment_model, cat_dog_model, housing_model, housing_scaler = load_models()
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
# --- Request Schemas ---
class SentimentRequest(BaseModel):
    text: str

class HousingRequest(BaseModel):
    MedInc: float
    HouseAge: float
    AveRooms: float
    AveBedrms: float
    Population: float
    AveOccup: float
    Latitude: float
    Longitude: float
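# Illustrative request body for the housing endpoint. The values below are an
# assumed sample roughly in the California housing feature ranges, not taken
# from any real listing:
# {"MedInc": 8.3, "HouseAge": 41, "AveRooms": 6.9, "AveBedrms": 1.0,
#  "Population": 322, "AveOccup": 2.5, "Latitude": 37.88, "Longitude": -122.23}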
# --- API Endpoints ---
# NOTE: the route decorators below were missing and are restored with assumed
# paths; adjust them to whatever the static frontend actually calls.
@app.get("/")
async def read_root():
    return FileResponse('index.html')
@app.post("/predict/sentiment")
async def predict_sentiment(request: SentimentRequest):
    if tokenizer is None or sentiment_model is None:
        raise HTTPException(status_code=503, detail="Sentiment model is not available.")
    try:
        # Tokenize, run the model, and map the higher-probability class to its label.
        inputs = tokenizer(request.text, return_tensors='pt', truncation=True, max_length=512)
        inputs = {k: v.to(device) for k, v in inputs.items()}
        with torch.no_grad():
            outputs = sentiment_model(**inputs)
        probabilities = F.softmax(outputs.logits, dim=-1).squeeze()
        labels = ['Bearish', 'Bullish']
        prediction = labels[torch.argmax(probabilities).item()]
        return {"prediction": prediction}
    except Exception as e:
        logging.error(f"Sentiment prediction error: {e}")
        raise HTTPException(status_code=500, detail="An error occurred during sentiment analysis.")
@app.post("/predict/catdog")
async def predict_catdog(file: UploadFile = File(...)):
    if cat_dog_model is None:
        raise HTTPException(status_code=503, detail="Cat & Dog model is not available.")
    try:
        contents = await file.read()
        image = Image.open(io.BytesIO(contents))
        # Resize to the model's expected input size and force 3 RGB channels
        # (grayscale, palette, or RGBA uploads would otherwise break the input shape).
        _, height, width, _ = cat_dog_model.input_shape
        img_resized = image.resize((width, height))
        if img_resized.mode != 'RGB':
            img_resized = img_resized.convert('RGB')
        img_array = tf.keras.utils.img_to_array(img_resized)
        img_array = tf.keras.applications.efficientnet.preprocess_input(img_array)
        img_array = np.expand_dims(img_array, axis=0)
        prob = cat_dog_model.predict(img_array, verbose=0)[0, 0]
        label = "Dog" if prob >= 0.5 else "Cat"
        return {"prediction": label}
    except Exception as e:
        logging.error(f"Cat/Dog prediction error: {e}")
        raise HTTPException(status_code=500, detail="An error occurred during image classification.")
@app.post("/predict/housing")
async def predict_housing(request: HousingRequest):
    if housing_model is None or housing_scaler is None:
        raise HTTPException(status_code=503, detail="Housing model is not available.")
    try:
        # Feature order must match the order the scaler and model were trained on.
        input_data = np.array([[
            request.MedInc, request.HouseAge, request.AveRooms, request.AveBedrms,
            request.Population, request.AveOccup, request.Latitude, request.Longitude
        ]])
        data_scaled = housing_scaler.transform(input_data)
        raw_prediction = housing_model.predict(data_scaled)[0]
        # The target is in units of $100,000, so scale up to dollars for display.
        final_prediction = raw_prediction * 100000
        return {"prediction": f"${final_prediction:,.2f}"}
    except Exception as e:
        logging.error(f"Housing prediction error: {e}")
        raise HTTPException(status_code=500, detail="An error occurred during housing price prediction.")
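
# --- Local Entry Point ---
# Minimal sketch for running the app outside the Spaces runtime. Assumes
# uvicorn is installed (it is not imported above) and that port 7860, the
# Hugging Face Spaces default, is the intended port.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)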