# app.py — FastAPI inference service for the ShazaAly/syplyd-marbert-1 model.
# NOTE(review): the original paste included Hugging Face file-viewer chrome
# (uploader "ShazaAly", commit b74cf70 verified, raw/history/blame links,
# "960 Bytes"); converted to this comment so the file parses as Python.
import logging

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from transformers import pipeline
# Define the data model for the request body
class RequestModel(BaseModel):
    """Request body for the classification endpoint.

    Attributes:
        text: the raw input string to run through the classifier.
    """

    text: str
# Initialize the FastAPI app
app = FastAPI()

# Module-level logger: routed through the server's (uvicorn/gunicorn) log
# configuration, unlike bare print() calls which write straight to stdout.
logger = logging.getLogger(__name__)

# Load the model pipeline once when the app starts.
# This is efficient as it doesn't reload the model on every request.
try:
    classifier = pipeline("text-classification", model="ShazaAly/syplyd-marbert-1")
    logger.info("Model loaded successfully!")
except Exception as e:
    # Keep the app importable in a degraded state; request handlers must
    # check `classifier is None` before using it.
    classifier = None
    logger.error("Error loading model: %s", e)
@app.get("/")
def read_root():
    """Health-check endpoint: report service status and the model id."""
    payload = {
        "status": "online",
        "model": "ShazaAly/syplyd-marbert-1",
    }
    return payload
@app.post("/classify")
def classify_intent(request: RequestModel):
    """Classify the intent of ``request.text`` with the loaded pipeline.

    Returns:
        The top prediction dict for the single input string (the pipeline
        returns one result dict per input; we pass exactly one).

    Raises:
        HTTPException: 503 if the model failed to load at startup.
    """
    if classifier is None:
        # BUG FIX: the original `return {...}, 500` does NOT set the HTTP
        # status in FastAPI — the tuple is serialized as a 200 response with
        # the JSON array [{"error": ...}, 500]. Raising HTTPException sets a
        # real error status (503: service dependency unavailable).
        raise HTTPException(status_code=503, detail="Model could not be loaded.")
    results = classifier(request.text)
    return results[0]  # first (and only) result dictionary