# ZamAI Embeddings Inference Space — Hugging Face Space (ZeroGPU / "Running on Zero").
# Make sure to install dependencies: pip install sentence-transformers gradio torch spaces
# Third-party dependencies (install: pip install sentence-transformers gradio torch spaces).
import gradio as gr
from sentence_transformers import SentenceTransformer
import torch
import spaces

# Hugging Face Hub repository of the embedding model served by this Space.
MODEL_ID = "tasal9/Multilingual-ZamAI-Embeddings"

# Robust device selection: require both a usable CUDA runtime and at least one
# visible device before choosing the GPU; otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() and torch.cuda.device_count() > 0 else "cpu"
try:
    # trust_remote_code=True lets the repo ship custom model code.
    # NOTE(review): this executes code from the model repository — acceptable
    # only because MODEL_ID is a fixed, trusted repo; never pass untrusted IDs.
    model = SentenceTransformer(MODEL_ID, device=device, trust_remote_code=True)
    model_status = f"Loaded model on {device.upper()}"
except Exception as e:
    # Keep the app alive on load failure; the UI surfaces model_status instead
    # of the Space crashing at startup.
    model = None
    model_status = f"Model load error: {e}"
# NOTE(review): on ZeroGPU Spaces, GPU-using functions normally need the
# @spaces.GPU decorator (the `spaces` import above is otherwise unused) —
# confirm and add it when deploying on "Running on Zero" hardware.
def embed_text(input_text):
    """Encode *input_text* with the loaded model and return a printable summary.

    Returns a human-readable string (embedding shape plus values) rather than
    the raw array so it can be shown in a gr.Textbox. Error conditions —
    empty input, failed model load, or an encode failure — are likewise
    reported as strings instead of raising, keeping the UI responsive.
    """
    if not input_text or not input_text.strip():
        return "Please enter some text to embed."
    if model is None:
        # Model failed to load at startup; surface the stored error message.
        return model_status
    try:
        with torch.no_grad():  # inference only — no gradient tracking needed
            emb = model.encode(input_text, device=device, show_progress_bar=False)
        # encode() may return a numpy array (has .shape) or a plain sequence.
        shape = emb.shape if hasattr(emb, "shape") else (len(emb),)
        return f"Embedding shape: {shape}\n{emb}"
    except Exception as e:
        return f"Embedding error: {e}"
# --- Gradio UI: single input textbox -> embedding summary textbox ---
with gr.Blocks(title="Multilingual-ZamAI-Embeddings Inference") as demo:
    # NOTE(review): "π" below looks like a mis-encoded emoji from the original
    # source — confirm the intended glyph before changing it.
    gr.Markdown("# π Multilingual-ZamAI-Embeddings Inference Space")
    gr.Markdown("Enter text to get its embedding using the ZamAI model.")
    # Show whether the model loaded (or the load error) right in the page.
    gr.Markdown(f"### System Status: `{model_status}`")
    with gr.Row():
        with gr.Column():
            test_input = gr.Textbox(label="Input Text", lines=3, placeholder="Enter text to embed...")
            test_btn = gr.Button("Get Embedding", variant="primary")
        with gr.Column():
            test_output = gr.Textbox(label="Embedding Output", lines=8, interactive=False)
    test_btn.click(embed_text, inputs=test_input, outputs=test_output)
if __name__ == "__main__":
    # Bind to all interfaces; 7860 is the standard Hugging Face Spaces port.
    demo.launch(server_name="0.0.0.0", server_port=7860)