from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import gradio as gr
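
# Load the AraGPT2-medium tokenizer and model weights from the Hugging Face Hub
# (downloaded and cached on the first run).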
model_name = "aubmindlab/aragpt2-medium"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
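
# Wrap the model and tokenizer in a text-generation pipeline so inference is a single call.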
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
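
# Generate a short continuation for an Arabic prompt. The pipeline returns the prompt
# together with the newly generated text in the "generated_text" field.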
def generate_arabic_text(prompt):
    # max_new_tokens limits only the newly generated tokens, so longer prompts still get a reply.
    outputs = generator(prompt, max_new_tokens=20, num_return_sequences=1, do_sample=True)
    return outputs[0]["generated_text"]
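
# Build a minimal Gradio interface: a two-line text box for the Arabic prompt and a plain text output.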
iface = gr.Interface(fn=generate_arabic_text,
                     inputs=gr.Textbox(lines=2, placeholder="اكتب جملة بالعربية..."),  # "Write a sentence in Arabic..."
                     outputs="text",
                     title="AraGPT2-Medium Chat",
                     description="نموذج توليدي باللغة العربية")  # "A generative model for the Arabic language"
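
# Start the local Gradio server; launch(share=True) would also create a temporary public link.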
iface.launch()