from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import gradio as gr

# Load the model and tokenizer
model_name = "aubmindlab/aragpt2-medium"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Set up the text-generation pipeline
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Generation function used by the Gradio interface
def generate_arabic_text(prompt):
    # Note: max_length counts the prompt tokens plus the generated continuation
    outputs = generator(prompt, max_length=20, num_return_sequences=1, do_sample=True)
    return outputs[0]["generated_text"]

# Gradio interface
iface = gr.Interface(
    fn=generate_arabic_text,
    inputs=gr.Textbox(lines=2, placeholder="اكتب جملة بالعربية..."),  # "Write a sentence in Arabic..."
    outputs="text",
    title="AraGPT2-Medium Chat",
    description="نموذج توليدي باللغة العربية",  # "A generative Arabic-language model"
)

iface.launch()
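
# Optional sketches (assumptions, not part of the original listing):
#
# 1) Quick smoke test before opening the UI -- call the generation function
#    directly to confirm the model loads and produces Arabic text:
#
#        print(generate_arabic_text("اللغة العربية"))
#
# 2) When running in a hosted notebook (e.g. Colab), launching with sharing
#    enabled returns a temporary public URL for the demo:
#
#        iface.launch(share=True)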