# criando_chatbot / app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Load the DialoGPT model and tokenizer
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
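# Optional sketch (not part of the original app): if a GPU is available, the model
# could be moved to it; generation inputs would then also need .to(device).
#   device = "cuda" if torch.cuda.is_available() else "cpu"
#   model = model.to(device)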
# Generate a reply, carrying the conversation history as a tensor of token ids
def generate_response(user_input, chat_history=None):
    # Encode the user input and append the end-of-sequence token
    new_user_input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors='pt')

    # Concatenate the user input with the conversation history, if any
    if chat_history is not None:
        bot_input_ids = torch.cat([chat_history, new_user_input_ids], dim=-1)
    else:
        bot_input_ids = new_user_input_ids

    # Generate a response (DialoGPT has no dedicated pad token, so EOS is used for padding)
    response_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)

    # Decode only the newly generated tokens
    response = tokenizer.decode(response_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)

    # The full sequence (history + new reply) becomes the history for the next turn
    chat_history = response_ids

    return response, chat_history
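# A minimal sketch (not part of the Space) of how generate_response threads the history
# across two turns; the prompts are illustrative, and it is kept commented out so it
# does not run when the Space starts:
#
#   reply1, history = generate_response("Hello, how are you?")
#   reply2, history = generate_response("What do you like to do?", history)
#   print(reply1, reply2)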
# Gradio interface; a "state" component carries the conversation history between turns,
# matching the two arguments and two return values of generate_response
demo = gr.Interface(
    fn=generate_response,
    inputs=["text", "state"],
    outputs=["text", "state"],
    title="DialoGPT Chat",
    description="Chat with the DialoGPT model",
    allow_flagging="never"
)
# Launch the interface
demo.launch()
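# When running locally instead of on Spaces, launch() also accepts options such as
# server_name and server_port, e.g. demo.launch(server_name="0.0.0.0", server_port=7860);
# the values shown are only illustrative (7860 is Gradio's default port).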