# -*- coding: utf-8 -*-
"""nino bot
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1UgXple_p_R-0mq9p5vhOmFPo9cgdayJy
"""
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
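
# Load DialoGPT-small, a small GPT-2-based conversational model from Microsoft.
# The weights are downloaded from the Hugging Face Hub on first run.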
model_name = "microsoft/DialoGPT-small"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
rules = {
    "hi": "Hello! How can I help you today?",
    "hello": "Hi there! How can I assist you?",
    "hey": "Hey! What can I do for you?",
    "how are you": "I'm just a bot, but I'm doing great! How about you?",
    "good morning": "Good morning! Hope you have a wonderful day!",
    "good afternoon": "Good afternoon! How can I help you?",
    "good evening": "Good evening! What can I do for you?",
    "bye": "Goodbye! Have a nice day!",
    "thank you": "You're welcome!",
    "thanks": "No problem! Happy to help!",
    "what is your name": "I'm your friendly chatbot assistant.",
    "help": "Sure! Ask me anything or type 'bye' to exit.",
    "what can you do": "I can answer simple questions and chat with you. Try saying hi!",
    "tell me a joke": "Why did the computer show up at work late? It had a hard drive!",
    "what time is it": "Sorry, I don't have a clock yet. But you can check your device's time!",
    "where are you from": "I'm from the cloud, here to assist you anytime!",
    "what is ai": "AI stands for Artificial Intelligence, which is intelligence demonstrated by machines.",
    "who created you": "I was created by a talented developer using Python and machine learning!",
    "how can i learn programming": "Start with basics like Python. There are many free tutorials online to get you started!",
    "ok": "ok",
    "who are you?": "I am nino",
    "hi nino": "hi there",
}
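
# respond() checks the rule table first; anything that does not match is
# handed to DialoGPT to generate a reply.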
def respond(user_input, history):
    if history is None:
        history = []

    user_input_clean = user_input.lower().strip()

    if user_input_clean in rules:
        bot_reply = rules[user_input_clean]
    else:
        # Build the prompt from the conversation history.
        # DialoGPT expects turns to be joined by the EOS token.
        prompt = ""
        for user_msg, bot_msg in history:
            prompt += user_msg + tokenizer.eos_token
            # Skip the bot turn if no reply was recorded.
            if bot_msg is not None:
                prompt += bot_msg + tokenizer.eos_token

        # Add the current user input.
        prompt += user_input + tokenizer.eos_token

        inputs = tokenizer(prompt, return_tensors="pt")
        outputs = model.generate(
            **inputs,
            max_new_tokens=100,
            pad_token_id=tokenizer.eos_token_id,
            do_sample=True,
            temperature=0.7,
            top_p=0.9,
        )

        # generate() returns the prompt followed by the new tokens,
        # so decode only the newly generated part.
        input_length = inputs["input_ids"].shape[-1]
        bot_reply = tokenizer.decode(
            outputs[0][input_length:], skip_special_tokens=True
        ).strip()

        # Fall back to a canned reply if the model output is too short or empty.
        if len(bot_reply) < 5 or bot_reply.lower() in ["", "idk", "i don't know", "huh"]:
            bot_reply = "I'm not sure how to respond to that. Can you rephrase it?"

    # Append the new interaction to the history.
    history.append((user_input, bot_reply))
    return history, history
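
# save_chat() rewrites chat_history.txt on every call, so the file always
# holds the full conversation so far.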
def save_chat(history):
    # Ensure history is not None before attempting to save.
    if history is not None:
        with open("chat_history.txt", "w", encoding="utf-8") as f:
            for user_msg, bot_msg in history:
                # Ensure bot_msg is not None before writing.
                if bot_msg is not None:
                    f.write(f"You: {user_msg}\nBot: {bot_msg}\n\n")
                else:
                    f.write(f"You: {user_msg}\nBot: (No response)\n\n")
# Process the input, generate a response, save the chat, and clear the textbox.
def process_input(user_input, history):
    # Get the updated history and the bot response.
    updated_history, _ = respond(user_input, history)
    # Save the updated chat history.
    save_chat(updated_history)
    # Return the history for the chatbot display, an empty string to clear the
    # textbox, and the history again for the state component.
    return updated_history, "", updated_history
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type your message here...")
    state = gr.State([])  # Holds the chat history across turns.

    # On submit, update the chatbot, clear the textbox, and refresh the state.
    msg.submit(process_input, inputs=[msg, state], outputs=[chatbot, msg, state])

demo.launch()
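
# Note: when running in Colab, demo.launch(share=True) can be used instead to
# get a temporary public link to the interface.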