zahraa12355 committed
Commit 111bd60 · verified · 1 Parent(s): 2625e36

Update app.py

Files changed (1)
  1. app.py +27 -26
app.py CHANGED
@@ -6,14 +6,15 @@ Automatically generated by Colab.
 Original file is located at
     https://colab.research.google.com/drive/1UgXple_p_R-0mq9p5vhOmFPo9cgdayJy
 """
-
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
+# Load the pretrained DialoGPT model
 model_name = "microsoft/DialoGPT-small"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name)
 
+# Hardcoded rule-based responses
 rules = {
     "hi": "Hello! How can I help you today?",
     "hello": "Hi there! How can I assist you?",
@@ -34,19 +35,15 @@ rules = {
     "what is ai": "AI stands for Artificial Intelligence, which is intelligence demonstrated by machines.",
     "who created you": "I was created by a talented developer using Python and machine learning!",
     "how can i learn programming": "Start with basics like Python. There are many free tutorials online to get you started!",
-    'ok': 'ok',
-    'who are you?': 'I am nino',
-    'hi nino': 'hi there',
+    "ok": "ok",
+    "who are you?": "I am nino",
+    "hi nino": "hi there",
 }
 
 def respond(user_input, history):
-    # Defensive type checks
     if not isinstance(history, list):
-        print(f"Warning: Expected list for history but got {type(history)}. Resetting history.")
         history = []
-
     if not isinstance(user_input, str):
-        print(f"Warning: Expected str for user_input but got {type(user_input)}. Coercing to str.")
         user_input = str(user_input)
 
     user_input_clean = user_input.lower().strip()
@@ -79,33 +76,37 @@ def respond(user_input, history):
         bot_reply = "I'm not sure how to respond to that. Can you rephrase it?"
 
     history.append((user_input, bot_reply))
-    return history, history
+    return history, "", history
 
 def save_chat(history):
-    if history is not None:
-        try:
-            with open("/tmp/chat_history.txt", "w", encoding="utf-8") as f:
-                for user_msg, bot_msg in history:
-                    if bot_msg is not None:
-                        f.write(f"You: {user_msg}\nBot: {bot_msg}\n\n")
-                    else:
-                        f.write(f"You: {user_msg}\nBot: (No response)\n\n")
-        except Exception as e:
-            print(f"Error saving chat: {e}")
+    try:
+        with open("/tmp/chat_history.txt", "w", encoding="utf-8") as f:
+            for user_msg, bot_msg in history:
+                if bot_msg is not None:
+                    f.write(f"You: {user_msg}\nBot: {bot_msg}\n\n")
+                else:
+                    f.write(f"You: {user_msg}\nBot: (No response)\n\n")
+    except Exception as e:
+        print("Error saving chat:", e)
 
 def process_input(user_input, history):
-    if history is None or not isinstance(history, list):
-        history = []
-    updated_history, _ = respond(user_input, history)
-    save_chat(updated_history)
-    return updated_history, "", updated_history
+    try:
+        result = respond(user_input, history)
+        save_chat(result[0])
+        return result
+    except Exception as e:
+        error_message = f"Error: {str(e)}"
+        if not isinstance(history, list):
+            history = []
+        history.append((user_input, error_message))
+        return history, "", history
 
 with gr.Blocks() as demo:
     chatbot = gr.Chatbot()
-    msg = gr.Textbox(placeholder="Type your message here...")
+    msg = gr.Textbox(placeholder="Type your message here...", show_label=False)
    state = gr.State([])
 
     msg.submit(process_input, inputs=[msg, state], outputs=[chatbot, msg, state])
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
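
Note on the submit wiring in this diff: msg.submit(process_input, inputs=[msg, state], outputs=[chatbot, msg, state]) expects the callback to return one value per output component, which is why respond now returns the updated history for the Chatbot, an empty string that clears the Textbox, and the history again for the State. Below is a minimal standalone sketch of that three-output contract, assuming only that gradio is installed; the echo callback is illustrative and is not code from this Space.

import gradio as gr

def echo(user_input, history):
    # Illustrative callback (not from app.py): append a canned reply and
    # return one value per output component wired in msg.submit below.
    history = history if isinstance(history, list) else []
    history.append((user_input, f"You said: {user_input}"))
    # (Chatbot update, cleared Textbox, updated State)
    return history, "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type your message here...", show_label=False)
    state = gr.State([])
    msg.submit(echo, inputs=[msg, state], outputs=[chatbot, msg, state])

if __name__ == "__main__":
    demo.launch()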