dnzblgn committed 3f6d062 (verified) · 1 Parent(s): fd10698

Update app.py

Files changed (1): app.py (+9 -28)
app.py CHANGED
@@ -1,5 +1,3 @@
-# Revised: RAG system with fallback models if Mistral fails
-
 import gradio as gr
 import torch
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
@@ -172,33 +170,16 @@ def user_query_with_rag(query, qa_chain, chatbot):
     yield history, ""

 def demo():
-    with gr.Blocks(title="RAG Analyzer", css="""
-        .chat-message { display: flex; flex-direction: column; }
-        .chat-message .text { background: #f3f3f3; padding: 8px 12px; border-radius: 10px; max-width: 80%; margin: 4px; }
-        .chat-message.user .text { align-self: flex-end; background: #d1e7dd; }
-        .chat-message.assistant .text { align-self: flex-start; background: #e2e3e5; }
-        .gradio-container { max-width: 1000px !important; margin: auto; font-family: 'Segoe UI', sans-serif; }
-        .gr-button { font-weight: bold; background-color: #0d6efd !important; color: white; }
-    """) as app:
-
+    with gr.Blocks(title="RAG Analyzer") as app:
         db_state = gr.State(None)
         chain_state = gr.State(None)
-
-        gr.Markdown("""
-        # 🧠 Customer Review Analyzer
-        Upload a `.txt` file of reviews and chat with a custom AI system based on your data.
-        """)
-
-        with gr.Row():
-            with gr.Column(scale=1):
-                file_input = gr.File(label="📄 Upload Review File (.txt)", type="filepath")
-                status = gr.Textbox(label="Status", interactive=False)
-                process_btn = gr.Button("🚀 Process Reviews", variant="primary")
-
-            with gr.Column(scale=2):
-                chatbot = gr.Chatbot(label="💬 Chat", height=500, show_copy_button=True, render=False)
-                user_input = gr.Textbox(placeholder="Ask about your reviews...", show_label=False)
-                submit_btn = gr.Button("Send", variant="secondary")
+        gr.Markdown("# 🧠 Customer Review Analyzer with Fallback RAG")
+        file_input = gr.File(label="Upload review file (.txt)", type="filepath")
+        status = gr.Textbox(label="Status")
+        chatbot = gr.Chatbot(label="Chatbot", height=400)
+        user_input = gr.Textbox(placeholder="Ask about the reviews...", show_label=False)
+        submit_btn = gr.Button("Send")
+        process_btn = gr.Button("Process Reviews")

         process_btn.click(process_and_initialize, inputs=[file_input], outputs=[db_state, chain_state, status])
         submit_btn.click(user_query_with_rag, inputs=[user_input, chain_state, chatbot], outputs=[chatbot, user_input])
@@ -207,4 +188,4 @@ def demo():
     return app

 if __name__ == "__main__":
-    demo().launch(server_name="0.0.0.0", server_port=7860, share=False)
+    demo().launch(server_name="0.0.0.0", server_port=7860, share=False)
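
Note: the two .click() handlers referenced in the simplified layout, process_and_initialize and user_query_with_rag, are defined earlier in app.py and are not part of this diff. The stubs below are only a sketch of the signatures the wiring assumes (a three-value return matching outputs=[db_state, chain_state, status], and a generator yielding (history, "") to update the chat and clear the textbox); they are not the actual implementations.

# Sketch only: placeholder handlers matching the .click() wiring above.
# The real functions in app.py build the vector store and the RAG chain.

def process_and_initialize(file_path):
    """Handler for process_btn: returns (db, qa_chain, status_message)."""
    db, qa_chain = None, None  # placeholders for the vector DB and RAG chain
    return db, qa_chain, f"Processed reviews from {file_path}"

def user_query_with_rag(query, qa_chain, chatbot):
    """Handler for submit_btn: a generator yielding (chat_history, textbox_value)."""
    history = (chatbot or []) + [(query, "(answer produced by the RAG chain)")]
    yield history, ""  # the empty string clears the input textbox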