Adinarayana02 committed
Commit 54762a7 · verified · 1 Parent(s): 244aeeb

Create app.py

Files changed (1)
  1. app.py +45 -0
app.py ADDED
@@ -0,0 +1,45 @@
+ import os
+ import httpx
+ import streamlit as st
+
+ # Retrieve API keys from environment variables
+ HF_TOKEN = os.getenv("HF_TOKEN", "default_hf_token")
+
+ # Initialize Hugging Face API endpoint
+ HF_MODEL_URL = "https://api-inference.huggingface.co/models/Xenova/gpt-3.5-turbo"
+
+ # Function to get response from Hugging Face model
+ def get_response(user_query: str) -> str:
+     """Get a response from the Hugging Face model for the given user query."""
+     try:
+         headers = {"Authorization": f"Bearer {HF_TOKEN}"}
+         payload = {"inputs": user_query}
+         response = httpx.post(HF_MODEL_URL, headers=headers, json=payload)
+         response.raise_for_status()
+         result = response.json()
+
+         # Check if result is a list and handle accordingly
+         if isinstance(result, list):
+             response_text = result[0].get("generated_text", "No response generated.")
+         else:
+             response_text = "Unexpected response format."
+
+         return response_text
+
+     except Exception as e:
+         return f"Error: {e}"
+
+ # Streamlit UI for customer support chatbot
+ st.title("Customer Support Chatbot")
+
+ user_query = st.text_input("Enter your query:", "")
+
+ if st.button("Get Response"):
+     with st.spinner("Processing..."):
+         try:
+             # Call the get_response function
+             response = get_response(user_query)
+             st.subheader("Chatbot Response")
+             st.write(response)
+         except Exception as e:
+             st.error(f"Error fetching response: {e}")
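A note on the response handling above (not part of the commit): the Hugging Face Inference API typically returns text-generation results as a JSON list of objects with a "generated_text" field, which is why get_response indexes result[0]. A minimal sketch of that parsing path, using a hard-coded sample payload instead of a live API call:

# Illustrative sketch only: parse a response shaped the way get_response expects.
sample_result = [{"generated_text": "Hello! How can I help you today?"}]

if isinstance(sample_result, list) and sample_result:
    reply = sample_result[0].get("generated_text", "No response generated.")
else:
    reply = "Unexpected response format."

print(reply)

Assuming a valid access token is exported as HF_TOKEN (the "default_hf_token" fallback would be rejected by the API), the app would normally be started with Streamlit's CLI via `streamlit run app.py`.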