dnzblgn committed
Commit 9e3753a · verified · 1 Parent(s): ef9db0b

Update app.py

Files changed (1)
  1. app.py +2 -3
app.py CHANGED
@@ -109,10 +109,10 @@ def create_faiss_index(chunks):
         doc_file.write(chunk + "\n--END--\n")
 
 def handle_uploaded_file(file):
-    # Convert Gradio file object to a string and save it locally for processing
+    # Save the contents directly from the NamedString
     file_path = os.path.join(UPLOAD_FOLDER, "uploaded_comments.txt")
     with open(file_path, "w", encoding="utf-8") as f:
-        f.write(file.read())  # Read the contents from NamedString and write to a file
+        f.write(file)  # `file` is already the content of the file as a string
 
     with open(file_path, "r", encoding="utf-8") as f:
         comments = f.readlines()
@@ -133,7 +133,6 @@ def handle_uploaded_file(file):
 
     return "File uploaded and processed successfully."
 
-
 def mistral_generate_response(prompt):
     inputs = mistral_tokenizer(prompt, return_tensors="pt").to("cuda")
     with torch.no_grad():
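
A note on the change (not part of the commit): depending on the Gradio version and how the upload component is configured, the value passed into handle_uploaded_file may be the raw text (as this commit assumes), a temp-file path, or a file-like object. A minimal defensive sketch that normalizes all three cases, reusing the names from app.py (UPLOAD_FOLDER, handle_uploaded_file); the branching logic below is an assumption, not the author's code:

import os

UPLOAD_FOLDER = "uploads"  # placeholder; app.py defines its own value

def handle_uploaded_file(file):
    # Hypothetical guard: normalize whatever Gradio hands us into a text string.
    file_path = os.path.join(UPLOAD_FOLDER, "uploaded_comments.txt")
    if hasattr(file, "read"):                              # file-like object
        content = file.read()
        if isinstance(content, bytes):
            content = content.decode("utf-8")
    elif isinstance(file, str) and os.path.exists(file):   # temp-file path
        with open(file, "r", encoding="utf-8") as f:
            content = f.read()
    else:                                                  # already the file's text (NamedString)
        content = str(file)
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(content)
    return "File uploaded and processed successfully."

This keeps the simple `f.write(file)` behavior from the commit while not breaking if a future Gradio version passes a file object or a path instead of the content itself.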