Afeezee committed (verified)
Commit fd97744 · 1 Parent(s): 185b795

Update app.py

Files changed (1)
  1. app.py +11 -11
app.py CHANGED
@@ -95,24 +95,23 @@ def analyze_document(file):
     """Processes and analyzes the uploaded document."""
     text = extract_text_from_file(file)
     if text.startswith("Unsupported file format"):
-        yield f"**Error:** {text}"
-        return
+        return f"**Error:** {text}", None
 
     chunks = chunk_text(text)
     all_insights = []
+    progress = []
 
-    yield "**Processing the document. Please wait...**\n"
+    progress.append("**Processing the document. Please wait...**")
     for i, chunk in enumerate(chunks, 1):
-        yield f"**Processing chunk {i} of {len(chunks)}...**"
+        progress.append(f"**Processing chunk {i} of {len(chunks)}...**")
         result = analyze_chunk(chunk)
         if result.strip(): # Only append non-empty results
             all_insights.append(result)
 
     if not all_insights:
-        yield "**Error:** No valid insights were extracted from the document."
-        return
+        return "**Error:** No valid insights were extracted from the document.", None
 
-    yield "**Consolidating all insights into a final summary...**"
+    progress.append("**Consolidating all insights into a final summary...**")
     consolidated_summary_prompt = (
         "Below are insights extracted from multiple chunks of a document. "
         "Consolidate these insights into a single output organized as follows: "
@@ -135,14 +134,15 @@ def analyze_document(file):
         final_summary = ""
         for chunk in stream:
            final_summary += chunk.choices[0].delta.content or ""
-            yield f"**Final Summary:**\n\n{final_summary}"
+
+        progress.append(f"**Final Summary:**\n\n{final_summary}")
 
         # Generate DOCX file after processing
         docx_file = save_as_docx(final_summary)
-        return final_summary, docx_file
+        return "\n".join(progress), docx_file
+
     except Exception as e:
-        yield f"**Error:** An error occurred during consolidation: {e}"
-        return final_summary, None
+        return f"**Error:** An error occurred during consolidation: {e}", None
 
 
 # Define the Gradio interface
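With this change, analyze_document is no longer a generator: instead of yielding streaming progress messages, it collects them in a progress list and returns a single (markdown_text, docx_path) tuple, with None as the file on error paths. A minimal sketch of how such a return-style function could be wired to a Gradio interface follows; the component choices (gr.File input, gr.Markdown and gr.File outputs) and labels are assumptions for illustration, not the app's actual interface definition.

import gradio as gr

# Hypothetical wiring; the real app.py may configure its interface differently.
demo = gr.Interface(
    fn=analyze_document,                    # returns (markdown_text, docx_path_or_None)
    inputs=gr.File(label="Upload a document"),
    outputs=[
        gr.Markdown(label="Analysis"),      # first return value: progress log plus final summary
        gr.File(label="Download DOCX"),     # second return value: generated .docx path, or None on error
    ],
    title="Document Analyzer",
)

if __name__ == "__main__":
    demo.launch()

Because the error branches return (error_message, None), the file output simply stays empty when extraction or consolidation fails, while the Markdown output shows the error text.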