Kfjjdjdjdhdhd committed on
Commit e627161 · verified · 1 Parent(s): 0d7f0ec

Update app.py

Files changed (1)
  1. app.py +10 -10
app.py CHANGED
@@ -290,7 +290,7 @@ HTML_CONTENT = """<!doctype html>
   #output {
     white-space: pre-wrap;
     word-break: break-word;
-    display: none; /* Hidden output element */
+    display: none;
   }
   </style>
   <script src="https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/dist/tasks-genai.js" crossorigin="anonymous"></script>
@@ -327,7 +327,7 @@ HTML_CONTENT = """<!doctype html>
 """
 
 JS_CONTENT = """
-import { FilesetResolver, LlmInference } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai';
+import { FilesetResolver, LlmInference } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/dist/tasks-genai.js';
 const chatInput = document.getElementById('input');
 const sendButton = document.getElementById('submit');
 const chatHistory = document.getElementById('chat-history');
@@ -390,9 +390,9 @@ function displayPartialResults(partialResults, complete) {
     outputElement.textContent = 'Result is empty';
   }
   sendButton.disabled = false;
-  loadingIndicator.style.display = 'none'; // Hide loading indicator after response
-  outputElement.style.display = 'none'; // Hide the raw output element
-  displayBotMessage(outputElement.textContent); // Display processed bot message
+  loadingIndicator.style.display = 'none';
+  outputElement.style.display = 'none';
+  displayBotMessage(outputElement.textContent);
 }
 }
 
@@ -434,17 +434,17 @@ sendButton.onclick = async () => {
   chatInput.value = '';
   sendButton.disabled = true;
   loadingIndicator.style.display = 'inline-block';
-  outputElement.style.display = 'block'; // Show raw output during streaming
-  outputElement.textContent = ''; // Clear previous output
+  outputElement.style.display = 'block';
+  outputElement.textContent = '';
 
   try {
     await llmInference.generateResponse(userMessageText, displayPartialResults);
   } catch (error) {
     console.error("Inference error:", error);
     displayBotMessage('Error generating response. Please try again.');
-    sendButton.disabled = false; // Re-enable send button on error
-    loadingIndicator.style.display = 'none'; // Hide loading indicator on error
-    outputElement.style.display = 'none'; // Hide raw output on error
+    sendButton.disabled = false;
+    loadingIndicator.style.display = 'none';
+    outputElement.style.display = 'none';
   }
 };
 
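
For context, the changed lines assume an LlmInference instance created from the same @mediapipe/tasks-genai module the new import points at. Below is a minimal sketch of that setup and of the streaming callback contract, following the library's documented flow; the model path and the loading-indicator element ID are assumptions for illustration, not values taken from app.py.

// Sketch only: how the pieces in the diff fit together, not the exact app.py code.
// MODEL_URL and the 'loading' element ID are placeholders.
import { FilesetResolver, LlmInference } from 'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/dist/tasks-genai.js';

const MODEL_URL = '/model.bin';                               // placeholder model asset
const outputElement = document.getElementById('output');      // raw streaming buffer (hidden via the CSS above)
const loadingIndicator = document.getElementById('loading');  // assumed element ID

let llmInference;

async function initLlm() {
  // Resolve the WASM assets, then create the inference task from a model file.
  const genaiFileset = await FilesetResolver.forGenAiTasks(
    'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai/wasm');
  llmInference = await LlmInference.createFromOptions(genaiFileset, {
    baseOptions: { modelAssetPath: MODEL_URL },
    maxTokens: 1024,
    topK: 40,
    temperature: 0.8,
  });
}

// generateResponse streams text into this callback; `complete` is true on the
// final call, which is the point where the diffed code hides the loading
// indicator and the raw output element and passes the accumulated text to
// displayBotMessage.
function displayPartialResults(partialResults, complete) {
  outputElement.textContent += partialResults;
  if (complete) {
    loadingIndicator.style.display = 'none';
    outputElement.style.display = 'none';
  }
}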