jeanmarcocruz207 committed on
Commit
cb12796
·
verified ·
1 Parent(s): 37f2ca6

Upload 29 files

Browse files
Files changed (1) hide show
  1. ui_logic.py +11 -8
ui_logic.py CHANGED
@@ -101,32 +101,36 @@ def main_chat(
101
  yield app_state, history, user_text, files
102
  return
103
 
104
- # Add user message to history so the user sees it immediately
105
  history_messages.append({"role": "user", "content": user_text})
 
 
 
106
 
107
  model_ready = model in downloaded_models or is_model_available(model)
108
  if not model_ready:
109
  gr.Info(f"El modelo '{model}' no está disponible localmente. Intentando descargarlo...")
110
- download_msg = {"role": "assistant", "content": f"📥 Descargando modelo '{model}'..."}
111
- history_messages.append(download_msg)
112
  yield app_state, _messages_to_pairs(history_messages), "", files
113
 
114
- pull_status = ""
115
  pull_success = False
116
  for status in pull_model_with_progress(model):
117
- pull_status = status
118
- download_msg["content"] = status
119
  yield app_state, _messages_to_pairs(history_messages), "", files
120
  if status.startswith("✅"):
121
  pull_success = True
122
 
123
  if not pull_success:
124
  gr.Error(f"No se pudo descargar el modelo '{model}'. Por favor, verifica el nombre o hazlo manualmente.")
 
 
125
  return
126
 
127
  if model not in downloaded_models:
128
  downloaded_models.append(model)
129
  gr.Info(f"Modelo '{model}' descargado con éxito.")
 
 
130
 
131
  # Prepare inputs
132
  files_blob, preview, _ = read_uploaded_files(files, "")
@@ -139,8 +143,7 @@ def main_chat(
139
  )
140
 
141
  # Stream response
142
- assistant_message = {"role": "assistant", "content": ""}
143
- history_messages.append(assistant_message)
144
 
145
  # The history sent to the model should not include the latest empty assistant message
146
  model_history_pairs = _messages_to_pairs(history_messages[:-1])
 
101
  yield app_state, history, user_text, files
102
  return
103
 
104
+ # Add user message + placeholder assistant so the UI updates at once
105
  history_messages.append({"role": "user", "content": user_text})
106
+ assistant_message = {"role": "assistant", "content": "⏳ Preparando respuesta..."}
107
+ history_messages.append(assistant_message)
108
+ yield app_state, _messages_to_pairs(history_messages), "", files
109
 
110
  model_ready = model in downloaded_models or is_model_available(model)
111
  if not model_ready:
112
  gr.Info(f"El modelo '{model}' no está disponible localmente. Intentando descargarlo...")
113
+ assistant_message["content"] = f"📥 Descargando modelo '{model}'..."
 
114
  yield app_state, _messages_to_pairs(history_messages), "", files
115
 
 
116
  pull_success = False
117
  for status in pull_model_with_progress(model):
118
+ assistant_message["content"] = status
 
119
  yield app_state, _messages_to_pairs(history_messages), "", files
120
  if status.startswith("✅"):
121
  pull_success = True
122
 
123
  if not pull_success:
124
  gr.Error(f"No se pudo descargar el modelo '{model}'. Por favor, verifica el nombre o hazlo manualmente.")
125
+ assistant_message["content"] = f"⚠️ No se pudo descargar '{model}'."
126
+ yield app_state, _messages_to_pairs(history_messages), user_text, files
127
  return
128
 
129
  if model not in downloaded_models:
130
  downloaded_models.append(model)
131
  gr.Info(f"Modelo '{model}' descargado con éxito.")
132
+ assistant_message["content"] = "⏳ Preparando respuesta..."
133
+ yield app_state, _messages_to_pairs(history_messages), "", files
134
 
135
  # Prepare inputs
136
  files_blob, preview, _ = read_uploaded_files(files, "")
 
143
  )
144
 
145
  # Stream response
146
+ assistant_message["content"] = ""
 
147
 
148
  # The history sent to the model should not include the latest empty assistant message
149
  model_history_pairs = _messages_to_pairs(history_messages[:-1])