import gradio as gr
import os
import json
import requests

# Configuration: the API key and system prompt are read from environment
# variables ("GROQ_API_KEY" and "System_Prompt" respectively).
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")
GROQ_API_URL = "https://api.groq.com/openai/v1/chat/completions"
SYSTEM_MESSAGE = os.environ.get("System_Prompt")

# Generation settings for the Groq chat completion request.
MODEL_NAME = "meta-llama/llama-4-maverick-17b-128e-instruct"
MAX_TOKENS = 1024
TEMPERATURE = 0.7
TOP_P = 0.95


def respond(message, history: list[tuple[str, str]]):
    # Rebuild the conversation: system prompt, prior turns, then the new user message.
    messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {GROQ_API_KEY}",
    }
    payload = {
        "model": MODEL_NAME,
        "messages": messages,
        "max_tokens": MAX_TOKENS,
        "temperature": TEMPERATURE,
        "top_p": TOP_P,
        "stream": True,
    }

    # Stream the completion and yield the accumulated text so Gradio
    # updates the chat window token by token.
    response = requests.post(
        GROQ_API_URL,
        headers=headers,
        json=payload,
        stream=True,
    )

    accumulated_response = ""
    for line in response.iter_lines():
        if line:
            line_text = line.decode("utf-8")
            # Server-sent events arrive as lines prefixed with "data: ".
            if line_text.startswith("data: "):
                data_str = line_text[6:]
                if data_str == "[DONE]":
                    break
                try:
                    data = json.loads(data_str)
                    if "choices" in data and len(data["choices"]) > 0:
                        delta = data["choices"][0].get("delta", {})
                        if "content" in delta and delta["content"]:
                            token = delta["content"]
                            accumulated_response += token
                            yield accumulated_response
                except json.JSONDecodeError:
                    # Skip keep-alive or malformed chunks.
                    continue

    # Fall back to a generic error message (in Spanish, matching the app's
    # audience) if the stream produced no content.
    if not accumulated_response:
        yield "Lo siento, ocurrió un error al procesar tu solicitud."


demo = gr.ChatInterface(
    respond,
    examples=[
        ["¡Bienvenido a Barcenas Gran Bajio Network!"],
        ["¿Cuál es el objetivo principal de El Gran Bajío Network?"],
        ["¿Qué beneficios clave obtengo al ser miembro?"],
        ["¿Qué tipo de actividades o conexiones fomenta la red?"],
    ],
)

if __name__ == "__main__":
    demo.launch()
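
# --- Usage sketch (assumptions, not part of the app itself) ---
# The script expects GROQ_API_KEY and System_Prompt to be set in the
# environment before launch, whether locally or on a hosting platform, e.g.:
#
#   export GROQ_API_KEY="gsk_..."   # placeholder; use your own Groq key
#   export System_Prompt="..."      # the chatbot's system instructions
#   python app.py                   # assumes this file is saved as app.py
#
# demo.launch() then serves the Gradio chat UI (by default on
# http://127.0.0.1:7860 when run locally).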