import gradio as gr
import datasets
# Load NVIDIA’s Llama-Nemotron dataset (public)
dataset = datasets.load_dataset("nvidia/Llama-Nemotron-Post-Training-Dataset-v1", split="train")
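
# Sanity-check sketch (assumption: the exact split name and column names of this
# dataset may differ from what this script expects, e.g. per-category splits and
# "input"/"output" columns rather than "text"); printing the schema once at startup
# makes it easy to confirm which field search_dataset() below should read.
print("Dataset columns:", dataset.column_names)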
# Search a small random sample of the dataset for rows that mention the query
def search_dataset(query):
    results = []
    for data in dataset.shuffle(seed=42).select(range(10)):  # Scan 10 random samples to keep the demo fast
        # Column names depend on the dataset schema; fall back across common text-bearing fields
        text = str(data.get("text") or data.get("input") or data.get("output") or "")
        if query.lower() in text.lower():
            results.append(text)
    return "\n\n".join(results) if results else "No relevant data found."
# Build Jellyfish AI's reply: the "response" is simply the assembled prompt
# (system prompt + retrieved context + user message); no language model is called.
def chat(user_message):
    context = search_dataset(user_message)  # Get relevant dataset content
    system_prompt = "You are Jellyfish AI, an advanced assistant with knowledge from NVIDIA’s dataset."
    return f"{system_prompt}\nContext: {context}\nUser: {user_message}\nJellyfish AI:"
# Gradio UI
with gr.Blocks(fill_height=True) as demo:
    with gr.Sidebar():
        gr.Markdown("# Jellyfish AI 2025 1.0.0")
        gr.Markdown("Powered by NVIDIA’s Llama-Nemotron dataset. No external API needed!")
    gr.Markdown("### Chat with Jellyfish AI")
    user_input = gr.Textbox(label="Your Message")
    output = gr.Textbox(label="Jellyfish AI's Response", interactive=False)
    chat_button = gr.Button("Send")
    chat_button.click(chat, inputs=user_input, outputs=output)

demo.launch()
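
# Usage sketch (assumption: running locally rather than on Hugging Face Spaces):
#   pip install gradio datasets
#   python app.py
# Gradio then serves the demo at a local URL (by default http://127.0.0.1:7860).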