import gradio as gr
# Custom CSS for gradient background and styling
custom_css = """
.gradio-container {
    background: linear-gradient(135deg, #667eea 0%, #764ba2 25%, #f093fb 50%, #4facfe 75%, #00f2fe 100%);
    background-size: 400% 400%;
    animation: gradient-animation 15s ease infinite;
    min-height: 100vh;
}
@keyframes gradient-animation {
    0% { background-position: 0% 50%; }
    50% { background-position: 100% 50%; }
    100% { background-position: 0% 50%; }
}
.dark .gradio-container {
    background: linear-gradient(135deg, #1a1a2e 0%, #16213e 25%, #0f3460 50%, #533483 75%, #e94560 100%);
    background-size: 400% 400%;
    animation: gradient-animation 15s ease infinite;
}
/* Style for content areas */
.main-container {
    background-color: rgba(255, 255, 255, 0.95);
    backdrop-filter: blur(10px);
    border-radius: 20px;
    padding: 20px;
    box-shadow: 0 8px 32px 0 rgba(31, 38, 135, 0.37);
    border: 1px solid rgba(255, 255, 255, 0.18);
    margin: 10px;
}
.dark .main-container {
    background-color: rgba(30, 30, 30, 0.95);
    border: 1px solid rgba(255, 255, 255, 0.1);
}
"""
def switch_model(model_choice):
    """Toggle container visibility and store the newly selected model."""
    if model_choice == "openai/gpt-oss-120b":
        return gr.update(visible=True), gr.update(visible=False), model_choice
    return gr.update(visible=False), gr.update(visible=True), model_choice

with gr.Blocks(fill_height=True, theme=gr.themes.Soft(), css=custom_css) as demo:
    # State variable to track the current model (gr.State must be created inside the Blocks context)
    current_model = gr.State("openai/gpt-oss-120b")
    with gr.Row():
        # Sidebar
        with gr.Column(scale=1):
            with gr.Group(elem_classes="main-container"):
                gr.Markdown("# 🚀 Inference Provider")
                gr.Markdown(
                    "This Space showcases the OpenAI GPT-OSS models, served via the Fireworks AI inference provider. "
                    "Sign in with your Hugging Face account to use this API."
                )
                # Model selection
                model_dropdown = gr.Dropdown(
                    choices=[
                        "openai/gpt-oss-120b",
                        "openai/gpt-oss-20b"
                    ],
                    value="openai/gpt-oss-120b",
                    label="🤖 Select Model",
                    info="Choose between different model sizes"
                )
                # Login button
                login_button = gr.LoginButton("Sign in with Hugging Face", size="lg")

                # Reload button to apply model change
                reload_btn = gr.Button("🔄 Apply Model Change", variant="primary", size="lg")
                # Additional options
                with gr.Accordion("⚙️ Advanced Options", open=False):
                    gr.Markdown("*These options will be available after model implementation*")
                    temperature = gr.Slider(
                        minimum=0,
                        maximum=2,
                        value=0.7,
                        step=0.1,
                        label="Temperature"
                    )
                    max_tokens = gr.Slider(
                        minimum=1,
                        maximum=4096,
                        value=512,
                        step=1,
                        label="Max Tokens"
                    )
        # Main chat area
        with gr.Column(scale=3):
            with gr.Group(elem_classes="main-container"):
                gr.Markdown("## 💬 Chat Interface")

                # Container for model interfaces
                with gr.Column(visible=True) as model_120b_container:
                    gr.Markdown("### Model: openai/gpt-oss-120b")
                    gr.load("models/openai/gpt-oss-120b", accept_token=login_button, provider="fireworks-ai")

                with gr.Column(visible=False) as model_20b_container:
                    gr.Markdown("### Model: openai/gpt-oss-20b")
                    gr.load("models/openai/gpt-oss-20b", accept_token=login_button, provider="fireworks-ai")
    # Handle model switching
    reload_btn.click(
        fn=switch_model,
        inputs=[model_dropdown],
        outputs=[model_120b_container, model_20b_container, current_model]
    ).then(
        fn=lambda: gr.Info("Model switched successfully!"),
        inputs=[],
        outputs=[]
    )
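    # .then() chains a follow-up step that runs only after the switch handler finishes,
    # so the gr.Info toast appears once the container visibility and state have been updated.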
    # Update visibility based on dropdown selection
    def update_visibility(model_choice):
        if model_choice == "openai/gpt-oss-120b":
            return gr.update(visible=True), gr.update(visible=False)
        else:
            return gr.update(visible=False), gr.update(visible=True)

    model_dropdown.change(
        fn=update_visibility,
        inputs=[model_dropdown],
        outputs=[model_120b_container, model_20b_container]
    )
demo.launch()