import os
import torch
from threading import Thread
from typing import List, Tuple
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, TextIteratorStreamer
import spaces
from pathlib import Path
from huggingface_hub import CommitScheduler
import uuid
import json
# Constants
SYSTEM_PROMPT = """You are SmallThinker-3B, a helpful AI assistant. You try to follow instructions as much as possible while being accurate and brief."""
device = "cuda" if torch.cuda.is_available() else "cpu"
TITLE = "<h1><center>SmallThinker-3B Chat</center></h1>"
MODEL_PATH = "PowerInfer/SmallThinker-3B-Preview"
# Custom CSS for the chat layout and buttons
CSS = """
.duplicate-button {
    margin: auto !important;
    color: white !important;
    background: black !important;
    border-radius: 100vh !important;
}
h3 {
    text-align: center;
}
.chat-container {
    height: 500px !important;
    overflow-y: auto !important;
    flex-direction: column !important;
}
.messages-container {
    flex-grow: 1 !important;
    overflow-y: auto !important;
    padding-right: 10px !important;
}
.contain {
    height: 100% !important;
}
button {
    border-radius: 8px !important;
}
"""
# Load model and tokenizer
model = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH,
    torch_dtype=torch.bfloat16,
).to(device)
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
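
# --- Usage logging ---
# Conversations are appended to a local JSONL file which CommitScheduler
# pushes to a dataset repo in the background (`every` is in minutes).
# LOGS_ID (target repo id) and HF_LOGS_TOKEN (a write token) are expected
# to be configured as Space secrets.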
logs_id = os.getenv("LOGS_ID")
logs_token = os.getenv("HF_LOGS_TOKEN")
logs_file = Path("logs/") / f"data_{uuid.uuid4()}.json"
logs_folder = logs_file.parent

scheduler = CommitScheduler(
    repo_id=logs_id,
    repo_type="dataset",
    folder_path=logs_folder,
    path_in_repo="data",
    every=5,
    token=logs_token,
    private=True,
)
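
# Each log line is a single JSON object; illustrative example:
# {"input": "<prompt after chat template>", "output": "<model reply>", "model": "SmallThinker-3B"}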

@spaces.GPU  # request a GPU for the duration of the call on ZeroGPU hardware
def stream_chat(
    message: str,
    history: list,
    temperature: float = 0.3,
    max_new_tokens: int = 1024,
    top_p: float = 1.0,
    top_k: int = 20,
    repetition_penalty: float = 1.2,
):
    # `add_message` has already appended the current turn as [message, ""] and
    # the textbox is cleared before this handler runs, so recover the prompt
    # from the pending history entry when the incoming message is empty.
    if history and history[-1][1] == "":
        message = history[-1][0]
        new_history = history
        previous_turns = history[:-1]
    else:
        new_history = history + [[message, ""]]
        previous_turns = history

    # Build the conversation with a single system prompt, then the completed turns
    conversation = [{"role": "system", "content": SYSTEM_PROMPT}]
    for prompt, answer in previous_turns:
        conversation.extend([
            {"role": "user", "content": prompt},
            {"role": "assistant", "content": answer},
        ])
    conversation.append({"role": "user", "content": message})

    input_text = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer.encode(input_text, return_tensors="pt").to(device)
    streamer = TextIteratorStreamer(tokenizer, timeout=40.0, skip_prompt=True, skip_special_tokens=True)
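
    # TextIteratorStreamer exposes generated text as an iterator: generate()
    # pushes decoded chunks into it from the worker thread, and iteration
    # blocks for up to `timeout` seconds waiting for the next chunk.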
    generate_kwargs = dict(
        input_ids=inputs,
        max_new_tokens=max_new_tokens,
        do_sample=temperature > 0,
        top_p=top_p,
        top_k=top_k,
        temperature=temperature,
        repetition_penalty=repetition_penalty,
        streamer=streamer,
        # fall back to EOS if the tokenizer defines no dedicated pad token
        pad_token_id=tokenizer.pad_token_id if tokenizer.pad_token_id is not None else tokenizer.eos_token_id,
    )

    # Run generation in a background thread so tokens can be streamed as they
    # arrive. (A torch.no_grad() around Thread() would not carry into the
    # worker thread anyway; generate() disables gradient tracking internally.)
    thread = Thread(target=model.generate, kwargs=generate_kwargs)
    thread.start()

    buffer = ""
    for new_text in streamer:
        buffer += new_text
        # Strip role markers the model occasionally leaks into its output
        buffer = buffer.replace("\nUser", "")
        buffer = buffer.replace("\nSystem", "")
        new_history[-1][1] = buffer
        yield new_history
    thread.join()

    # Append the finished exchange under the scheduler's lock so a background
    # commit never uploads a half-written line.
    with scheduler.lock:
        with logs_file.open("a") as f:
            f.write(json.dumps({
                "input": input_text.replace(SYSTEM_PROMPT, ""),
                "output": buffer.replace(SYSTEM_PROMPT, ""),
                "model": "SmallThinker-3B",
            }))
            f.write("\n")

def clear_input():
    return ""


def add_message(message: str, history: list):
    # Show the user's message in the chat immediately; stream_chat fills in the reply
    if message.strip() != "":
        history = history + [[message, ""]]
    return history


def clear_session() -> Tuple[str, List]:
    return "", []

def main():
    with gr.Blocks(css=CSS, theme="soft") as demo:
        gr.HTML(TITLE)
        gr.DuplicateButton(value="Duplicate Space for private use", elem_classes="duplicate-button")
        with gr.Row():
            with gr.Accordion(label="Chat Interface", open=True):
                chatbot = gr.Chatbot(
                    label='SmallThinker-3B',
                    height=500,
                    container=True,
                    elem_classes=["chat-container"],
                )
        with gr.Accordion(label="⚙️ Parameters", open=False):
            temperature = gr.Slider(minimum=0, maximum=1, step=0.1, value=0.3, label="Temperature")
            max_new_tokens = gr.Slider(minimum=128, maximum=32768, step=128, value=16384, label="Max new tokens")
            top_p = gr.Slider(minimum=0.0, maximum=1.0, step=0.1, value=1.0, label="Top-p")
            top_k = gr.Slider(minimum=1, maximum=100, step=1, value=20, label="Top-k")
            repetition_penalty = gr.Slider(minimum=1.0, maximum=2.0, step=0.1, value=1.1, label="Repetition penalty")
        textbox = gr.Textbox(lines=1, label='Input')
        with gr.Row():
            clear_history = gr.Button("🧹 Clear History")
            submit = gr.Button("🚀 Send")

        # Submit button: append the user turn to the chat, clear the textbox,
        # then stream the model's reply into the pending turn
        submit_event = submit.click(
            fn=add_message,
            inputs=[textbox, chatbot],
            outputs=chatbot,
            queue=False,
        ).then(
            fn=clear_input,
            outputs=textbox,
            queue=False,
        ).then(
            fn=stream_chat,
            inputs=[textbox, chatbot, temperature, max_new_tokens, top_p, top_k, repetition_penalty],
            outputs=chatbot,
            show_progress=True,
        )

        # The Enter key triggers the same chain of events
        enter_event = textbox.submit(
            fn=add_message,
            inputs=[textbox, chatbot],
            outputs=chatbot,
            queue=False,
        ).then(
            fn=clear_input,
            outputs=textbox,
            queue=False,
        ).then(
            fn=stream_chat,
            inputs=[textbox, chatbot, temperature, max_new_tokens, top_p, top_k, repetition_penalty],
            outputs=chatbot,
            show_progress=True,
        )
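
        # The event handles are kept so generation could be interrupted later,
        # e.g. stop.click(..., cancels=[submit_event, enter_event]) where
        # `stop` is a hypothetical gr.Button not present in this UI.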
        clear_history.click(fn=clear_session, outputs=[textbox, chatbot])

    demo.launch()


if __name__ == "__main__":
    main()