import gradio as gr
import spaces
import transformers
import torch
# Load the Sahabat-AI Gemma2 9B instruct model as a chat text-generation pipeline.
model_id = "GoToCompany/gemma2-9b-cpt-sahabatai-v1-instruct"
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)
# Stop generation at EOS or at "<|eot_id|>". Note: "<|eot_id|>" comes from the
# Llama-3 chat template; Gemma tokenizers typically use "<end_of_turn>", so the
# second entry may simply resolve to the unknown-token id on this model.
terminators = [
    pipeline.tokenizer.eos_token_id,
    pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
]
@spaces.GPU
def respond(
    message,
    history: list[tuple[str, str]],
    max_tokens,
    temperature,
    top_p,
):
    # Rebuild the conversation from Gradio's (user, assistant) history tuples.
    messages = []
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    outputs = pipeline(
        messages,
        max_new_tokens=max_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
        eos_token_id=terminators,
    )
    # The pipeline returns the whole chat transcript; yield only the new assistant turn.
    yield outputs[0]["generated_text"][-1]["content"]
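
# Alternative sketch (not part of the original Space, untested): stream tokens as
# they are generated instead of yielding the full reply at once, using
# transformers' TextIteratorStreamer. Assumes the `pipeline` and `terminators`
# objects defined above.
#
# from threading import Thread
# from transformers import TextIteratorStreamer
#
# @spaces.GPU
# def respond_streaming(message, history, max_tokens, temperature, top_p):
#     messages = [{"role": "user", "content": message}]
#     streamer = TextIteratorStreamer(
#         pipeline.tokenizer, skip_prompt=True, skip_special_tokens=True
#     )
#     generation_kwargs = dict(
#         text_inputs=messages,
#         max_new_tokens=max_tokens,
#         do_sample=True,
#         temperature=temperature,
#         top_p=top_p,
#         eos_token_id=terminators,
#         streamer=streamer,
#     )
#     Thread(target=pipeline, kwargs=generation_kwargs).start()
#     partial = ""
#     for new_text in streamer:
#         partial += new_text
#         yield partial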
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
demo = gr.ChatInterface(
    respond,
    title="🇮🇩 Sahabat AI (Gemma)",
    description="""This model is a fine-tuned version of SEA-LIONv3's Gemma model trained predominantly on Indonesian, Javanese, and Sundanese data.
#### [Model page](https://huggingface.co/GoToCompany/gemma2-9b-cpt-sahabatai-v1-instruct)""",
    examples=[["Tolong carin resep sop buntut dong"], ["Sopo wae sing ana ing Punakawan?"], ["Kumaha caritana si Kabayan?"]],
    additional_inputs=[
        gr.Slider(minimum=1, maximum=2048, value=256, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
)
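
# Optional tweak (assumption, not in the original Space): enable Gradio's request
# queue so concurrent users share the single GPU worker in order.
# demo.queue(max_size=20)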
if __name__ == "__main__":
    demo.launch()