import spaces for zerogpu
app.py (CHANGED)
@@ -1,3 +1,4 @@
+import spaces
 import gradio as gr
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, StoppingCriteria, StoppingCriteriaList, TextIteratorStreamer
@@ -37,6 +38,8 @@ Your answer should be friendly, unbiased, faithful, informative and detailed.'
 system_prompt = f"<|im_start|>{system_role}\n{system_prompt}<|im_end|>"
 
 # Function to generate model predictions.
+
+@spaces.GPU()
 def predict(message, history):
     # history = []
     history_transformer_format = history + [[message, ""]]
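For context, a minimal sketch of how the newly decorated predict function might sit in the rest of app.py, assuming the common TextIteratorStreamer plus gr.ChatInterface streaming pattern. Only the spaces import, the @spaces.GPU() decorator, and the predict(message, history) signature come from the diff above; the model checkpoint, prompt assembly, and generation settings are illustrative placeholders, not taken from the Space.

# Minimal sketch, not the Space's actual app.py.
from threading import Thread

import spaces
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_id = "Qwen/Qwen2-1.5B-Instruct"  # assumed placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16).to("cuda")

@spaces.GPU()  # ZeroGPU attaches a GPU only for the duration of this call
def predict(message, history):
    # Flatten the Gradio chat history into a single ChatML-style prompt.
    prompt = "".join(
        f"<|im_start|>user\n{u}<|im_end|>\n<|im_start|>assistant\n{a}<|im_end|>\n"
        for u, a in history
    )
    prompt += f"<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n"

    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

    # Run generation in a background thread so tokens can be streamed as they arrive.
    Thread(
        target=model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=512),
    ).start()

    partial = ""
    for token_text in streamer:
        partial += token_text
        yield partial  # ChatInterface renders each partial response as it grows

gr.ChatInterface(predict).launch()

The key point of the commit is simply that the GPU-bound work (generation) happens inside the function wrapped by @spaces.GPU(), which is what lets the Space run on ZeroGPU hardware instead of a dedicated GPU.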