Update app.py
app.py CHANGED
@@ -1,16 +1,16 @@
-from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 import gradio as gr
 import spaces
 import torch
 import re
+import json
+import os
+import time
+import threading
 from threading import Thread
 from typing import Iterator
 from datetime import datetime
 from huggingface_hub import HfApi, hf_hub_download
-import
-import os
-import time
-
+from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 model_name = "Woziii/llama-3-8b-chat-me"
 model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype=torch.float16)
 tokenizer = AutoTokenizer.from_pretrained(model_name)