cllatMTK committed
Commit 6399b12 · verified · 1 Parent(s): 7547fd1

Update app.py

Files changed (1):
  1. app.py +10 -10
app.py CHANGED
@@ -16,23 +16,23 @@ print("Your Computer IP Address is:" + IPAddr)
 
 
 DESCRIPTION = """
-# Demo: Breeze-7B-Instruct-v0.1
+# Demo: Breexe-8x7B-Instruct-v0.1
 
-Breeze-7B is a language model family that builds on top of [Mistral-7B](https://huggingface.co/mistralai/Mistral-7B-v0.1), specifically intended for Traditional Chinese use.
+Breexe-8x7B is a language model family that builds on top of [Mistral-7B](https://huggingface.co/mistralai/Mistral-7B-v0.1), specifically intended for Traditional Chinese use.
 
-[Breeze-7B-Base](https://huggingface.co/MediaTek-Research/Breeze-7B-Base-v0.1) is the base model for the Breeze-7B series.
+[Breexe-8x7B-Base](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Base-v0.1) is the base model for the Breexe-8x7B series.
 It is suitable for use if you have substantial fine-tuning data to tune it for your specific use case.
 
-[Breeze-7B-Instruct](https://huggingface.co/MediaTek-Research/Breeze-7B-Instruct-v0.1) derives from the base model Breeze-7B-Base, making the resulting model amenable to be used as-is for commonly seen tasks.
+[Breexe-8x7B-Instruct](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-v0.1) derives from the base model Breexe-8x7B-Base, making the resulting model amenable to be used as-is for commonly seen tasks.
 
-[Breeze-7B-Instruct-64k](https://huggingface.co/MediaTek-Research/Breeze-7B-Instruct-64k-v0.1) is a slightly modified version of
-Breeze-7B-Instruct to enable a 64k-token context length. Roughly speaking, that is equivalent to 88k Traditional Chinese characters.
+[Breexe-8x7B-Instruct-64k](https://huggingface.co/MediaTek-Research/Breexe-8x7B-Instruct-64k-v0.1) is a slightly modified version of
+Breexe-8x7B-Instruct to enable a 64k-token context length. Roughly speaking, that is equivalent to 88k Traditional Chinese characters.
 
-The current release version of Breeze-7B is v0.1.
+The current release version of Breexe-8x7B is v0.1.
 
 *A project by the members (in alphabetical order): Chan-Jan Hsu 許湛然, Chang-Le Liu 劉昶樂, Feng-Ting Liao 廖峰挺, Po-Chun Hsu 許博竣, Yi-Chang Chen 陳宜昌, and the supervisor Da-Shan Shiu 許大山.*
 
-**免責聲明: Breeze-7B-Instruct 和 Breeze-7B-Instruct-64k 並未針對問答進行安全保護,因此語言模型的任何回應不代表 MediaTek Research 立場。**
+**免責聲明: Breexe-8x7B-Instruct 和 Breexe-8x7B-Instruct-64k 並未針對問答進行安全保護,因此語言模型的任何回應不代表 MediaTek Research 立場。**
 """
 
 LICENSE = """
@@ -54,7 +54,7 @@ HEADERS = {
 MAX_SEC = 30
 MAX_INPUT_LENGTH = 5000
 
-tokenizer = AutoTokenizer.from_pretrained("MediaTek-Research/Breeze-7B-Instruct-v0_1")
+tokenizer = AutoTokenizer.from_pretrained("MediaTek-Research/Breexe-8x7B-Instruct-v0_1")
 
 def insert_to_db(prompt, response, temperature, top_p):
     try:
@@ -210,7 +210,7 @@ with gr.Blocks() as demo:
             yield history
         else:
             data = {
-                "model_type": "breeze-7b-instruct-v01",
+                "model_type": "breexe-8x7b-instruct-v01",
                 "prompt": str(message),
                 "parameters": {
                     "temperature": float(temperature),