Flying-Lynx committed on
Commit
541f9ca
·
1 Parent(s): beb1701

update requirements

Browse files
Files changed (2) hide show
  1. app.py +4 -4
  2. requirements.txt +1 -0
app.py CHANGED
@@ -20,11 +20,11 @@ import argparse
20
  from transformers import TextIteratorStreamer
21
  from threading import Thread
22
 
23
- import subprocess
24
- import sys
25
 
26
- print("Installing flash attention...")
27
- subprocess.check_call([sys.executable, "-m", "pip", "install", "flash-attn==2.4.2", "--no-build-isolation"])
28
 
29
  no_change_btn = gr.Button()
30
  enable_btn = gr.Button(interactive=True)
 
20
  from transformers import TextIteratorStreamer
21
  from threading import Thread
22
 
23
+ # import subprocess
24
+ # import sys
25
 
26
+ # print("Installing flash attention...")
27
+ # subprocess.check_call([sys.executable, "-m", "pip", "install", "flash-attn==2.4.2", "--no-build-isolation"])
28
 
29
  no_change_btn = gr.Button()
30
  enable_btn = gr.Button(interactive=True)
requirements.txt CHANGED
@@ -40,6 +40,7 @@ timm==0.9.11
40
  toml==0.10.2
41
  tqdm==4.66.1
42
  loguru==0.7.3
 
43
  tenacity
44
  sqlitedict
45
  evaluate==0.4.3
 
40
  toml==0.10.2
41
  tqdm==4.66.1
42
  loguru==0.7.3
43
+ https://github.com/Dao-AILab/flash-attention/releases/download/v2.4.2/flash_attn-2.4.2+cu122torch2.1cxx11abiTRUE-cp310-cp310-linux_x86_64.whl
44
  tenacity
45
  sqlitedict
46
  evaluate==0.4.3