thankfulcarp committed on
Commit
9330946
·
1 Parent(s): e500465

flash attn failure

Browse files
Files changed (2) hide show
  1. app.py +1 -2
  2. requirements.txt +1 -4
app.py CHANGED
@@ -213,8 +213,7 @@ def enhance_prompt_with_llm(prompt: str):
213
  "text-generation",
214
  model=ENHANCER_MODEL_ID,
215
  torch_dtype=torch.bfloat16,
216
- device_map="auto",
217
- model_kwargs={"attn_implementation": "flash_attention_2"}
218
  )
219
  print("✅ LLM Prompt Enhancer loaded successfully.")
220
  except Exception as e:
 
213
  "text-generation",
214
  model=ENHANCER_MODEL_ID,
215
  torch_dtype=torch.bfloat16,
216
+ device_map="auto"
 
217
  )
218
  print("✅ LLM Prompt Enhancer loaded successfully.")
219
  except Exception as e:
requirements.txt CHANGED
@@ -17,7 +17,4 @@ sentencepiece
17
  ftfy
18
  imageio
19
  imageio-ffmpeg
20
- opencv-python
21
-
22
- # Performance
23
- flash-attn
 
17
  ftfy
18
  imageio
19
  imageio-ffmpeg
20
+ opencv-python