huseinzol05 committed
Commit 7a4d919 (verified) · 1 parent: f2f0c71

Upload Qwen2ForCausalLM

generation_config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "attn_implementation": "sdpa",
+  "attn_implementation": "flash_attention_2",
   "bos_token_id": 151643,
   "eos_token_id": 151643,
   "max_new_tokens": 2048,
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:33add42197c81a23714d57876e6e53e19cc53d90a8ac50658b223bf0f0883e64
+oid sha256:bdbe3c6e121a1167da2cc461f0984f97be1cba4581fa02025d0b67940c193005
 size 4985611048
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d5c5795359e19ddd63c2a349f3202aad0ecf822ccd6e732b293570073eece586
+oid sha256:e2871e16723699d428d01ef349a78de9208d9c8b9beb9eb5d0f7dcb896b8df47
 size 1288325688
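
The safetensors shards are stored through Git LFS, so only the pointer files (oid and size) change in the diff. A small sketch, assuming a shard has been downloaded locally under the same filename, of checking it against the sha256 recorded in the new pointer:

# Sketch: verify a downloaded shard against the sha256 from its Git LFS pointer.
# The local file path is an assumption.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "bdbe3c6e121a1167da2cc461f0984f97be1cba4581fa02025d0b67940c193005"
assert sha256_of("model-00001-of-00002.safetensors") == expected
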