aymanbakiri committed
Commit e376e18 · verified · 1 Parent(s): 8e5985d

Upload Qwen3ForCausalLM

Files changed (2):
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -52,7 +52,7 @@
   "rope_theta": 1000000,
   "sliding_window": null,
   "tie_word_embeddings": true,
-  "torch_dtype": "float16",
+  "torch_dtype": "float32",
   "transformers_version": "4.53.0.dev0",
   "use_cache": true,
   "use_sliding_window": false,
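The only substantive config change is the serialized dtype: the checkpoint now defaults to float32 rather than float16. As a minimal sketch (the repo id below is a placeholder, not the actual repository path), the previous half-precision behavior can still be recovered by overriding the dtype at load time:

import torch
from transformers import AutoModelForCausalLM

# Placeholder repo id for illustration only; substitute the real repository.
model = AutoModelForCausalLM.from_pretrained(
    "aymanbakiri/qwen3-upload",    # hypothetical repo id
    torch_dtype=torch.float16,     # override the float32 default in config.json
)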
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:119cf0dfa7748577398b10a0b2cf64418fcacdfe3dc5211d8efc3b88981d376a
-size 1192134784
+oid sha256:e85ee3e2acc42d1748cf285d92b6b1a8988e4295b5edbec867fecf2af8cddd94
+size 2384234968
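The weights file roughly doubles in size (1192134784 → 2384234968 bytes), which is exactly what re-serializing the same parameters at 4 bytes each (float32) instead of 2 bytes each (float16) predicts. A quick sanity check in Python (the small mismatch between the two implied counts is presumably safetensors header metadata, an assumption on my part):

fp16_bytes = 1_192_134_784   # previous file size: 2 bytes per float16 parameter
fp32_bytes = 2_384_234_968   # new file size: 4 bytes per float32 parameter

print(fp16_bytes / 2)  # ≈ 5.96e8 implied parameters
print(fp32_bytes / 4)  # ≈ 5.96e8 implied parameters, matching the float16 count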