xue wang committed: Upload 2 files

- config.json +7 -7
- model.safetensors +2 -2
config.json CHANGED

@@ -1,5 +1,5 @@
 {
-
+  "architectures": ["YingLong"],
 
   "auto_map": {
     "AutoConfig": "model_config.YingLongConfig",
@@ -14,11 +14,12 @@
   "haar_trans": true,
   "haar_trans_inv": true,
   "haar_trans_norm": "backward",
-  "intermediate_size":
-  "n_embd":
-  "n_head":
-  "n_layer":
+  "intermediate_size": 1024,
+  "n_embd": 256,
+  "n_head": 16,
+  "n_layer": 6,
   "n_query_groups": 4,
+  "norm_eps": 1e-05,
   "parallel_residual": false,
   "patch_size": 32,
   "quantitle": true,
@@ -26,6 +27,5 @@
   "rotary_percentage": 1.0,
   "shared_attention_norm": false,
   "unet": true,
-  "vocab_size": 1,
-  "vq": false
+  "vocab_size": 1
 }
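The new values pin down a small model: n_embd 256 with n_head 16 gives a head dimension of 16, and intermediate_size 1024 is the usual 4x n_embd. Because auto_map points AutoConfig at model_config.YingLongConfig, the config resolves through custom code shipped in the repo. A minimal loading sketch, assuming transformers is installed; the repo id below is a placeholder, not taken from this commit:

# Sketch only: "your-namespace/YingLong" is a hypothetical repo id.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "your-namespace/YingLong",  # hypothetical; replace with the actual repo
    trust_remote_code=True,     # required so auto_map can import model_config.YingLongConfig
)
print(config.n_layer, config.n_head, config.n_embd)  # 6, 16, 256 after this commit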
model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e2df65a7dc7730709b3e1b943fec891d72ef003aa42d0761370f5cd7aa7bf440
+size 14646052
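The pointer file records the sha256 and byte size of the new weights (about 14 MB), which is enough to check a download locally. A minimal verification sketch in Python, assuming model.safetensors has already been fetched to the working directory:

import hashlib
import os

EXPECTED_SHA256 = "e2df65a7dc7730709b3e1b943fec891d72ef003aa42d0761370f5cd7aa7bf440"
EXPECTED_SIZE = 14646052  # bytes, from the pointer above

path = "model.safetensors"  # assumed local download location
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_SHA256, "sha256 does not match the LFS pointer"
print("model.safetensors matches the LFS pointer")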