riczhou committed (verified)
Commit 63ae60d · 1 Parent(s): 15b6194

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
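
The new attribute line matches what running git lfs track "tokenizer.json" would append, so the roughly 33 MB tokenizer.json added in this commit is stored through Git LFS like the other large artifacts rather than directly in the Git history.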
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "<image_soft_token>": 262144
+ }
mlc-chat-config.json ADDED
@@ -0,0 +1,116 @@
+ {
+   "version": "0.1.0",
+   "model_type": "gemma3_text",
+   "quantization": "q4f16_1",
+   "model_config": {
+     "text_config": {
+       "hidden_size": 1152,
+       "intermediate_size": 6912,
+       "num_hidden_layers": 26,
+       "attention_bias": false,
+       "num_attention_heads": 4,
+       "num_key_value_heads": 1,
+       "head_dim": 256,
+       "rms_norm_eps": 1e-06,
+       "hidden_activation": "gelu_pytorch_tanh",
+       "position_embedding_base": 1000000,
+       "context_window_size": 8192,
+       "prefill_chunk_size": 8192,
+       "query_pre_attn_scalar": 256,
+       "sliding_window": 512,
+       "kwargs": {
+         "architectures": [
+           "Gemma3ForCausalLM"
+         ],
+         "attention_dropout": 0.0,
+         "attn_logit_softcapping": null,
+         "bos_token_id": 2,
+         "cache_implementation": "hybrid",
+         "eos_token_id": [
+           1,
+           106
+         ],
+         "final_logit_softcapping": null,
+         "initializer_range": 0.02,
+         "max_position_embeddings": 32768,
+         "model_type": "gemma3_text",
+         "pad_token_id": 0,
+         "rope_local_base_freq": 10000,
+         "rope_scaling": null,
+         "sliding_window_pattern": 6,
+         "torch_dtype": "bfloat16",
+         "transformers_version": "4.50.0.dev0",
+         "use_cache": true
+       }
+     },
+     "vocab_size": 262144,
+     "tensor_parallel_shards": 1,
+     "max_batch_size": 128,
+     "context_window_size": 8192,
+     "sliding_window_size": -1,
+     "prefill_chunk_size": 8192,
+     "is_text_model": true
+   },
+   "vocab_size": 262144,
+   "context_window_size": 8192,
+   "sliding_window_size": -1,
+   "prefill_chunk_size": 8192,
+   "attention_sink_size": -1,
+   "tensor_parallel_shards": 1,
+   "pipeline_parallel_stages": 1,
+   "temperature": 1.0,
+   "presence_penalty": 0.0,
+   "frequency_penalty": 0.0,
+   "repetition_penalty": 1.0,
+   "top_p": 0.95,
+   "tokenizer_files": [
+     "tokenizer.model",
+     "tokenizer.json",
+     "added_tokens.json",
+     "tokenizer_config.json"
+   ],
+   "tokenizer_info": {
+     "token_postproc_method": "byte_fallback",
+     "prepend_space_in_encode": false,
+     "strip_space_in_decode": false
+   },
+   "conv_template": {
+     "name": "gemma_instruction",
+     "system_template": "{system_message}",
+     "system_message": "",
+     "system_prefix_token_ids": [
+       2
+     ],
+     "add_role_after_system_message": true,
+     "roles": {
+       "user": "<start_of_turn>user",
+       "assistant": "<start_of_turn>model"
+     },
+     "role_templates": {
+       "user": "{user_message}",
+       "assistant": "{assistant_message}",
+       "tool": "{tool_message}"
+     },
+     "messages": [],
+     "seps": [
+       "<end_of_turn>\n"
+     ],
+     "role_content_sep": "\n",
+     "role_empty_sep": "\n",
+     "stop_str": [
+       "<end_of_turn>"
+     ],
+     "stop_token_ids": [
+       1,
+       107
+     ],
+     "function_string": "",
+     "use_function_calling": false
+   },
+   "pad_token_id": 0,
+   "bos_token_id": 2,
+   "eos_token_id": [
+     1,
+     106
+   ]
+ }
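
mlc-chat-config.json is the runtime configuration the MLC LLM engine reads for this q4f16_1-quantized Gemma 3 text model: conversation template, stop tokens, sampling defaults, and the tokenizer files listed above. As a rough sketch of how such an uploaded folder is typically consumed, assuming the mlc_llm Python package and treating the repository ID below as a placeholder rather than this repo's actual location:

from mlc_llm import MLCEngine

# Placeholder repo ID for wherever this folder ends up hosted; not taken from the commit.
model = "HF://someuser/gemma-3-q4f16_1-MLC"

engine = MLCEngine(model)

# The engine applies the "gemma_instruction" conv_template from mlc-chat-config.json,
# so plain chat messages are enough here.
for response in engine.chat.completions.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model=model,
    stream=True,
):
    for choice in response.choices:
        print(choice.delta.content or "", end="", flush=True)

engine.terminate()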
ndarray-cache.json ADDED
The diff for this file is too large to render. See raw diff
 
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0167d3222e5c58f11949a423d1c9ddc027c534d9e2e409cac572f3be73e51df
+ size 150994944
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1095155c03dcc30780380dbe51c07e5e6138f4927900c0e6d0d6ae5b19c28a0b
+ size 33150976
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fbe57dcd77abcd60414d933a48cb30c2fbb4e77aaf505f2e7f65fb2499b03fa
+ size 30212096
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42777010e9a830e07484351d5e180dfff1ed9f55aa4dc8c516f568ba25ab7c30
+ size 30212096
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80da72efd17c5cec576f82214c26b32f9884b6439a4165f8389fb54824d21786
+ size 30212096
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7d9b48258c751e0f9a8f1c9c576ec048bbb356d6bce40c6fcd91b77f5484249
+ size 30212096
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e20c5424f8f4e29c4828291f10d71834729c675d68b0c0376fc31650e055f47
+ size 15106048
params_shard_2.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:578070661200590547eeeb26d40e8b23277751cee02f39e4e0bed3f4aa8c2f9c
+ size 31043840
params_shard_3.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b266b9d5895b40c21500754a7ada0b981b5b71564adbfa3d4adafc0b63b4a5b
+ size 30212096
params_shard_4.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9163a846ec73e27b4aec8aaa273556ca283ae0d199a5c2b1d94e5c009b18f2ba
+ size 30212096
params_shard_5.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75e8447de696923f29b98d8a9d42aea24726abd294c6024f862c49229e4318b5
+ size 30212096
params_shard_6.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f9a96ad1693e23cccec89f594fbbaa7150b7837bc4cca1425fd3ceb8ad32289
+ size 30212096
params_shard_7.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04bfc87d65c03935bf8e3730d40d7e088c1cdeb6e2d73c925f84aea3c1cb2b6a
+ size 30212096
params_shard_8.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4559e61c468c144ccb5c1fb31182c7e138ad4dd30b8caa463e37e1211c6a663d
+ size 30212096
params_shard_9.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86782725e2c021a3343570ee036e67e7d85d4180e88b8179d7bf402730ef44f0
+ size 30212096
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4667f2089529e8e7657cfb6d1c19910ae71ff5f28aa7ab2ff2763330affad795
+ size 33384568
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c
+ size 4689074
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
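
Each params_shard_*.bin and tokenizer entry above is a Git LFS pointer: the repository stores only the payload's sha256 and byte size, while the data itself lives in LFS storage. A minimal sketch for checking fetched copies against the hashes listed in this commit; it assumes the files have already been pulled (e.g. with git lfs pull or huggingface_hub) into the current directory:

import hashlib
from pathlib import Path

# Expected digests copied from the LFS pointers in this commit (subset shown).
EXPECTED_SHA256 = {
    "params_shard_0.bin": "c0167d3222e5c58f11949a423d1c9ddc027c534d9e2e409cac572f3be73e51df",
    "tokenizer.model": "1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c",
}

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    # Stream in 1 MiB chunks so large shards never need to fit in memory at once.
    digest = hashlib.sha256()
    with path.open("rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

for name, expected in EXPECTED_SHA256.items():
    actual = sha256_of(Path(name))
    print(name, "OK" if actual == expected else f"MISMATCH: {actual}")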