andito (HF Staff) committed · verified
Commit 0960d69 · 1 Parent(s): 57136f4

Upload nanoVLM using push_to_hub

Files changed (2):
  1. config.json +4 -8
  2. model.safetensors +2 -2
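
The commit message says this checkpoint was uploaded with nanoVLM's push_to_hub. As context, a minimal sketch of how such an upload is typically triggered; the import path and method names are assumptions based on the nanoVLM repository layout and are not confirmed by this commit. Only "checkpoints" (vlm_checkpoint_path) and "nanoVLM" (hf_repo_name) come from the config.json shown below.

    # Hypothetical sketch: class/module names and signatures are assumptions.
    from models.vision_language_model import VisionLanguageModel  # assumed import path

    model = VisionLanguageModel.from_pretrained("checkpoints")  # local checkpoint dir
    model.push_to_hub("nanoVLM")  # push config.json + model.safetensors to the Hub repo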
config.json CHANGED
@@ -14,25 +14,21 @@
   "lm_rms_eps": 1e-05,
   "lm_re_base": 100000,
   "lm_max_position_embeddings": 8192,
-  "lm_base_vocab_size": 49152,
-  "extra_token_amount": 1,
-  "lm_vocab_size": 49153,
+  "lm_vocab_size": 49152,
   "lm_n_heads": 15,
   "lm_n_kv_heads": 5,
   "lm_dropout": 0.0,
   "lm_n_blocks": 32,
   "lm_attn_scaling": 1.0,
-  "lm_max_length": 512,
+  "IMAGE_TOKEN_LENGTH": 49,
+  "TOTAL_SEQUENCE_LENGTH": 512,
+  "lm_max_length": 463,
   "lm_use_tokens": false,
   "lm_tie_weights": true,
   "lm_model_type": "HuggingFaceTB/SmolLM2-360M-Instruct",
   "lm_tokenizer": "HuggingFaceTB/SmolLM2-360M-Instruct",
   "lm_eos_token_id": 0,
   "mp_pixel_shuffle_factor": 2,
-  "mp_image_token_length": 64,
-  "vlm_extra_tokens": {
-    "image_token": "<|image|>"
-  },
   "vlm_load_backbone_weights": true,
   "vlm_checkpoint_path": "checkpoints",
   "hf_repo_name": "nanoVLM"
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:95afe454831715ce98c4d97978d79871a7cdd0298170b4bc7d1850003116cbaf
-size 1802506128
+oid sha256:7a62350af2303daf6b292d7cf595b59544574f80d29727805a2bcad18c213dd8
+size 1802502288
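
The 3,840-byte shrink in the weights file is consistent with the config change: with tied embeddings, dropping the single extra <|image|> token removes one embedding row. A back-of-the-envelope check, assuming fp32 storage and SmolLM2-360M's hidden size of 960; neither value appears in this diff:

    # Assumed values: fp32 (4 bytes/param) and hidden_size = 960 are not in the diff.
    old_size, new_size = 1_802_506_128, 1_802_502_288
    hidden_size, bytes_per_param = 960, 4
    removed_rows = (old_size - new_size) / (hidden_size * bytes_per_param)
    print(removed_rows)  # 1.0 -> one embedding row, i.e. the dropped <|image|> token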