jingyq1 committed
Commit 88ec730 · verified · Parent: f3f67f3

Upload ckpts_retriConv_b1_h1_cc12m checkpoint
config.json ADDED
@@ -0,0 +1,73 @@
+ {
+   "_name_or_path": "deepseek-ai/Janus-Pro-1B",
+   "aligner_config": {
+     "cls": "MlpProjector",
+     "model_type": "aligner",
+     "params": {
+       "depth": 2,
+       "input_dim": 1024,
+       "n_embed": 2048,
+       "projector_type": "mlp_gelu"
+     }
+   },
+   "architectures": [
+     "MultiModalityCausalLM"
+   ],
+   "gen_aligner_config": {
+     "cls": "MlpProjector",
+     "model_type": "gen_aligner",
+     "params": {
+       "depth": 2,
+       "input_dim": 8,
+       "n_embed": 2048,
+       "projector_type": "mlp_gelu"
+     }
+   },
+   "gen_head_config": {
+     "cls": "vision_head",
+     "model_type": "gen_head",
+     "params": {
+       "image_token_embed": 2048,
+       "image_token_size": 16384,
+       "n_embed": 2048
+     }
+   },
+   "gen_vision_config": {
+     "cls": "VQ-16",
+     "model_type": "gen_vision",
+     "params": {
+       "image_token_size": 16384,
+       "n_embed": 8
+     }
+   },
+   "language_config": {
+     "_attn_implementation_autoset": true,
+     "hidden_size": 2048,
+     "intermediate_size": 5632,
+     "max_position_embeddings": 16384,
+     "model_type": "llama",
+     "num_SmoothPatchBlender": 24,
+     "num_attention_heads": 16,
+     "num_hidden_layers": 24,
+     "num_hop": [
+       1
+     ],
+     "num_key_value_heads": 16,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 102400
+   },
+   "model_type": "multi_modality",
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.48.3",
+   "use_cache": false,
+   "vision_config": {
+     "cls": "CLIPVisionTower",
+     "model_type": "vision",
+     "params": {
+       "image_size": 384,
+       "model_name": "siglip_large_patch16_384",
+       "select_feature": "same",
+       "select_layer": -1
+     }
+   }
+ }
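
For reference, a minimal sketch of loading this config with transformers. The model_type "multi_modality" and the nested *_config blocks come from the Janus-Pro codebase, so the Janus config classes must be available at load time; the checkpoint path below is a placeholder:

```python
from transformers import AutoConfig

# Placeholder path; point this at wherever the checkpoint is stored.
CKPT = "./ckpts_retriConv_b1_h1_cc12m"

# "multi_modality" is a custom model_type registered by the Janus codebase,
# so its config classes must be importable (or shipped with the repo and
# enabled via trust_remote_code=True) for AutoConfig to resolve it.
config = AutoConfig.from_pretrained(CKPT, trust_remote_code=True)

print(config.model_type)  # multi_modality
# In the Janus config class, language_config is itself a config object,
# so its fields are attributes rather than dict keys.
print(config.language_config.num_hidden_layers)  # 24
```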
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca2a7a1e97fa438be665c67d4346e927f9a8d939cf4331a547e1abf130789e24
+ size 4254225472
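
This file is a Git LFS pointer, not the weights themselves: the real blob (4,254,225,472 bytes, about 4.25 GB) lives in LFS storage and is addressed by the SHA-256 above. A minimal sketch of fetching and verifying it via huggingface_hub, assuming the checkpoint is hosted on the Hub (the repo id is a placeholder):

```python
import hashlib

from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the Hub repo that hosts this checkpoint.
REPO_ID = "jingyq1/ckpts_retriConv_b1_h1_cc12m"

# hf_hub_download follows the LFS pointer and returns a local cache path
# to the actual safetensors blob.
path = hf_hub_download(repo_id=REPO_ID, filename="model.safetensors")

# Check integrity against the oid recorded in the pointer file.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == "ca2a7a1e97fa438be665c67d4346e927f9a8d939cf4331a547e1abf130789e24"
```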
special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "additional_special_tokens": [
+     "<image_placeholder>",
+     "<patch_placeholder>",
+     "<|ref|>",
+     "<|/ref|>",
+     "<|det|>",
+     "<|/det|>",
+     "<|grounding|>",
+     "<|User|>",
+     "<|Assistant|>"
+   ],
+   "bos_token": {
+     "content": "<|begin▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|end▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|▁pad▁|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
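
A minimal sketch of loading the tokenizer and confirming these special tokens are registered; the path is again a placeholder, and the tokenizer files themselves are standard transformers artifacts:

```python
from transformers import AutoTokenizer

# Placeholder path to the checkpoint directory.
tokenizer = AutoTokenizer.from_pretrained("./ckpts_retriConv_b1_h1_cc12m")

print(tokenizer.bos_token)  # <|begin▁of▁sentence|>
print(tokenizer.eos_token)  # <|end▁of▁sentence|>
print(tokenizer.pad_token)  # <|▁pad▁|>

# The multimodal markers are registered as additional special tokens, so
# each maps to a single id instead of being split by the underlying BPE.
print(tokenizer.additional_special_tokens)
print(tokenizer.convert_tokens_to_ids("<image_placeholder>"))
```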
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aee99176f2374186ed5dc7c8080bebbef3b14f74792aa53747dcaad14d8e05ec
+ size 5496
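
training_args.bin is likewise an LFS pointer; the 5,496-byte blob behind it is conventionally the pickled TrainingArguments object saved by the transformers Trainer. A minimal sketch of inspecting it once downloaded, noting that the exact fields depend on the transformers version used for training (4.48.3 per config.json):

```python
import torch

# training_args.bin is a pickled transformers.TrainingArguments object;
# weights_only=False is required to unpickle it, and unpickling runs
# arbitrary code, so only do this for checkpoints you trust.
args = torch.load("training_args.bin", weights_only=False)

print(type(args).__name__)  # e.g. TrainingArguments
print(args.learning_rate)
print(args.per_device_train_batch_size)
```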