remyx
salma-remyx committed on
Commit
f60e53d
1 Parent(s): d0d3451

adding SpaceMinitron-4B weights

Browse files
Files changed (4) hide show
  1. .gitattributes +1 -0
  2. checkpoints/latest-checkpoint.pt +3 -0
  3. config.json +59 -0
  4. config.yaml +52 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ checkpoints/latest-checkpoint.pt filter=lfs diff=lfs merge=lfs -text
checkpoints/latest-checkpoint.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8c8ceddb893ef8d95b43e92eeffaffe335a56de36356a10ce0591fb5e239387
3
+ size 16984255468
config.json ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": {
3
+ "align_stage_components": [
4
+ "/home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json",
5
+ "/home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct"
6
+ ],
7
+ "dataset_id": "spacellava-llava-lrv",
8
+ "dataset_root_dir": "/home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct",
9
+ "finetune_stage_components": [
10
+ "/home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json",
11
+ "/home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct"
12
+ ],
13
+ "type": "spacellava-llava-lrv"
14
+ },
15
+ "hf_token": ".hf_token",
16
+ "model": {
17
+ "align_epochs": 1,
18
+ "align_global_batch_size": 256,
19
+ "align_learning_rate": 0.001,
20
+ "align_lr_scheduler_type": "linear-warmup+cosine-decay",
21
+ "align_max_grad_norm": 1.0,
22
+ "align_max_steps": null,
23
+ "align_per_device_batch_size": 16,
24
+ "align_train_strategy": "fsdp-shard-grad-op",
25
+ "align_warmup_ratio": 0.03,
26
+ "align_weight_decay": 0.0,
27
+ "arch_specifier": "no-align+fused-gelu-mlp",
28
+ "enable_gradient_checkpointing": true,
29
+ "enable_mixed_precision_training": true,
30
+ "finetune_epochs": 1,
31
+ "finetune_global_batch_size": 128,
32
+ "finetune_learning_rate": 2e-05,
33
+ "finetune_lr_scheduler_type": "linear-warmup+cosine-decay",
34
+ "finetune_max_grad_norm": 1.0,
35
+ "finetune_max_steps": null,
36
+ "finetune_per_device_batch_size": 16,
37
+ "finetune_train_strategy": "fsdp-full-shard",
38
+ "finetune_warmup_ratio": 0.03,
39
+ "finetune_weight_decay": 0.1,
40
+ "image_resize_strategy": "resize-naive",
41
+ "llm_backbone_id": "minitron-4b-pure",
42
+ "llm_max_length": 2048,
43
+ "model_id": "minitron+4b+dinosiglip",
44
+ "reduce_in_full_precision": false,
45
+ "type": "minitron+4b+dinosiglip",
46
+ "vision_backbone_id": "dinosiglip-vit-so-224px"
47
+ },
48
+ "pretrained_checkpoint": null,
49
+ "run_id": "spacellava-llava-lrv+minitron+4b+dinosiglip+stage-finetune+x7",
50
+ "run_root_dir": "runs",
51
+ "seed": 7,
52
+ "stage": "finetune",
53
+ "trackers": [
54
+ "jsonl",
55
+ "wandb"
56
+ ],
57
+ "wandb_entity": "smellslikeml",
58
+ "wandb_project": "prismatic"
59
+ }
config.yaml ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ dataset:
2
+ align_stage_components:
3
+ - /home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json
4
+ - /home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct
5
+ dataset_id: spacellava-llava-lrv
6
+ dataset_root_dir: /home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct
7
+ finetune_stage_components:
8
+ - /home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct/llava_v1_5_lrv_mix1008k_spacellava.json
9
+ - /home/ubuntu/prismatic-vlms/data/download/llava-v1.5-instruct
10
+ type: spacellava-llava-lrv
11
+ hf_token: .hf_token
12
+ model:
13
+ align_epochs: 1
14
+ align_global_batch_size: 256
15
+ align_learning_rate: 0.001
16
+ align_lr_scheduler_type: linear-warmup+cosine-decay
17
+ align_max_grad_norm: 1.0
18
+ align_max_steps: null
19
+ align_per_device_batch_size: 16
20
+ align_train_strategy: fsdp-shard-grad-op
21
+ align_warmup_ratio: 0.03
22
+ align_weight_decay: 0.0
23
+ arch_specifier: no-align+fused-gelu-mlp
24
+ enable_gradient_checkpointing: true
25
+ enable_mixed_precision_training: true
26
+ finetune_epochs: 1
27
+ finetune_global_batch_size: 128
28
+ finetune_learning_rate: 2.0e-05
29
+ finetune_lr_scheduler_type: linear-warmup+cosine-decay
30
+ finetune_max_grad_norm: 1.0
31
+ finetune_max_steps: null
32
+ finetune_per_device_batch_size: 16
33
+ finetune_train_strategy: fsdp-full-shard
34
+ finetune_warmup_ratio: 0.03
35
+ finetune_weight_decay: 0.1
36
+ image_resize_strategy: resize-naive
37
+ llm_backbone_id: minitron-4b-pure
38
+ llm_max_length: 2048
39
+ model_id: minitron+4b+dinosiglip
40
+ reduce_in_full_precision: false
41
+ type: minitron+4b+dinosiglip
42
+ vision_backbone_id: dinosiglip-vit-so-224px
43
+ pretrained_checkpoint: null
44
+ run_id: spacellava-llava-lrv+minitron+4b+dinosiglip+stage-finetune+x7
45
+ run_root_dir: runs
46
+ seed: 7
47
+ stage: finetune
48
+ trackers:
49
+ - jsonl
50
+ - wandb
51
+ wandb_entity: smellslikeml
52
+ wandb_project: prismatic