Update config.json
config.json  CHANGED  +3 -3
@@ -3,7 +3,7 @@
     "Qwen2_5_VLForConditionalGeneration"
   ],
   "attention_dropout": 0.0,
-  "
+  "torch_dtype": "bfloat16",
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 3584,
@@ -35,7 +35,7 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
-  "
+  "torch_dtype": "bfloat16",
   "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 3584,
@@ -106,7 +106,7 @@
   "video_token_id": 151656,
   "vision_config": {
     "depth": 32,
-    "
+    "torch_dtype": "bfloat16",
     "fullatt_block_indexes": [
       7,
       15,
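For context, a minimal sketch of how the added field is consumed, assuming a recent transformers release that ships Qwen2_5_VLForConditionalGeneration and using Qwen/Qwen2.5-VL-7B-Instruct as a placeholder repo id (this commit may belong to a different repository): passing torch_dtype="auto" to from_pretrained makes the loader pick up the "torch_dtype": "bfloat16" entries written to config.json above.

# Sketch only: repo id below is a placeholder, not necessarily this repository.
import torch
from transformers import AutoConfig, Qwen2_5_VLForConditionalGeneration

repo_id = "Qwen/Qwen2.5-VL-7B-Instruct"  # placeholder repo id (assumption)

# The new entries are exposed on the loaded config objects.
config = AutoConfig.from_pretrained(repo_id)
print(config.torch_dtype)                # reflects the top-level "torch_dtype" entry
print(config.vision_config.torch_dtype)  # reflects the entry inside "vision_config"

# torch_dtype="auto" defers to the dtype stored in config.json,
# so the weights are instantiated directly in bfloat16.
model = Qwen2_5_VLForConditionalGeneration.from_pretrained(repo_id, torch_dtype="auto")
assert model.dtype == torch.bfloat16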