cella110n committed on
Commit
2b181b3
·
verified ·
1 Parent(s): f5085c3

Upload config.json

Browse files
Files changed (1) hide show
  1. transformer/config.json +19 -20
transformer/config.json CHANGED
@@ -1,20 +1,19 @@
1
- {
2
- "_class_name": "FluxTransformer2DModel",
3
- "_diffusers_version": "0.32.0.dev0",
4
- "_name_or_path": "/root/.cache/huggingface/hub/models--black-forest-labs--FLUX.1-schnell/snapshots/741f7c3ce8b383c54771c7003378a50191e9efe9/transformer",
5
- "attention_head_dim": 128,
6
- "axes_dims_rope": [
7
- 16,
8
- 56,
9
- 56
10
- ],
11
- "guidance_embeds": false,
12
- "in_channels": 64,
13
- "joint_attention_dim": 4096,
14
- "num_attention_heads": 24,
15
- "num_layers": 19,
16
- "num_single_layers": 38,
17
- "out_channels": null,
18
- "patch_size": 1,
19
- "pooled_projection_dim": 768
20
- }
 
1
+ {
2
+ "_class_name": "FluxTransformer2DModel",
3
+ "_diffusers_version": "0.32.1",
4
+ "attention_head_dim": 128,
5
+ "axes_dims_rope": [
6
+ 16,
7
+ 56,
8
+ 56
9
+ ],
10
+ "guidance_embeds": true,
11
+ "in_channels": 64,
12
+ "joint_attention_dim": 4096,
13
+ "num_attention_heads": 24,
14
+ "num_layers": 6,
15
+ "num_single_layers": 12,
16
+ "out_channels": null,
17
+ "patch_size": 1,
18
+ "pooled_projection_dim": 768
19
+ }