{
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.32.1",
  "_name_or_path": "black-forest-labs/Flux.1-Dev",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": true,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 19,
  "num_single_layers": 38,
  "out_channels": null,
  "patch_size": 1,
  "pooled_projection_dim": 768,
  "quantization_config": {
    "modules_to_not_convert": null,
    "quant_method": "torchao",
    "quant_type": "int8wo",
    "quant_type_kwargs": {}
  }
}
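
For context, this is the config of a FluxTransformer2DModel whose weights were quantized with torchao int8 weight-only quantization ("quant_method": "torchao", "quant_type": "int8wo"). Below is a minimal sketch of how such a checkpoint is typically produced with diffusers; the repo ID is taken from the "_name_or_path" field above, and the output directory name is hypothetical.

```python
import torch
from diffusers import FluxTransformer2DModel, TorchAoConfig

# int8 weight-only quantization, matching "quant_type": "int8wo" in the config above
quantization_config = TorchAoConfig("int8wo")

# Load the base transformer from the repo named in "_name_or_path" and quantize on load
transformer = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/Flux.1-Dev",
    subfolder="transformer",
    quantization_config=quantization_config,
    torch_dtype=torch.bfloat16,
)

# Saving writes the quantized weights plus a config.json containing the
# "quantization_config" block shown above
transformer.save_pretrained("flux1-dev-int8wo-transformer")  # hypothetical output path
```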