{
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.33.0.dev0",
  "_name_or_path": "black-forest-labs/Flux.1-Dev",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": true,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 19,
  "num_single_layers": 38,
  "out_channels": null,
  "patch_size": 1,
  "pooled_projection_dim": 768,
  "quantization_config": {
    "modules_to_not_convert": null,
    "quant_method": "torchao",
    "quant_type": "int8wo",
    "quant_type_kwargs": {}
  }
}
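
Note: the "quantization_config" block ("quant_method": "torchao", "quant_type": "int8wo") is the serialized form of diffusers' TorchAoConfig set up for int8 weight-only quantization. A minimal sketch of constructing a transformer with an equivalent configuration is below; the source model id, subfolder, and torch_dtype are assumptions for illustration, not values taken from this file.

import torch
from diffusers import FluxTransformer2DModel, TorchAoConfig

# int8 weight-only quantization, matching "quant_type": "int8wo" above.
quantization_config = TorchAoConfig("int8wo")

# Assumed source checkpoint, subfolder, and dtype; adjust to your setup.
transformer = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    subfolder="transformer",
    quantization_config=quantization_config,
    torch_dtype=torch.bfloat16,
)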