Missing config.json

#1 by samedii - opened

If you add this file to the transformer directory, the model can be loaded easily with diffusers. :)

config.json

{
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.30.0.dev0",
  "_name_or_path": "./transformer",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "guidance_embeds": true,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 19,
  "num_single_layers": 38,
  "patch_size": 1,
  "pooled_projection_dim": 768
}
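
For reference, a minimal loading sketch once the config is in place (the local path below is a placeholder; point it at your own checkout of the repo):

```python
import torch
from diffusers import FluxTransformer2DModel

# "path/to/repo" is a placeholder for wherever the weights are stored locally;
# subfolder="transformer" matches the directory holding this config.json.
transformer = FluxTransformer2DModel.from_pretrained(
    "path/to/repo",
    subfolder="transformer",
    torch_dtype=torch.bfloat16,
)
```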
