{
"architectures": [
"ESM"
],
"expansion_ratio": 2.6666666666666665,
"hidden_size": 512,
"num_attention_heads": 8,
"num_hidden_layers": 12,
"torch_dtype": "bfloat16",
"transformers_version": "4.47.1",
"vocab_size": 33
}