{ "architectures": [ "ESM" ], "expansion_ratio": 2.6666666666666665, "hidden_size": 768, "num_attention_heads": 6, "num_hidden_layers": 24, "torch_dtype": "bfloat16", "transformers_version": "4.47.1", "vocab_size": 33 }