{ "model_class": "MSAGPT", "tokenizer_type": "ProteinTokenizer", "num_layers": 36, "hidden_size": 2560, "inner_hidden_size": 6832, "num_attention_heads": 40, "vocab_size": 128, "layernorm_order": "post", "model_parallel_size": 1, "max_sequence_length": 2048, "untie_head": true, "head_num": 2, "moe": false, "expert": 1 }