{
    "n_layers": 32,
    "dim": 4096,
    "hidden_dim": 14336,
    "n_heads": 32,
    "head_dim": 128,
    "n_kv_heads": 8,
    "norm_eps": 1e-05,
    "vocab_size": 32000,
    "sliding_window": 4096,
    "quantization_config": {
        "group_size": 64,
        "bits": 4
    },
    "model_type": "mistral"
}
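To make the numbers above concrete, here is a minimal Python sketch that reads this config and derives two figures implied by its values: the attention width (n_heads * head_dim, which matches dim) and the grouped-query attention ratio (n_heads / n_kv_heads). The file path and variable names are illustrative, assuming the file shown above is saved locally as config.json.

```python
import json

# Assumed local path; point this at the quantized model's config.json.
with open("config.json") as f:
    config = json.load(f)

# Attention width: 32 heads x 128 dims per head = 4096, matching "dim".
attn_width = config["n_heads"] * config["head_dim"]

# Grouped-query attention: 32 query heads share 8 KV heads, i.e. 4 query heads per KV head.
gqa_ratio = config["n_heads"] // config["n_kv_heads"]

# Quantization: 4-bit weights, one scale per group of 64 values.
quant = config["quantization_config"]

print(f"attention width: {attn_width}")            # 4096
print(f"query heads per KV head: {gqa_ratio}")     # 4
print(f"quantization: {quant['bits']}-bit, group size {quant['group_size']}")
```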