{
"attention_dropout": 0.0,
"base_model_name_or_path": "meta-llama/Llama-2-13b-chat-hf",
"calibration_type": "transformer",
"feature_key": "hidden_states",
"freeze_base_model": true,
"hidden_act": "silu",
"in_features": 5120,
"inference_mode": true,
"init_temperature": 1.0,
"intermediate_size": 13824,
"label_smoothing": 0.0,
"label_smoothing_type": "topk",
"layer_idx": 40,
"log_auxiliary_info": false,
"loss_type": "xent",
"max_position_embeddings": 5120,
"normalize_logits": false,
"num_attention_heads": 40,
"num_key_value_heads": 40,
"smooth_loss_weight": 0.5,
"smoothing_topk": 5,
"task_type": "CAUSAL_LM"
}
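
For orientation, a minimal sketch of how a config like this might be consumed in Python. The file name, dtype, and surrounding training code are assumptions, and the transformer-style calibration head itself is not defined in this file; only the JSON-reading and base-model calls use standard-library and transformers APIs.

import json

import torch
from transformers import AutoModelForCausalLM

# Load the calibration config shown above (the path is hypothetical).
with open("config.json") as f:
    cfg = json.load(f)

# Load the base model named in the config. output_hidden_states matches
# cfg["feature_key"] == "hidden_states", which the calibration head reads.
base = AutoModelForCausalLM.from_pretrained(
    cfg["base_model_name_or_path"],
    torch_dtype=torch.float16,
    output_hidden_states=True,
)

# freeze_base_model: true -> only the calibration head would be trained.
if cfg["freeze_base_model"]:
    for p in base.parameters():
        p.requires_grad_(False)

# The head (not part of this file) would consume hidden states from
# layer cfg["layer_idx"] with width cfg["in_features"] (5120 here),
# trained with the loss/smoothing settings listed above.
print(cfg["calibration_type"], cfg["layer_idx"], cfg["in_features"])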