{
  "_name_or_path": "openbmb/UltraRAG-Embedding",
  "adapt_mean_pooling": true,
  "architectures": [
    "MiniCPMModel"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_minicpm.MiniCPMConfig",
    "AutoModel": "modeling_minicpm.MiniCPMModel",
    "AutoModelForCausalLM": "modeling_minicpm.MiniCPMForCausalLM",
    "AutoModelForSeq2SeqLM": "modeling_minicpm.MiniCPMForCausalLM",
    "AutoModelForSequenceClassification": "modeling_minicpm.MiniCPMForSequenceClassification"
  },
  "bos_token_id": 1,
  "dim_model_base": 256,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 1024,
  "initializer_range": 0.1,
  "intermediate_size": 4096,
  "is_causal": false,
  "max_position_embeddings": 4096,
  "model_type": "minicpm",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "num_key_value_heads": 2,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0004360675811768,
      1.0668443441390991,
      1.1631425619125366,
      1.3025742769241333,
      1.5040205717086792,
      1.7941505908966064,
      2.2101221084594727,
      2.802666664123535,
      3.6389970779418945,
      4.804192543029785,
      6.39855432510376,
      8.527148246765137,
      11.277542114257812,
      14.684998512268066,
      18.69317054748535,
      23.13019371032715,
      27.72362518310547,
      32.1606559753418,
      36.168827056884766,
      39.57627868652344,
      42.32667541503906,
      44.45526885986328,
      46.04962921142578,
      47.21482849121094,
      48.05115509033203,
      48.64370346069336,
      49.05967712402344,
      49.34980392456055,
      49.551246643066406,
      49.69068145751953,
      49.78697967529297,
      49.85338592529297
    ],
    "original_max_position_embeddings": 4096,
    "short_factor": [
      1.0004360675811768,
      1.0668443441390991,
      1.1631425619125366,
      1.3025742769241333,
      1.5040205717086792,
      1.7941505908966064,
      2.2101221084594727,
      2.802666664123535,
      3.6389970779418945,
      4.804192543029785,
      6.39855432510376,
      8.527148246765137,
      11.277542114257812,
      14.684998512268066,
      18.69317054748535,
      23.13019371032715,
      27.72362518310547,
      32.1606559753418,
      36.168827056884766,
      39.57627868652344,
      42.32667541503906,
      44.45526885986328,
      46.04962921142578,
      47.21482849121094,
      48.05115509033203,
      48.64370346069336,
      49.05967712402344,
      49.34980392456055,
      49.551246643066406,
      49.69068145751953,
      49.78697967529297,
      49.85338592529297
    ],
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "scale_depth": 1.4,
  "scale_emb": 12,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.2",
  "use_cache": false,
  "vocab_size": 73448
}
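
A minimal usage sketch for this config, not a confirmed recipe: the repo id "openbmb/UltraRAG-Embedding" is taken from _name_or_path, trust_remote_code=True is needed because auto_map routes AutoModel to the repo-local class modeling_minicpm.MiniCPMModel, and the masked mean pooling at the end is an assumption suggested by adapt_mean_pooling=true / is_causal=false rather than something this file specifies.

import torch
from transformers import AutoModel, AutoTokenizer

repo = "openbmb/UltraRAG-Embedding"  # assumption: from _name_or_path above
tokenizer = AutoTokenizer.from_pretrained(repo, trust_remote_code=True)
# trust_remote_code loads the custom modeling_minicpm.MiniCPMModel named in auto_map
model = AutoModel.from_pretrained(
    repo,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in the config
)
model.eval()

batch = tokenizer(
    ["example query"],
    return_tensors="pt",
    truncation=True,
    max_length=4096,  # matches "max_position_embeddings": 4096
)
with torch.no_grad():
    hidden = model(**batch).last_hidden_state  # shape (1, seq_len, 1024) per "hidden_size"

# Assumed pooling step: attention-mask-weighted mean over token states,
# hinted at by adapt_mean_pooling=true but not defined in this config.
mask = batch["attention_mask"].unsqueeze(-1).to(hidden.dtype)
embedding = (hidden * mask).sum(dim=1) / mask.sum(dim=1)
print(embedding.shape)  # torch.Size([1, 1024])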