{
  "_name_or_path": "QCRI/bert-base-multilingual-cased-pos-english",
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "directionality": "bidi",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "``",
    "2": ",",
    "3": ":",
    "4": ".",
    "5": "''",
    "6": "$",
    "7": "#",
    "8": "CC",
    "9": "CD",
    "10": "DT",
    "11": "EX",
    "12": "FW",
    "13": "IN",
    "14": "JJ",
    "15": "JJR",
    "16": "JJS",
    "17": "-LRB-",
    "18": "LS",
    "19": "MD",
    "20": "NN",
    "21": "NNP",
    "22": "NNPS",
    "23": "NNS",
    "24": "PDT",
    "25": "POS",
    "26": "PRP",
    "27": "PRP$",
    "28": "RB",
    "29": "RBR",
    "30": "RBS",
    "31": "RP",
    "32": "-RRB-",
    "33": "SYM",
    "34": "TO",
    "35": "UH",
    "36": "VB",
    "37": "VBD",
    "38": "VBG",
    "39": "VBN",
    "40": "VBP",
    "41": "VBZ",
    "42": "WDT",
    "43": "WP",
    "44": "WP$",
    "45": "WRB"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "#": 7,
    "$": 6,
    "''": 5,
    ",": 2,
    "-LRB-": 17,
    "-RRB-": 32,
    ".": 4,
    ":": 3,
    "CC": 8,
    "CD": 9,
    "DT": 10,
    "EX": 11,
    "FW": 12,
    "IN": 13,
    "JJ": 14,
    "JJR": 15,
    "JJS": 16,
    "LS": 18,
    "MD": 19,
    "NN": 20,
    "NNP": 21,
    "NNPS": 22,
    "NNS": 23,
    "O": 0,
    "PDT": 24,
    "POS": 25,
    "PRP": 26,
    "PRP$": 27,
    "RB": 28,
    "RBR": 29,
    "RBS": 30,
    "RP": 31,
    "SYM": 33,
    "TO": 34,
    "UH": 35,
    "VB": 36,
    "VBD": 37,
    "VBG": 38,
    "VBN": 39,
    "VBP": 40,
    "VBZ": 41,
    "WDT": 42,
    "WP": 43,
    "WP$": 44,
    "WRB": 45,
    "``": 1
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.47.1",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 119547
}