{ "_name_or_path": "bert-base-cased", "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "gradient_checkpointing": false, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-Description", "1": "B-Responsibility", "2": "I-AcademicWritingMoves", "3": "B-Strategic", "4": "I-Contingent", "5": "I-InformationStates", "6": "I-Interactive", "7": "B-SyntacticComplexity", "8": "B-InformationChange", "9": "I-Description", "10": "I-Narrative", "11": "B-InformationTopics", "12": "B-MetadiscourseInteractive", "13": "I-InformationPlace", "14": "I-Responsibility", "15": "I-Reasoning", "16": "B-InformationExposition", "17": "B-ForceStressed", "18": "B-ConfidenceHedged", "19": "B-Character", "20": "B-Updates", "21": "I-InformationReportVerbs", "22": "B-InformationChangePositive", "23": "B-PublicTerms", "24": "I-MetadiscourseCohesive", "25": "O", "26": "B-AcademicTerms", "27": "I-MetadiscourseInteractive", "28": "I-Updates", "29": "I-Negative", "30": "B-InformationPlace", "31": "B-Interactive", "32": "I-AcademicTerms", "33": "I-CitationAuthority", "34": "I-Citation", "35": "B-Narrative", "36": "I-PublicTerms", "37": "B-CitationAuthority", "38": "B-Reasoning", "39": "I-InformationExposition", "40": "I-Facilitate", "41": "B-FirstPerson", "42": "I-ConfidenceHedged", "43": "I-FirstPerson", "44": "I-Character", "45": "B-ConfidenceLow", "46": "B-MetadiscourseCohesive", "47": "B-InformationChangeNegative", "48": "B-Uncertainty", "49": "B-AcademicWritingMoves", "50": "I-ConfidenceLow", "51": "I-Strategic", "52": "I-SyntacticComplexity", "53": "B-Negative", "54": "I-Inquiry", "55": "I-InformationChangeNegative", "56": "I-InformationTopics", "57": "B-Future", "58": "I-ConfidenceHigh", "59": "B-Positive", "60": "B-CitationHedged", "61": "I-CitationHedged", "62": "I-ForceStressed", "63": "B-Inquiry", "64": "I-InformationChangePositive", "65": "B-ConfidenceHigh", "66": "I-Uncertainty", "67": "B-InformationReportVerbs", "68": "I-InformationChange", "69": "B-Citation", "70": "B-InformationStates", "71": "I-Future", "72": "B-Facilitate", "73": "I-Positive", "74": "B-Contingent", "75": "PAD" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "B-AcademicTerms": 26, "B-AcademicWritingMoves": 49, "B-Character": 19, "B-Citation": 69, "B-CitationAuthority": 37, "B-CitationHedged": 60, "B-ConfidenceHedged": 18, "B-ConfidenceHigh": 65, "B-ConfidenceLow": 45, "B-Contingent": 74, "B-Description": 0, "B-Facilitate": 72, "B-FirstPerson": 41, "B-ForceStressed": 17, "B-Future": 57, "B-InformationChange": 8, "B-InformationChangeNegative": 47, "B-InformationChangePositive": 22, "B-InformationExposition": 16, "B-InformationPlace": 30, "B-InformationReportVerbs": 67, "B-InformationStates": 70, "B-InformationTopics": 11, "B-Inquiry": 63, "B-Interactive": 31, "B-MetadiscourseCohesive": 46, "B-MetadiscourseInteractive": 12, "B-Narrative": 35, "B-Negative": 53, "B-Positive": 59, "B-PublicTerms": 23, "B-Reasoning": 38, "B-Responsibility": 1, "B-Strategic": 3, "B-SyntacticComplexity": 7, "B-Uncertainty": 48, "B-Updates": 20, "I-AcademicTerms": 32, "I-AcademicWritingMoves": 2, "I-Character": 44, "I-Citation": 34, "I-CitationAuthority": 33, "I-CitationHedged": 61, "I-ConfidenceHedged": 42, "I-ConfidenceHigh": 58, "I-ConfidenceLow": 50, "I-Contingent": 4, "I-Description": 9, "I-Facilitate": 40, "I-FirstPerson": 43, "I-ForceStressed": 62, "I-Future": 71, "I-InformationChange": 68, "I-InformationChangeNegative": 55, "I-InformationChangePositive": 64, 
"I-InformationExposition": 39, "I-InformationPlace": 13, "I-InformationReportVerbs": 21, "I-InformationStates": 5, "I-InformationTopics": 56, "I-Inquiry": 54, "I-Interactive": 6, "I-MetadiscourseCohesive": 24, "I-MetadiscourseInteractive": 27, "I-Narrative": 10, "I-Negative": 29, "I-Positive": 73, "I-PublicTerms": 36, "I-Reasoning": 15, "I-Responsibility": 14, "I-Strategic": 51, "I-SyntacticComplexity": 52, "I-Uncertainty": 66, "I-Updates": 28, "O": 25, "PAD": 75 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "position_embedding_type": "absolute", "transformers_version": "4.3.3", "type_vocab_size": 2, "use_cache": true, "vocab_size": 28996 }