adaptors run
This view is limited to 50 files because it contains too many changes. See the raw diff for the full commit.
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/args.json +33 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/cola_bert-base-uncased_train_loss.png +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/cola_bert-base-uncased_validation_loss.png +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/logfile.log +205 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/adapter_config.json +41 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/all_results.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/all_results_val.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/eval_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/gpu_stats.json +130 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/head_config.json +21 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_adapter.bin +3 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_model_head.bin +3 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/special_tokens_map.json +7 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/tokenizer.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/tokenizer_config.json +56 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/val_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/vocab.txt +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/adapter_config.json +41 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/all_results.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/all_results_val.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/eval_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/gpu_stats.json +130 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/head_config.json +21 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_adapter.bin +3 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_model_head.bin +3 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/special_tokens_map.json +7 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/tokenizer.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/tokenizer_config.json +56 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/val_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/vocab.txt +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/adapter_config.json +41 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/all_results.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/all_results_val.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/eval_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/gpu_stats.json +130 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/head_config.json +21 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_adapter.bin +3 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_model_head.bin +3 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/special_tokens_map.json +7 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/tokenizer.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/tokenizer_config.json +56 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/val_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/vocab.txt +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/adapter_config.json +41 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/all_results.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/all_results_val.json +1 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/eval_res.json +0 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/gpu_stats.json +130 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/head_config.json +21 -0
- outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/pytorch_adapter.bin +3 -0
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/args.json
ADDED
@@ -0,0 +1,33 @@
+{
+    "task_name": "cola",
+    "train_file": null,
+    "validation_file": null,
+    "max_length": 300,
+    "pad_to_max_length": false,
+    "model_name_or_path": "bert-base-uncased",
+    "use_slow_tokenizer": false,
+    "per_device_train_batch_size": 8,
+    "per_device_eval_batch_size": 8,
+    "learning_rate": 0.0001,
+    "max_grad_norm": 0.5,
+    "weight_decay": 0.0,
+    "num_train_epochs": 3,
+    "max_train_steps": 10000,
+    "gradient_accumulation_steps": 1,
+    "lr_scheduler_type": "linear",
+    "num_warmup_steps": 0,
+    "output_dir": "./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000",
+    "seed": 12345,
+    "push_to_hub": false,
+    "hub_model_id": null,
+    "hub_token": null,
+    "checkpointing_steps": "2500",
+    "resume_from_checkpoint": null,
+    "with_tracking": false,
+    "report_to": "all",
+    "ignore_mismatched_sizes": true,
+    "save_train_results": false,
+    "testing_set": "train_val",
+    "lm_head": true,
+    "leave_out": null
+}
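The run's configuration is duplicated between args.json and the directory name. A minimal sketch of reloading it and checking the naming convention `{model}_adapters{testing_set}_{lr}_{seed}_{batch}_{steps}` (inferred from this run, not guaranteed by the script):

```python
import json
from pathlib import Path

run_dir = Path("outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000")

# args.json is a plain dump of the training arguments.
args = json.loads((run_dir / "args.json").read_text())

# Sanity-check the directory naming convention inferred from this run.
expected = (f"{args['model_name_or_path']}_adapters{args['testing_set']}"
            f"_{args['learning_rate']}_{args['seed']}"
            f"_{args['per_device_train_batch_size']}_{args['max_train_steps']}")
assert run_dir.name == expected, (run_dir.name, expected)
```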
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/cola_bert-base-uncased_train_loss.png
ADDED
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/cola_bert-base-uncased_validation_loss.png
ADDED
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/logfile.log
ADDED
@@ -0,0 +1,205 @@
+06/01/2024 01:12:39 - INFO - __main__ - Number of labels detected = 2
+06/01/2024 01:12:48 - INFO - adapters.heads.model_mixin - Adding head 'default' with config {'head_type': 'masked_lm', 'vocab_size': 30522, 'embedding_size': 768, 'layers': 2, 'activation_function': 'gelu', 'layer_norm': True, 'bias': True, 'shift_labels': False, 'label2id': None}.
+06/01/2024 01:12:49 - INFO - __main__ - Number of labels detected = 2
+06/01/2024 01:12:49 - INFO - adapters.heads.model_mixin - Adding head 'cola' with config {'head_type': 'classification', 'num_labels': 2, 'layers': 2, 'activation_function': 'tanh', 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'use_pooler': False, 'bias': True, 'dropout_prob': None}.
+06/01/2024 01:12:49 - INFO - adapters.configuration.model_adapters_config - Adding adapter 'cola'.
+06/01/2024 01:12:49 - INFO - __main__ - ================================================================================
+Name                     Architecture         #Param      %Param  Active   Train
+--------------------------------------------------------------------------------
+cola                     bottleneck        1,789,056       1.634       1       1
+--------------------------------------------------------------------------------
+Full model                              109,482,240     100.000               0
+================================================================================
+06/01/2024 01:12:49 - INFO - __main__ - printing model
+06/01/2024 01:12:49 - INFO - __main__ - BertAdapterModel(
+  (bert): BertModel(
+    (embeddings): BertEmbeddings(
+      (word_embeddings): Embedding(30522, 768, padding_idx=0)
+      (position_embeddings): Embedding(512, 768)
+      (token_type_embeddings): Embedding(2, 768)
+      (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+      (dropout): Dropout(p=0.1, inplace=False)
+    )
+    (encoder): BertEncoder(
+      (layer): ModuleList(
+        (0-11): 12 x BertLayer(
+          (attention): BertAttention(
+            (self): BertSelfAttentionWithAdapters(
+              (query): LoRALinearTorch(
+                in_features=768, out_features=768, bias=True
+                (loras): ModuleDict()
+              )
+              (key): LoRALinearTorch(
+                in_features=768, out_features=768, bias=True
+                (loras): ModuleDict()
+              )
+              (value): LoRALinearTorch(
+                in_features=768, out_features=768, bias=True
+                (loras): ModuleDict()
+              )
+              (dropout): Dropout(p=0.1, inplace=False)
+              (prefix_tuning): PrefixTuningLayer(
+                (prefix_gates): ModuleDict()
+                (pool): PrefixTuningPool(
+                  (prefix_tunings): ModuleDict()
+                )
+              )
+            )
+            (output): BertSelfOutputWithAdapters(
+              (dense): Linear(in_features=768, out_features=768, bias=True)
+              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (dropout): Dropout(p=0.1, inplace=False)
+              (adapters): ModuleDict(
+                (cola): Adapter(
+                  (non_linearity): Activation_Function_Class(
+                    (f): SiLU()
+                  )
+                  (adapter_down): Sequential(
+                    (0): Linear(in_features=768, out_features=48, bias=True)
+                    (1): Activation_Function_Class(
+                      (f): SiLU()
+                    )
+                  )
+                  (adapter_up): Linear(in_features=48, out_features=768, bias=True)
+                  (dropout): Dropout(p=0.0, inplace=False)
+                )
+              )
+              (adapter_fusion_layer): ModuleDict()
+            )
+          )
+          (intermediate): BertIntermediate(
+            (dense): LoRALinearTorch(
+              in_features=768, out_features=3072, bias=True
+              (loras): ModuleDict()
+            )
+            (intermediate_act_fn): GELUActivation()
+          )
+          (output): BertOutputWithAdapters(
+            (dense): LoRALinearTorch(
+              in_features=3072, out_features=768, bias=True
+              (loras): ModuleDict()
+            )
+            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            (dropout): Dropout(p=0.1, inplace=False)
+            (adapters): ModuleDict(
+              (cola): Adapter(
+                (non_linearity): Activation_Function_Class(
+                  (f): SiLU()
+                )
+                (adapter_down): Sequential(
+                  (0): Linear(in_features=768, out_features=48, bias=True)
+                  (1): Activation_Function_Class(
+                    (f): SiLU()
+                  )
+                )
+                (adapter_up): Linear(in_features=48, out_features=768, bias=True)
+                (dropout): Dropout(p=0.0, inplace=False)
+              )
+            )
+            (adapter_fusion_layer): ModuleDict()
+          )
+        )
+      )
+    )
+    (pooler): BertPooler(
+      (dense): Linear(in_features=768, out_features=768, bias=True)
+      (activation): Tanh()
+    )
+    (invertible_adapters): ModuleDict()
+    (shared_parameters): ModuleDict()
+    (prefix_tuning): PrefixTuningPool(
+      (prefix_tunings): ModuleDict()
+    )
+    (prompt_tuning): PromptTuningLayer(
+      (base_model_embeddings): Embedding(30522, 768, padding_idx=0)
+      (prompt_tunings): ModuleDict()
+    )
+  )
+  (heads): ModuleDict(
+    (default): BertStyleMaskedLMHead(
+      (0): Linear(in_features=768, out_features=768, bias=True)
+      (1): Activation_Function_Class(
+        (f): GELUActivation()
+      )
+      (2): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+      (3): Linear(in_features=768, out_features=30522, bias=True)
+    )
+    (cola): ClassificationHead(
+      (0): Dropout(p=0.1, inplace=False)
+      (1): Linear(in_features=768, out_features=768, bias=True)
+      (2): Activation_Function_Class(
+        (f): Tanh()
+      )
+      (3): Dropout(p=0.1, inplace=False)
+      (4): Linear(in_features=768, out_features=2, bias=True)
+    )
+  )
+)
+06/01/2024 01:12:50 - INFO - __main__ - Sample 3412 of the training set: {'input_ids': [101, 1045, 12781, 1996, 7427, 1012, 102], 'token_type_ids': [0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1], 'labels': 1}.
+06/01/2024 01:12:50 - INFO - __main__ - Sample 6002 of the training set: {'input_ids': [101, 1045, 2442, 2064, 4521, 22088, 2015, 1012, 102], 'token_type_ids': [0, 0, 0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1], 'labels': 0}.
+06/01/2024 01:12:50 - INFO - __main__ - Sample 83 of the training set: {'input_ids': [101, 1996, 7764, 22257, 2993, 2000, 1996, 2598, 1012, 102], 'token_type_ids': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 'attention_mask': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 'labels': 0}.
+06/01/2024 01:12:50 - INFO - __main__ - Max training steps before recalculation = 10000
+06/01/2024 01:12:50 - INFO - __main__ - num_update_steps_per_epoch initial = 855
+06/01/2024 01:12:50 - INFO - __main__ - num training epochs initial = 3
+06/01/2024 01:12:50 - INFO - __main__ - Adjusted num_train_epochs based on max_train_steps: 3
+06/01/2024 01:12:50 - INFO - __main__ - num_update_steps_per_epoch before recalculation = 855
+06/01/2024 01:12:50 - INFO - __main__ - num_update_steps_per_epoch after recalculation = 855
+06/01/2024 01:12:50 - INFO - __main__ - num training epochs before recalculation = 12
+06/01/2024 01:12:52 - INFO - __main__ - ***** Running training *****
+06/01/2024 01:12:52 - INFO - __main__ - Num examples = 6840
+06/01/2024 01:12:52 - INFO - __main__ - Num Epochs = 12
+06/01/2024 01:12:52 - INFO - __main__ - Instantaneous batch size per device = 8
+06/01/2024 01:12:52 - INFO - __main__ - Total train batch size (w. parallel, distributed & accumulation) = 8
+06/01/2024 01:12:52 - INFO - __main__ - Gradient Accumulation steps = 1
+06/01/2024 01:12:52 - INFO - __main__ - Total optimization steps = 10000
+06/01/2024 01:12:56 - INFO - __main__ - epoch 0: {'matthews_correlation': -0.02929206145132745}
+06/01/2024 01:12:56 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/adapter_config.json
+06/01/2024 01:12:56 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_adapter.bin
+06/01/2024 01:12:56 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/head_config.json
+06/01/2024 01:12:56 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_model_head.bin
+06/01/2024 01:13:02 - INFO - __main__ - epoch 0: {'matthews_correlation': 0.0}
+06/01/2024 01:13:02 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/adapter_config.json
+06/01/2024 01:13:02 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_adapter.bin
+06/01/2024 01:13:02 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/head_config.json
+06/01/2024 01:13:02 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_model_head.bin
+06/01/2024 01:15:18 - INFO - __main__ - epoch 2: {'matthews_correlation': 0.5327292010480984}
+06/01/2024 01:15:18 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/adapter_config.json
+06/01/2024 01:15:18 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_adapter.bin
+06/01/2024 01:15:18 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/head_config.json
+06/01/2024 01:15:18 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_model_head.bin
+06/01/2024 01:15:25 - INFO - __main__ - epoch 2: {'matthews_correlation': 0.5129119173777361}
+06/01/2024 01:15:25 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/adapter_config.json
+06/01/2024 01:15:25 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_adapter.bin
+06/01/2024 01:15:25 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/head_config.json
+06/01/2024 01:15:25 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_model_head.bin
+06/01/2024 01:17:40 - INFO - __main__ - epoch 5: {'matthews_correlation': 0.5377587696044389}
+06/01/2024 01:17:40 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/adapter_config.json
+06/01/2024 01:17:40 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_adapter.bin
+06/01/2024 01:17:40 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/head_config.json
+06/01/2024 01:17:40 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_model_head.bin
+06/01/2024 01:17:46 - INFO - __main__ - epoch 5: {'matthews_correlation': 0.5382301098911769}
+06/01/2024 01:17:46 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/adapter_config.json
+06/01/2024 01:17:46 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_adapter.bin
+06/01/2024 01:17:46 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/head_config.json
+06/01/2024 01:17:46 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_model_head.bin
+06/01/2024 01:20:03 - INFO - __main__ - epoch 8: {'matthews_correlation': 0.5454001814167142}
+06/01/2024 01:20:03 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/adapter_config.json
+06/01/2024 01:20:03 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/pytorch_adapter.bin
+06/01/2024 01:20:03 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/head_config.json
+06/01/2024 01:20:03 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/pytorch_model_head.bin
+06/01/2024 01:20:09 - INFO - __main__ - epoch 8: {'matthews_correlation': 0.5577556248606417}
+06/01/2024 01:20:09 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/adapter_config.json
+06/01/2024 01:20:09 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/pytorch_adapter.bin
+06/01/2024 01:20:09 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/head_config.json
+06/01/2024 01:20:09 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/pytorch_model_head.bin
+06/01/2024 01:22:28 - INFO - __main__ - epoch 11: {'matthews_correlation': 0.5332198659134496}
+06/01/2024 01:22:28 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/adapter_config.json
+06/01/2024 01:22:28 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/pytorch_adapter.bin
+06/01/2024 01:22:28 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/head_config.json
+06/01/2024 01:22:28 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/pytorch_model_head.bin
+06/01/2024 01:22:34 - INFO - __main__ - epoch 11: {'matthews_correlation': 0.5514570047640966}
+06/01/2024 01:22:34 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/adapter_config.json
+06/01/2024 01:22:35 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/pytorch_adapter.bin
+06/01/2024 01:22:35 - INFO - adapters.loading - Configuration saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/head_config.json
+06/01/2024 01:22:35 - INFO - adapters.loading - Module weights saved in ./outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_9999/pytorch_model_head.bin
+06/01/2024 01:22:35 - INFO - __main__ - ***** Completed training *****
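The log above fixes the moving parts: a frozen bert-base-uncased backbone, one bottleneck adapter named 'cola' (1,789,056 trainable parameters, 1.634% of the model), and a 2-label classification head. A minimal sketch of that setup with the `adapters` library (v0.2.1 per the saved configs); the config preset is inferred from adapter_config.json below, and the actual training script is not part of this commit:

```python
from adapters import BertAdapterModel, DoubleSeqBnConfig

model = BertAdapterModel.from_pretrained("bert-base-uncased")

# Houlsby-style bottleneck: adapters in both the attention and output
# sub-layers, reduction_factor 16, swish non-linearity -- this matches
# the saved adapter_config.json (assumption: the run used this preset).
model.add_adapter("cola", config=DoubleSeqBnConfig())

# Classification head matching head_config.json (2 labels, tanh, no pooler).
model.add_classification_head("cola", num_labels=2, activation_function="tanh")

# Freeze the base model and train only the adapter (+ head), giving the
# ~1.8M trainable parameters (1.634%) reported in the log.
model.train_adapter("cola")
model.set_active_adapters("cola")
```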
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/adapter_config.json
ADDED
@@ -0,0 +1,41 @@
+{
+    "config": {
+        "adapter_residual_before_ln": false,
+        "cross_adapter": false,
+        "dropout": 0.0,
+        "factorized_phm_W": true,
+        "factorized_phm_rule": false,
+        "hypercomplex_nonlinearity": "glorot-uniform",
+        "init_weights": "bert",
+        "inv_adapter": null,
+        "inv_adapter_reduction_factor": null,
+        "is_parallel": false,
+        "learn_phm": true,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": true,
+        "non_linearity": "swish",
+        "original_ln_after": true,
+        "original_ln_before": false,
+        "output_adapter": true,
+        "phm_bias": true,
+        "phm_c_init": "normal",
+        "phm_dim": 4,
+        "phm_init_range": 0.0001,
+        "phm_layer": false,
+        "phm_rank": 1,
+        "reduction_factor": 16,
+        "residual_before_ln": true,
+        "scaling": 1.0,
+        "shared_W_phm": false,
+        "shared_phm_rule": true,
+        "use_gating": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
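These values look like the library's Houlsby-style preset (`DoubleSeqBnConfig` in adapters 0.2.x: bottlenecks after both the attention and FFN sub-layers, reduction factor 16, swish). A small, assumption-laden sketch checking that the saved config round-trips to that preset:

```python
import json
from adapters import AdapterConfig, DoubleSeqBnConfig

with open("outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/"
          "step_0/adapter_config.json") as f:
    saved = json.load(f)

# Rebuild a config object from the saved dict and compare it, field by field,
# against the library's Houlsby-style preset.
loaded = AdapterConfig.load(saved["config"])
print(loaded.to_dict() == DoubleSeqBnConfig().to_dict())  # expected: True
```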
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/all_results.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": -0.02929206145132745}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/all_results_val.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.0}
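Both files hold a single Matthews correlation coefficient (MCC), CoLA's standard metric; the slightly negative eval value and the exact 0.0 validation value at step 0 are what an untrained head at chance level produces. A toy sketch of the metric itself (using scikit-learn; the run's own evaluation code is not in this commit):

```python
from sklearn.metrics import matthews_corrcoef

# MCC stays informative under CoLA's label imbalance:
# +1.0 is perfect, 0.0 is chance-level, -1.0 is total disagreement.
y_true = [1, 1, 0, 1, 0, 1, 1, 0]  # toy gold acceptability labels
y_pred = [1, 0, 0, 1, 0, 1, 1, 1]  # toy predictions
print(matthews_corrcoef(y_true, y_pred))
```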
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/eval_res.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/gpu_stats.json
ADDED
@@ -0,0 +1,130 @@
+{
+    "memory_allocated": 459623936,
+    "max_memory_allocated": 471682048,
+    "memory_reserved": 532676608,
+    "max_memory_reserved": 532676608,
+    "memory_stats": {
+        "active.all.allocated": 229718,
+        "active.all.current": 319,
+        "active.all.freed": 229399,
+        "active.all.peak": 337,
+        "active.large_pool.allocated": 7614,
+        "active.large_pool.current": 78,
+        "active.large_pool.freed": 7536,
+        "active.large_pool.peak": 80,
+        "active.small_pool.allocated": 222104,
+        "active.small_pool.current": 241,
+        "active.small_pool.freed": 221863,
+        "active.small_pool.peak": 259,
+        "active_bytes.all.allocated": 104486025728,
+        "active_bytes.all.current": 459623936,
+        "active_bytes.all.freed": 104026401792,
+        "active_bytes.all.peak": 471682048,
+        "active_bytes.large_pool.allocated": 16832397312,
+        "active_bytes.large_pool.current": 451805184,
+        "active_bytes.large_pool.freed": 16380592128,
+        "active_bytes.large_pool.peak": 458686464,
+        "active_bytes.small_pool.allocated": 87653628416,
+        "active_bytes.small_pool.current": 7818752,
+        "active_bytes.small_pool.freed": 87645809664,
+        "active_bytes.small_pool.peak": 16436224,
+        "allocated_bytes.all.allocated": 104486025728,
+        "allocated_bytes.all.current": 459623936,
+        "allocated_bytes.all.freed": 104026401792,
+        "allocated_bytes.all.peak": 471682048,
+        "allocated_bytes.large_pool.allocated": 16832397312,
+        "allocated_bytes.large_pool.current": 451805184,
+        "allocated_bytes.large_pool.freed": 16380592128,
+        "allocated_bytes.large_pool.peak": 458686464,
+        "allocated_bytes.small_pool.allocated": 87653628416,
+        "allocated_bytes.small_pool.current": 7818752,
+        "allocated_bytes.small_pool.freed": 87645809664,
+        "allocated_bytes.small_pool.peak": 16436224,
+        "allocation.all.allocated": 229718,
+        "allocation.all.current": 319,
+        "allocation.all.freed": 229399,
+        "allocation.all.peak": 337,
+        "allocation.large_pool.allocated": 7614,
+        "allocation.large_pool.current": 78,
+        "allocation.large_pool.freed": 7536,
+        "allocation.large_pool.peak": 80,
+        "allocation.small_pool.allocated": 222104,
+        "allocation.small_pool.current": 241,
+        "allocation.small_pool.freed": 221863,
+        "allocation.small_pool.peak": 259,
+        "inactive_split.all.allocated": 110877,
+        "inactive_split.all.current": 23,
+        "inactive_split.all.freed": 110854,
+        "inactive_split.all.peak": 34,
+        "inactive_split.large_pool.allocated": 7159,
+        "inactive_split.large_pool.current": 18,
+        "inactive_split.large_pool.freed": 7141,
+        "inactive_split.large_pool.peak": 19,
+        "inactive_split.small_pool.allocated": 103718,
+        "inactive_split.small_pool.current": 5,
+        "inactive_split.small_pool.freed": 103713,
+        "inactive_split.small_pool.peak": 15,
+        "inactive_split_bytes.all.allocated": 111369324032,
+        "inactive_split_bytes.all.current": 43692544,
+        "inactive_split_bytes.all.freed": 111325631488,
+        "inactive_split_bytes.all.peak": 67346432,
+        "inactive_split_bytes.large_pool.allocated": 18599641088,
+        "inactive_split_bytes.large_pool.current": 41025536,
+        "inactive_split_bytes.large_pool.freed": 18558615552,
+        "inactive_split_bytes.large_pool.peak": 59146240,
+        "inactive_split_bytes.small_pool.allocated": 92769682944,
+        "inactive_split_bytes.small_pool.current": 2667008,
+        "inactive_split_bytes.small_pool.freed": 92767015936,
+        "inactive_split_bytes.small_pool.peak": 8200192,
+        "max_split_size": -1,
+        "num_alloc_retries": 0,
+        "num_device_alloc": 30,
+        "num_device_free": 0,
+        "num_ooms": 0,
+        "num_sync_all_streams": 0,
+        "oversize_allocations.allocated": 0,
+        "oversize_allocations.current": 0,
+        "oversize_allocations.freed": 0,
+        "oversize_allocations.peak": 0,
+        "oversize_segments.allocated": 0,
+        "oversize_segments.current": 0,
+        "oversize_segments.freed": 0,
+        "oversize_segments.peak": 0,
+        "requested_bytes.all.allocated": 100127988300,
+        "requested_bytes.all.current": 458479208,
+        "requested_bytes.all.freed": 99669509092,
+        "requested_bytes.all.peak": 470535000,
+        "requested_bytes.large_pool.allocated": 12486326272,
+        "requested_bytes.large_pool.current": 450672640,
+        "requested_bytes.large_pool.freed": 12035653632,
+        "requested_bytes.large_pool.peak": 457553920,
+        "requested_bytes.small_pool.allocated": 87641662028,
+        "requested_bytes.small_pool.current": 7806568,
+        "requested_bytes.small_pool.freed": 87633855460,
+        "requested_bytes.small_pool.peak": 16421720,
+        "reserved_bytes.all.allocated": 532676608,
+        "reserved_bytes.all.current": 532676608,
+        "reserved_bytes.all.freed": 0,
+        "reserved_bytes.all.peak": 532676608,
+        "reserved_bytes.large_pool.allocated": 513802240,
+        "reserved_bytes.large_pool.current": 513802240,
+        "reserved_bytes.large_pool.freed": 0,
+        "reserved_bytes.large_pool.peak": 513802240,
+        "reserved_bytes.small_pool.allocated": 18874368,
+        "reserved_bytes.small_pool.current": 18874368,
+        "reserved_bytes.small_pool.freed": 0,
+        "reserved_bytes.small_pool.peak": 18874368,
+        "segment.all.allocated": 30,
+        "segment.all.current": 30,
+        "segment.all.freed": 0,
+        "segment.all.peak": 30,
+        "segment.large_pool.allocated": 21,
+        "segment.large_pool.current": 21,
+        "segment.large_pool.freed": 0,
+        "segment.large_pool.peak": 21,
+        "segment.small_pool.allocated": 9,
+        "segment.small_pool.current": 9,
+        "segment.small_pool.freed": 0,
+        "segment.small_pool.peak": 9
+    }
+}
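Every field in gpu_stats.json maps onto PyTorch's CUDA caching-allocator counters. A minimal sketch of how such a file could be produced (the commit does not include the actual collection code):

```python
import json
import torch

def dump_gpu_stats(path, device=None):
    """Snapshot the CUDA caching-allocator counters to a JSON file."""
    stats = {
        "memory_allocated": torch.cuda.memory_allocated(device),
        "max_memory_allocated": torch.cuda.max_memory_allocated(device),
        "memory_reserved": torch.cuda.memory_reserved(device),
        "max_memory_reserved": torch.cuda.max_memory_reserved(device),
        # memory_stats() returns the flat "active.all.allocated"-style
        # keys seen above.
        "memory_stats": dict(torch.cuda.memory_stats(device)),
    }
    with open(path, "w") as f:
        json.dump(stats, f, indent=4, sort_keys=True)

dump_gpu_stats("gpu_stats.json")
```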
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/head_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+    "config": {
+        "activation_function": "tanh",
+        "bias": true,
+        "dropout_prob": null,
+        "head_type": "classification",
+        "label2id": {
+            "LABEL_0": 0,
+            "LABEL_1": 1
+        },
+        "layers": 2,
+        "num_labels": 2,
+        "use_pooler": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_adapter.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29b06f7b9617fec3c5bc63794ca9c89f4238d46db9edb1de32e4dd4e5086fae0
+size 7191062
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/pytorch_model_head.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:244a549f85f7dcaa9f5082c6ca9f115dff60bbcbbd8534f0a78f92ef9b70720d
+size 2370664
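These two LFS pointers are the trained weights themselves: the ~7.2 MB bottleneck adapter and the ~2.4 MB classification head. With the `adapters` library such files are written by `model.save_adapter(dir, "cola", with_head=True)` and can be restored into a fresh base model; a minimal sketch, assuming adapters 0.2.x:

```python
from adapters import BertAdapterModel

ckpt = ("outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/"
        "step_0")

model = BertAdapterModel.from_pretrained("bert-base-uncased")

# Reads adapter_config.json + pytorch_adapter.bin, and, with the head files
# present, head_config.json + pytorch_model_head.bin as well.
name = model.load_adapter(ckpt, with_head=True)
model.set_active_adapters(name)
```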
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/special_tokens_map.json
ADDED
@@ -0,0 +1,7 @@
+{
+    "cls_token": "[CLS]",
+    "mask_token": "[MASK]",
+    "pad_token": "[PAD]",
+    "sep_token": "[SEP]",
+    "unk_token": "[UNK]"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/tokenizer_config.json
ADDED
@@ -0,0 +1,56 @@
+{
+    "added_tokens_decoder": {
+        "0": {
+            "content": "[PAD]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "100": {
+            "content": "[UNK]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "101": {
+            "content": "[CLS]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "102": {
+            "content": "[SEP]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "103": {
+            "content": "[MASK]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        }
+    },
+    "clean_up_tokenization_spaces": true,
+    "cls_token": "[CLS]",
+    "do_lower_case": true,
+    "mask_token": "[MASK]",
+    "model_max_length": 512,
+    "pad_token": "[PAD]",
+    "padding_side": "left",
+    "sep_token": "[SEP]",
+    "strip_accents": null,
+    "tokenize_chinese_chars": true,
+    "tokenizer_class": "BertTokenizer",
+    "unk_token": "[UNK]"
+}
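The tokenizer files make each checkpoint directory self-contained; note that `padding_side` was saved as "left", which is unusual for BERT and presumably reflects the training script's settings. A minimal loading sketch:

```python
from transformers import AutoTokenizer

ckpt = ("outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/"
        "step_0")

# Reads tokenizer_config.json, special_tokens_map.json, vocab.txt and
# tokenizer.json from the checkpoint directory.
tok = AutoTokenizer.from_pretrained(ckpt)
print(tok("This checkpoint is self-contained."))
```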
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/val_res.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_0/vocab.txt
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/adapter_config.json
ADDED
@@ -0,0 +1,41 @@
+{
+    "config": {
+        "adapter_residual_before_ln": false,
+        "cross_adapter": false,
+        "dropout": 0.0,
+        "factorized_phm_W": true,
+        "factorized_phm_rule": false,
+        "hypercomplex_nonlinearity": "glorot-uniform",
+        "init_weights": "bert",
+        "inv_adapter": null,
+        "inv_adapter_reduction_factor": null,
+        "is_parallel": false,
+        "learn_phm": true,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": true,
+        "non_linearity": "swish",
+        "original_ln_after": true,
+        "original_ln_before": false,
+        "output_adapter": true,
+        "phm_bias": true,
+        "phm_c_init": "normal",
+        "phm_dim": 4,
+        "phm_init_range": 0.0001,
+        "phm_layer": false,
+        "phm_rank": 1,
+        "reduction_factor": 16,
+        "residual_before_ln": true,
+        "scaling": 1.0,
+        "shared_W_phm": false,
+        "shared_phm_rule": true,
+        "use_gating": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/all_results.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.5327292010480984}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/all_results_val.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.5129119173777361}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/eval_res.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/gpu_stats.json
ADDED
@@ -0,0 +1,130 @@
+{
+    "memory_allocated": 487207936,
+    "max_memory_allocated": 699914240,
+    "memory_reserved": 805306368,
+    "max_memory_reserved": 805306368,
+    "memory_stats": {
+        "active.all.allocated": 3730517,
+        "active.all.current": 520,
+        "active.all.freed": 3729997,
+        "active.all.peak": 777,
+        "active.large_pool.allocated": 141558,
+        "active.large_pool.current": 81,
+        "active.large_pool.freed": 141477,
+        "active.large_pool.peak": 183,
+        "active.small_pool.allocated": 3588959,
+        "active.small_pool.current": 439,
+        "active.small_pool.freed": 3588520,
+        "active.small_pool.peak": 696,
+        "active_bytes.all.allocated": 1562416010752,
+        "active_bytes.all.current": 487207936,
+        "active_bytes.all.freed": 1561928802816,
+        "active_bytes.all.peak": 699914240,
+        "active_bytes.large_pool.allocated": 312991645696,
+        "active_bytes.large_pool.current": 465043456,
+        "active_bytes.large_pool.freed": 312526602240,
+        "active_bytes.large_pool.peak": 646561792,
+        "active_bytes.small_pool.allocated": 1249424365056,
+        "active_bytes.small_pool.current": 22164480,
+        "active_bytes.small_pool.freed": 1249402200576,
+        "active_bytes.small_pool.peak": 133540864,
+        "allocated_bytes.all.allocated": 1562416010752,
+        "allocated_bytes.all.current": 487207936,
+        "allocated_bytes.all.freed": 1561928802816,
+        "allocated_bytes.all.peak": 699914240,
+        "allocated_bytes.large_pool.allocated": 312991645696,
+        "allocated_bytes.large_pool.current": 465043456,
+        "allocated_bytes.large_pool.freed": 312526602240,
+        "allocated_bytes.large_pool.peak": 646561792,
+        "allocated_bytes.small_pool.allocated": 1249424365056,
+        "allocated_bytes.small_pool.current": 22164480,
+        "allocated_bytes.small_pool.freed": 1249402200576,
+        "allocated_bytes.small_pool.peak": 133540864,
+        "allocation.all.allocated": 3730517,
+        "allocation.all.current": 520,
+        "allocation.all.freed": 3729997,
+        "allocation.all.peak": 777,
+        "allocation.large_pool.allocated": 141558,
+        "allocation.large_pool.current": 81,
+        "allocation.large_pool.freed": 141477,
+        "allocation.large_pool.peak": 183,
+        "allocation.small_pool.allocated": 3588959,
+        "allocation.small_pool.current": 439,
+        "allocation.small_pool.freed": 3588520,
+        "allocation.small_pool.peak": 696,
+        "inactive_split.all.allocated": 2037607,
+        "inactive_split.all.current": 46,
+        "inactive_split.all.freed": 2037561,
+        "inactive_split.all.peak": 119,
+        "inactive_split.large_pool.allocated": 120406,
+        "inactive_split.large_pool.current": 19,
+        "inactive_split.large_pool.freed": 120387,
+        "inactive_split.large_pool.peak": 23,
+        "inactive_split.small_pool.allocated": 1917201,
+        "inactive_split.small_pool.current": 27,
+        "inactive_split.small_pool.freed": 1917174,
+        "inactive_split.small_pool.peak": 99,
+        "inactive_split_bytes.all.allocated": 1674415169536,
+        "inactive_split_bytes.all.current": 60148736,
+        "inactive_split_bytes.all.freed": 1674355020800,
+        "inactive_split_bytes.all.peak": 149930496,
+        "inactive_split_bytes.large_pool.allocated": 355511451648,
+        "inactive_split_bytes.large_pool.current": 48758784,
+        "inactive_split_bytes.large_pool.freed": 355462692864,
+        "inactive_split_bytes.large_pool.peak": 75759616,
+        "inactive_split_bytes.small_pool.allocated": 1318903717888,
+        "inactive_split_bytes.small_pool.current": 11389952,
+        "inactive_split_bytes.small_pool.freed": 1318892327936,
+        "inactive_split_bytes.small_pool.peak": 82985472,
+        "max_split_size": -1,
+        "num_alloc_retries": 0,
+        "num_device_alloc": 97,
+        "num_device_free": 0,
+        "num_ooms": 0,
+        "num_sync_all_streams": 0,
+        "oversize_allocations.allocated": 0,
+        "oversize_allocations.current": 0,
+        "oversize_allocations.freed": 0,
+        "oversize_allocations.peak": 0,
+        "oversize_segments.allocated": 0,
+        "oversize_segments.current": 0,
+        "oversize_segments.freed": 0,
+        "oversize_segments.peak": 0,
+        "requested_bytes.all.allocated": 1505878546100,
+        "requested_bytes.all.current": 486046648,
+        "requested_bytes.all.freed": 1505392499452,
+        "requested_bytes.all.peak": 680092736,
+        "requested_bytes.large_pool.allocated": 256608180224,
+        "requested_bytes.large_pool.current": 463910912,
+        "requested_bytes.large_pool.freed": 256144269312,
+        "requested_bytes.large_pool.peak": 626776064,
+        "requested_bytes.small_pool.allocated": 1249270365876,
+        "requested_bytes.small_pool.current": 22135736,
+        "requested_bytes.small_pool.freed": 1249248230140,
+        "requested_bytes.small_pool.peak": 133501696,
+        "reserved_bytes.all.allocated": 805306368,
+        "reserved_bytes.all.current": 805306368,
+        "reserved_bytes.all.freed": 0,
+        "reserved_bytes.all.peak": 805306368,
+        "reserved_bytes.large_pool.allocated": 660602880,
+        "reserved_bytes.large_pool.current": 660602880,
+        "reserved_bytes.large_pool.freed": 0,
+        "reserved_bytes.large_pool.peak": 660602880,
+        "reserved_bytes.small_pool.allocated": 144703488,
+        "reserved_bytes.small_pool.current": 144703488,
+        "reserved_bytes.small_pool.freed": 0,
+        "reserved_bytes.small_pool.peak": 144703488,
+        "segment.all.allocated": 97,
+        "segment.all.current": 97,
+        "segment.all.freed": 0,
+        "segment.all.peak": 97,
+        "segment.large_pool.allocated": 28,
+        "segment.large_pool.current": 28,
+        "segment.large_pool.freed": 0,
+        "segment.large_pool.peak": 28,
+        "segment.small_pool.allocated": 69,
+        "segment.small_pool.current": 69,
+        "segment.small_pool.freed": 0,
+        "segment.small_pool.peak": 69
+    }
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/head_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+    "config": {
+        "activation_function": "tanh",
+        "bias": true,
+        "dropout_prob": null,
+        "head_type": "classification",
+        "label2id": {
+            "LABEL_0": 0,
+            "LABEL_1": 1
+        },
+        "layers": 2,
+        "num_labels": 2,
+        "use_pooler": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_adapter.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5134ca3cfee61e335e337783a5876ed9cd5b576708a1d9bb7e6304ea56fda07e
+size 7191062
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/pytorch_model_head.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c0a7ac05ba10838b335682f54ec38f45f5cbdd8c34f16fddf95ab0e3c27b6e12
+size 2370664
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/special_tokens_map.json
ADDED
@@ -0,0 +1,7 @@
+{
+    "cls_token": "[CLS]",
+    "mask_token": "[MASK]",
+    "pad_token": "[PAD]",
+    "sep_token": "[SEP]",
+    "unk_token": "[UNK]"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/tokenizer.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/tokenizer_config.json
ADDED
@@ -0,0 +1,56 @@
+{
+    "added_tokens_decoder": {
+        "0": {
+            "content": "[PAD]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "100": {
+            "content": "[UNK]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "101": {
+            "content": "[CLS]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "102": {
+            "content": "[SEP]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "103": {
+            "content": "[MASK]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        }
+    },
+    "clean_up_tokenization_spaces": true,
+    "cls_token": "[CLS]",
+    "do_lower_case": true,
+    "mask_token": "[MASK]",
+    "model_max_length": 512,
+    "pad_token": "[PAD]",
+    "padding_side": "left",
+    "sep_token": "[SEP]",
+    "strip_accents": null,
+    "tokenize_chinese_chars": true,
+    "tokenizer_class": "BertTokenizer",
+    "unk_token": "[UNK]"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/val_res.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_2499/vocab.txt
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/adapter_config.json
ADDED
@@ -0,0 +1,41 @@
+{
+    "config": {
+        "adapter_residual_before_ln": false,
+        "cross_adapter": false,
+        "dropout": 0.0,
+        "factorized_phm_W": true,
+        "factorized_phm_rule": false,
+        "hypercomplex_nonlinearity": "glorot-uniform",
+        "init_weights": "bert",
+        "inv_adapter": null,
+        "inv_adapter_reduction_factor": null,
+        "is_parallel": false,
+        "learn_phm": true,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": true,
+        "non_linearity": "swish",
+        "original_ln_after": true,
+        "original_ln_before": false,
+        "output_adapter": true,
+        "phm_bias": true,
+        "phm_c_init": "normal",
+        "phm_dim": 4,
+        "phm_init_range": 0.0001,
+        "phm_layer": false,
+        "phm_rank": 1,
+        "reduction_factor": 16,
+        "residual_before_ln": true,
+        "scaling": 1.0,
+        "shared_W_phm": false,
+        "shared_phm_rule": true,
+        "use_gating": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/all_results.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.5377587696044389}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/all_results_val.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.5382301098911769}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/eval_res.json
ADDED
The diff for this file is too large to render. See raw diff.
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/gpu_stats.json
ADDED
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"memory_allocated": 487209472,
|
3 |
+
"max_memory_allocated": 699914240,
|
4 |
+
"memory_reserved": 805306368,
|
5 |
+
"max_memory_reserved": 805306368,
|
6 |
+
"memory_stats": {
|
7 |
+
"active.all.allocated": 7232424,
|
8 |
+
"active.all.current": 520,
|
9 |
+
"active.all.freed": 7231904,
|
10 |
+
"active.all.peak": 777,
|
11 |
+
"active.large_pool.allocated": 275501,
|
12 |
+
"active.large_pool.current": 81,
|
13 |
+
"active.large_pool.freed": 275420,
|
14 |
+
"active.large_pool.peak": 183,
|
15 |
+
"active.small_pool.allocated": 6956923,
|
16 |
+
"active.small_pool.current": 439,
|
17 |
+
"active.small_pool.freed": 6956484,
|
18 |
+
"active.small_pool.peak": 696,
|
19 |
+
"active_bytes.all.allocated": 3019362437632,
|
20 |
+
"active_bytes.all.current": 487209472,
|
21 |
+
"active_bytes.all.freed": 3018875228160,
|
22 |
+
"active_bytes.all.peak": 699914240,
|
23 |
+
"active_bytes.large_pool.allocated": 608966467584,
|
24 |
+
"active_bytes.large_pool.current": 465043456,
|
25 |
+
"active_bytes.large_pool.freed": 608501424128,
|
26 |
+
"active_bytes.large_pool.peak": 646561792,
|
27 |
+
"active_bytes.small_pool.allocated": 2410395970048,
|
28 |
+
"active_bytes.small_pool.current": 22166016,
|
29 |
+
"active_bytes.small_pool.freed": 2410373804032,
|
30 |
+
"active_bytes.small_pool.peak": 133540864,
|
31 |
+
"allocated_bytes.all.allocated": 3019362437632,
|
32 |
+
"allocated_bytes.all.current": 487209472,
|
33 |
+
"allocated_bytes.all.freed": 3018875228160,
|
34 |
+
"allocated_bytes.all.peak": 699914240,
|
35 |
+
"allocated_bytes.large_pool.allocated": 608966467584,
|
36 |
+
"allocated_bytes.large_pool.current": 465043456,
|
37 |
+
"allocated_bytes.large_pool.freed": 608501424128,
|
38 |
+
"allocated_bytes.large_pool.peak": 646561792,
|
39 |
+
"allocated_bytes.small_pool.allocated": 2410395970048,
|
40 |
+
"allocated_bytes.small_pool.current": 22166016,
|
41 |
+
"allocated_bytes.small_pool.freed": 2410373804032,
|
42 |
+
"allocated_bytes.small_pool.peak": 133540864,
|
43 |
+
"allocation.all.allocated": 7232424,
|
44 |
+
"allocation.all.current": 520,
|
45 |
+
"allocation.all.freed": 7231904,
|
46 |
+
"allocation.all.peak": 777,
|
47 |
+
"allocation.large_pool.allocated": 275501,
|
48 |
+
"allocation.large_pool.current": 81,
|
49 |
+
"allocation.large_pool.freed": 275420,
|
50 |
+
"allocation.large_pool.peak": 183,
|
51 |
+
"allocation.small_pool.allocated": 6956923,
|
52 |
+
"allocation.small_pool.current": 439,
|
53 |
+
"allocation.small_pool.freed": 6956484,
|
54 |
+
"allocation.small_pool.peak": 696,
|
55 |
+
"inactive_split.all.allocated": 3951350,
|
56 |
+
"inactive_split.all.current": 47,
|
57 |
+
"inactive_split.all.freed": 3951303,
|
58 |
+
"inactive_split.all.peak": 123,
|
59 |
+
"inactive_split.large_pool.allocated": 233878,
|
60 |
+
"inactive_split.large_pool.current": 19,
|
61 |
+
"inactive_split.large_pool.freed": 233859,
|
62 |
+
"inactive_split.large_pool.peak": 23,
|
63 |
+
"inactive_split.small_pool.allocated": 3717472,
|
64 |
+
"inactive_split.small_pool.current": 28,
|
65 |
+
"inactive_split.small_pool.freed": 3717444,
|
66 |
+
"inactive_split.small_pool.peak": 103,
|
67 |
+
"inactive_split_bytes.all.allocated": 3236389921280,
|
68 |
+
"inactive_split_bytes.all.current": 60147200,
|
69 |
+
"inactive_split_bytes.all.freed": 3236329774080,
|
70 |
+
"inactive_split_bytes.all.peak": 149930496,
|
71 |
+
"inactive_split_bytes.large_pool.allocated": 691809476608,
|
72 |
+
"inactive_split_bytes.large_pool.current": 48758784,
|
73 |
+
"inactive_split_bytes.large_pool.freed": 691760717824,
|
74 |
+
"inactive_split_bytes.large_pool.peak": 75759616,
|
75 |
+
"inactive_split_bytes.small_pool.allocated": 2544580444672,
|
76 |
+
"inactive_split_bytes.small_pool.current": 11388416,
|
77 |
+
"inactive_split_bytes.small_pool.freed": 2544569056256,
|
78 |
+
"inactive_split_bytes.small_pool.peak": 82985472,
|
79 |
+
"max_split_size": -1,
|
80 |
+
"num_alloc_retries": 0,
|
81 |
+
"num_device_alloc": 97,
|
82 |
+
"num_device_free": 0,
|
83 |
+
"num_ooms": 0,
|
84 |
+
"num_sync_all_streams": 0,
|
85 |
+
"oversize_allocations.allocated": 0,
|
86 |
+
"oversize_allocations.current": 0,
|
87 |
+
"oversize_allocations.freed": 0,
|
88 |
+
"oversize_allocations.peak": 0,
|
89 |
+
"oversize_segments.allocated": 0,
|
90 |
+
"oversize_segments.current": 0,
|
91 |
+
"oversize_segments.freed": 0,
|
92 |
+
"oversize_segments.peak": 0,
|
93 |
+
"requested_bytes.all.allocated": 2910500708864,
|
94 |
+
"requested_bytes.all.current": 486047224,
|
95 |
+
"requested_bytes.all.freed": 2910014661640,
|
96 |
+
"requested_bytes.all.peak": 680092736,
|
97 |
+
"requested_bytes.large_pool.allocated": 500400650240,
|
98 |
+
"requested_bytes.large_pool.current": 463910912,
|
99 |
+
"requested_bytes.large_pool.freed": 499936739328,
|
100 |
+
"requested_bytes.large_pool.peak": 626776064,
|
101 |
+
"requested_bytes.small_pool.allocated": 2410100058624,
|
102 |
+
"requested_bytes.small_pool.current": 22136312,
|
103 |
+
"requested_bytes.small_pool.freed": 2410077922312,
|
104 |
+
"requested_bytes.small_pool.peak": 133501696,
|
105 |
+
"reserved_bytes.all.allocated": 805306368,
|
106 |
+
"reserved_bytes.all.current": 805306368,
|
107 |
+
"reserved_bytes.all.freed": 0,
|
108 |
+
"reserved_bytes.all.peak": 805306368,
|
109 |
+
"reserved_bytes.large_pool.allocated": 660602880,
|
110 |
+
"reserved_bytes.large_pool.current": 660602880,
|
111 |
+
"reserved_bytes.large_pool.freed": 0,
|
112 |
+
"reserved_bytes.large_pool.peak": 660602880,
|
113 |
+
"reserved_bytes.small_pool.allocated": 144703488,
|
114 |
+
"reserved_bytes.small_pool.current": 144703488,
|
115 |
+
"reserved_bytes.small_pool.freed": 0,
|
116 |
+
"reserved_bytes.small_pool.peak": 144703488,
|
117 |
+
"segment.all.allocated": 97,
|
118 |
+
"segment.all.current": 97,
|
119 |
+
"segment.all.freed": 0,
|
120 |
+
"segment.all.peak": 97,
|
121 |
+
"segment.large_pool.allocated": 28,
|
122 |
+
"segment.large_pool.current": 28,
|
123 |
+
"segment.large_pool.freed": 0,
|
124 |
+
"segment.large_pool.peak": 28,
|
125 |
+
"segment.small_pool.allocated": 69,
|
126 |
+
"segment.small_pool.current": 69,
|
127 |
+
"segment.small_pool.freed": 0,
|
128 |
+
"segment.small_pool.peak": 69
|
129 |
+
}
|
130 |
+
}
|
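The flat dotted keys in this file match what PyTorch's caching-allocator introspection returns, so each per-step gpu_stats.json could plausibly come from a small helper like the sketch below (the function name and output layout are assumptions; the training script is not part of this commit):

import json

import torch

def dump_gpu_stats(path, device=0):
    # torch.cuda.memory_stats() returns a flat dict whose dotted keys
    # ("active_bytes.all.peak", "segment.small_pool.current", ...) are
    # exactly the ones recorded under "memory_stats" above.
    stats = {
        "memory_allocated": torch.cuda.memory_allocated(device),
        "max_memory_allocated": torch.cuda.max_memory_allocated(device),
        "memory_reserved": torch.cuda.memory_reserved(device),
        "max_memory_reserved": torch.cuda.max_memory_reserved(device),
        "memory_stats": dict(torch.cuda.memory_stats(device)),
    }
    with open(path, "w") as f:
        json.dump(stats, f, indent=4, sort_keys=True)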
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/head_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+    "config": {
+        "activation_function": "tanh",
+        "bias": true,
+        "dropout_prob": null,
+        "head_type": "classification",
+        "label2id": {
+            "LABEL_0": 0,
+            "LABEL_1": 1
+        },
+        "layers": 2,
+        "num_labels": 2,
+        "use_pooler": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
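head_config.json records the prediction head attached on top of the frozen backbone: a 2-layer tanh classification head over 768-dim hidden states with two CoLA labels and no pooler. A minimal sketch of creating and saving such a head (API as in adapters/adapter-transformers; the surrounding training loop is assumed):

from adapters import BertAdapterModel

model = BertAdapterModel.from_pretrained("bert-base-uncased")
# Mirrors the saved config: 2 layers, tanh activation, 2 labels, no pooler.
model.add_classification_head(
    "cola",
    num_labels=2,
    layers=2,
    activation_function="tanh",
    use_pooler=False,
)
# Writes head_config.json and pytorch_model_head.bin into the directory.
model.save_head("step_4999", "cola")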
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_adapter.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f127b13ca20b0cf79fa5e76a4b6f405d1af714f891fabec3ca17cc21a8de33b8
+size 7191062
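The weight files themselves live in Git LFS, so the diff only shows the three-line pointer: spec version, SHA-256 of the blob, and its size in bytes. A pointer like the one above is fully determined by the file contents, e.g. (a sketch):

import hashlib
import os

def lfs_pointer(path):
    # A git-lfs v1 pointer: spec URL, sha256 of the blob, byte size.
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return (
        "version https://git-lfs.github.com/spec/v1\n"
        f"oid sha256:{digest}\n"
        f"size {os.path.getsize(path)}\n"
    )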
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/pytorch_model_head.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01f84aeff68abbfe48604e91ae608be12d5470e40b4bfa6e4c790ec9ec58e932
+size 2370664
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/special_tokens_map.json
ADDED
@@ -0,0 +1,7 @@
+{
+    "cls_token": "[CLS]",
+    "mask_token": "[MASK]",
+    "pad_token": "[PAD]",
+    "sep_token": "[SEP]",
+    "unk_token": "[UNK]"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/tokenizer_config.json
ADDED
@@ -0,0 +1,56 @@
+{
+    "added_tokens_decoder": {
+        "0": {
+            "content": "[PAD]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "100": {
+            "content": "[UNK]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "101": {
+            "content": "[CLS]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "102": {
+            "content": "[SEP]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        },
+        "103": {
+            "content": "[MASK]",
+            "lstrip": false,
+            "normalized": false,
+            "rstrip": false,
+            "single_word": false,
+            "special": true
+        }
+    },
+    "clean_up_tokenization_spaces": true,
+    "cls_token": "[CLS]",
+    "do_lower_case": true,
+    "mask_token": "[MASK]",
+    "model_max_length": 512,
+    "pad_token": "[PAD]",
+    "padding_side": "left",
+    "sep_token": "[SEP]",
+    "strip_accents": null,
+    "tokenize_chinese_chars": true,
+    "tokenizer_class": "BertTokenizer",
+    "unk_token": "[UNK]"
+}
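Because each step_* directory also carries vocab.txt, tokenizer.json, and special_tokens_map.json, the checkpoint directory can be loaded directly as a tokenizer; note the saved padding_side is "left" rather than BERT's usual "right". A sketch:

from transformers import AutoTokenizer

ckpt = "outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999"
tok = AutoTokenizer.from_pretrained(ckpt)  # resolves to a BertTokenizer via tokenizer_config.json
assert tok.padding_side == "left"
batch = tok(["The boy kicked the ball."], padding=True, return_tensors="pt")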
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/val_res.json
ADDED
The diff for this file is too large to render.
See raw diff
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_4999/vocab.txt
ADDED
The diff for this file is too large to render.
See raw diff
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/adapter_config.json
ADDED
@@ -0,0 +1,41 @@
+{
+    "config": {
+        "adapter_residual_before_ln": false,
+        "cross_adapter": false,
+        "dropout": 0.0,
+        "factorized_phm_W": true,
+        "factorized_phm_rule": false,
+        "hypercomplex_nonlinearity": "glorot-uniform",
+        "init_weights": "bert",
+        "inv_adapter": null,
+        "inv_adapter_reduction_factor": null,
+        "is_parallel": false,
+        "learn_phm": true,
+        "leave_out": [],
+        "ln_after": false,
+        "ln_before": false,
+        "mh_adapter": true,
+        "non_linearity": "swish",
+        "original_ln_after": true,
+        "original_ln_before": false,
+        "output_adapter": true,
+        "phm_bias": true,
+        "phm_c_init": "normal",
+        "phm_dim": 4,
+        "phm_init_range": 0.0001,
+        "phm_layer": false,
+        "phm_rank": 1,
+        "reduction_factor": 16,
+        "residual_before_ln": true,
+        "scaling": 1.0,
+        "shared_W_phm": false,
+        "shared_phm_rule": true,
+        "use_gating": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
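With mh_adapter and output_adapter both true, swish non-linearity, and reduction_factor 16, this is the classic Houlsby bottleneck-adapter configuration (the phm_* fields are inactive since phm_layer is false). A sketch of setting it up with the adapters library (the adapter name follows the files in this commit; everything else is assumed):

from adapters import BertAdapterModel, HoulsbyConfig

model = BertAdapterModel.from_pretrained("bert-base-uncased")
# HoulsbyConfig defaults match the saved values: adapters after both the
# attention and output sublayers, swish non-linearity, reduction_factor=16.
model.add_adapter("cola", config=HoulsbyConfig())
model.train_adapter("cola")  # freeze the base model; train only the adapter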
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/all_results.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.5454001814167142}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/all_results_val.json
ADDED
@@ -0,0 +1 @@
+{"eval_matthews_correlation": 0.5577556248606417}
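all_results.json and all_results_val.json hold the single CoLA metric, Matthews correlation, on the eval and validation splits at this step (0.545 vs. 0.558). For reference, the metric can be computed as in this sketch (scikit-learn shown; the diff does not reveal which implementation the run used):

from sklearn.metrics import matthews_corrcoef

y_true = [1, 1, 0, 1, 0]  # hypothetical gold acceptability labels
y_pred = [1, 0, 0, 1, 0]  # hypothetical model predictions
print(matthews_corrcoef(y_true, y_pred))  # +1 perfect, 0 chance, -1 inverse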
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/eval_res.json
ADDED
The diff for this file is too large to render.
See raw diff
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/gpu_stats.json
ADDED
@@ -0,0 +1,130 @@
+{
+    "memory_allocated": 487207936,
+    "max_memory_allocated": 699914240,
+    "memory_reserved": 805306368,
+    "max_memory_reserved": 805306368,
+    "memory_stats": {
+        "active.all.allocated": 10734331,
+        "active.all.current": 520,
+        "active.all.freed": 10733811,
+        "active.all.peak": 777,
+        "active.large_pool.allocated": 409252,
+        "active.large_pool.current": 81,
+        "active.large_pool.freed": 409171,
+        "active.large_pool.peak": 183,
+        "active.small_pool.allocated": 10325079,
+        "active.small_pool.current": 439,
+        "active.small_pool.freed": 10324640,
+        "active.small_pool.peak": 696,
+        "active_bytes.all.allocated": 4478907381248,
+        "active_bytes.all.current": 487207936,
+        "active_bytes.all.freed": 4478420173312,
+        "active_bytes.all.peak": 699914240,
+        "active_bytes.large_pool.allocated": 904929984512,
+        "active_bytes.large_pool.current": 465043456,
+        "active_bytes.large_pool.freed": 904464941056,
+        "active_bytes.large_pool.peak": 646561792,
+        "active_bytes.small_pool.allocated": 3573977396736,
+        "active_bytes.small_pool.current": 22164480,
+        "active_bytes.small_pool.freed": 3573955232256,
+        "active_bytes.small_pool.peak": 133540864,
+        "allocated_bytes.all.allocated": 4478907381248,
+        "allocated_bytes.all.current": 487207936,
+        "allocated_bytes.all.freed": 4478420173312,
+        "allocated_bytes.all.peak": 699914240,
+        "allocated_bytes.large_pool.allocated": 904929984512,
+        "allocated_bytes.large_pool.current": 465043456,
+        "allocated_bytes.large_pool.freed": 904464941056,
+        "allocated_bytes.large_pool.peak": 646561792,
+        "allocated_bytes.small_pool.allocated": 3573977396736,
+        "allocated_bytes.small_pool.current": 22164480,
+        "allocated_bytes.small_pool.freed": 3573955232256,
+        "allocated_bytes.small_pool.peak": 133540864,
+        "allocation.all.allocated": 10734331,
+        "allocation.all.current": 520,
+        "allocation.all.freed": 10733811,
+        "allocation.all.peak": 777,
+        "allocation.large_pool.allocated": 409252,
+        "allocation.large_pool.current": 81,
+        "allocation.large_pool.freed": 409171,
+        "allocation.large_pool.peak": 183,
+        "allocation.small_pool.allocated": 10325079,
+        "allocation.small_pool.current": 439,
+        "allocation.small_pool.freed": 10324640,
+        "allocation.small_pool.peak": 696,
+        "inactive_split.all.allocated": 5866511,
+        "inactive_split.all.current": 48,
+        "inactive_split.all.freed": 5866463,
+        "inactive_split.all.peak": 123,
+        "inactive_split.large_pool.allocated": 347016,
+        "inactive_split.large_pool.current": 19,
+        "inactive_split.large_pool.freed": 346997,
+        "inactive_split.large_pool.peak": 23,
+        "inactive_split.small_pool.allocated": 5519495,
+        "inactive_split.small_pool.current": 29,
+        "inactive_split.small_pool.freed": 5519466,
+        "inactive_split.small_pool.peak": 103,
+        "inactive_split_bytes.all.allocated": 4801769484800,
+        "inactive_split_bytes.all.current": 60148736,
+        "inactive_split_bytes.all.freed": 4801709336064,
+        "inactive_split_bytes.all.peak": 149930496,
+        "inactive_split_bytes.large_pool.allocated": 1028775084032,
+        "inactive_split_bytes.large_pool.current": 48758784,
+        "inactive_split_bytes.large_pool.freed": 1028726325248,
+        "inactive_split_bytes.large_pool.peak": 75759616,
+        "inactive_split_bytes.small_pool.allocated": 3772994400768,
+        "inactive_split_bytes.small_pool.current": 11389952,
+        "inactive_split_bytes.small_pool.freed": 3772983010816,
+        "inactive_split_bytes.small_pool.peak": 82985472,
+        "max_split_size": -1,
+        "num_alloc_retries": 0,
+        "num_device_alloc": 97,
+        "num_device_free": 0,
+        "num_ooms": 0,
+        "num_sync_all_streams": 0,
+        "oversize_allocations.allocated": 0,
+        "oversize_allocations.current": 0,
+        "oversize_allocations.freed": 0,
+        "oversize_allocations.peak": 0,
+        "oversize_segments.allocated": 0,
+        "oversize_segments.current": 0,
+        "oversize_segments.freed": 0,
+        "oversize_segments.peak": 0,
+        "requested_bytes.all.allocated": 4318242086060,
+        "requested_bytes.all.current": 486046072,
+        "requested_bytes.all.freed": 4317756039988,
+        "requested_bytes.all.peak": 680092736,
+        "requested_bytes.large_pool.allocated": 744702728192,
+        "requested_bytes.large_pool.current": 463910912,
+        "requested_bytes.large_pool.freed": 744238817280,
+        "requested_bytes.large_pool.peak": 626776064,
+        "requested_bytes.small_pool.allocated": 3573539357868,
+        "requested_bytes.small_pool.current": 22135160,
+        "requested_bytes.small_pool.freed": 3573517222708,
+        "requested_bytes.small_pool.peak": 133501696,
+        "reserved_bytes.all.allocated": 805306368,
+        "reserved_bytes.all.current": 805306368,
+        "reserved_bytes.all.freed": 0,
+        "reserved_bytes.all.peak": 805306368,
+        "reserved_bytes.large_pool.allocated": 660602880,
+        "reserved_bytes.large_pool.current": 660602880,
+        "reserved_bytes.large_pool.freed": 0,
+        "reserved_bytes.large_pool.peak": 660602880,
+        "reserved_bytes.small_pool.allocated": 144703488,
+        "reserved_bytes.small_pool.current": 144703488,
+        "reserved_bytes.small_pool.freed": 0,
+        "reserved_bytes.small_pool.peak": 144703488,
+        "segment.all.allocated": 97,
+        "segment.all.current": 97,
+        "segment.all.freed": 0,
+        "segment.all.peak": 97,
+        "segment.large_pool.allocated": 28,
+        "segment.large_pool.current": 28,
+        "segment.large_pool.freed": 0,
+        "segment.large_pool.peak": 28,
+        "segment.small_pool.allocated": 69,
+        "segment.small_pool.current": 69,
+        "segment.small_pool.freed": 0,
+        "segment.small_pool.peak": 69
+    }
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/head_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+    "config": {
+        "activation_function": "tanh",
+        "bias": true,
+        "dropout_prob": null,
+        "head_type": "classification",
+        "label2id": {
+            "LABEL_0": 0,
+            "LABEL_1": 1
+        },
+        "layers": 2,
+        "num_labels": 2,
+        "use_pooler": false
+    },
+    "hidden_size": 768,
+    "model_class": "BertAdapterModel",
+    "model_name": "bert-base-uncased",
+    "model_type": "bert",
+    "name": "cola",
+    "version": "0.2.1"
+}
outputs/cola/bert-base-uncased_adapterstrain_val_0.0001_12345_8_10000/step_7499/pytorch_adapter.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07fb7d71f073ed606c5e9dfaa871020fe4e42965c7c1fa167873fdc4c1c99901
+size 7191062